From 4f217c4222b98a9d710ea20ee5d7b5655b6dd36b Mon Sep 17 00:00:00 2001 From: TurboKach Date: Thu, 26 Jan 2023 01:26:55 +0800 Subject: [PATCH 01/27] added shell command to generate python types for latest protocol version --- .gitignore | 3 ++- README.md | 8 +++++++- update-cdp.sh | 17 +++++++++++++++++ 3 files changed, 26 insertions(+), 2 deletions(-) create mode 100755 update-cdp.sh diff --git a/.gitignore b/.gitignore index d982227..f06f256 100644 --- a/.gitignore +++ b/.gitignore @@ -10,4 +10,5 @@ __pycache__ /.vscode /.tool-versions /test_* -/*.log \ No newline at end of file +/*.log +.idea \ No newline at end of file diff --git a/README.md b/README.md index af99a72..f81cb61 100644 --- a/README.md +++ b/README.md @@ -114,7 +114,13 @@ Example: ```sh cdpgen --browser-protocol browser_protocol.json --js-protocol js_protocol.json --output /tmp/cdp ``` -You can then include the `/tmp/cdp` package in your project and import it like the builtin CDP types. +You can then include the `/tmp/cdp` package in your project and import it like the builtin CDP types. + +Here you can find script that automatically downloads latest protocol files and generates python types +```shell +chmod +x update-cdp.sh +./update-cdp.sh +``` ## Implementation of a CDP client The `pycdp.cdp` package follows same structure of CDP domains, each domain is Python module and each command a function in that module. diff --git a/update-cdp.sh b/update-cdp.sh new file mode 100755 index 0000000..2b58e7f --- /dev/null +++ b/update-cdp.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +if [ -d "devtools-protocol" ] && { [ -f "devtools-protocol/browser_protocol.json" ] || [ -f "devtools-protocol/js_protocol.json" ]; }; then + rm -f devtools-protocol/* +fi + +wget -P devtools-protocol/ https://raw.githubusercontent.com/ChromeDevTools/devtools-protocol/master/json/browser_protocol.json https://raw.githubusercontent.com/ChromeDevTools/devtools-protocol/master/json/js_protocol.json +if [ $? 
-ne 0 ]; then + echo "Error: Failed to download files" + exit 1 +fi + +cdpgen --browser-protocol devtools-protocol/browser_protocol.json --js-protocol devtools-protocol/js_protocol.json --output cdp/ +if [ $? -ne 0 ]; then + echo "Error: Failed to execute cdpgen" + exit 1 +fi \ No newline at end of file From e493c3c8d9dea0721bc337b75a843da61a4c4508 Mon Sep 17 00:00:00 2001 From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com> Date: Sat, 18 Mar 2023 14:47:50 -0300 Subject: [PATCH 02/27] update README --- README.md | 16 ++++------------ pycdp/gen/generate.py | 2 +- 2 files changed, 5 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index 06dfc6c..d56cda3 100644 --- a/README.md +++ b/README.md @@ -8,12 +8,6 @@ sending JSON messages over a WebSocket. That JSON format is described by a machine-readable specification. This specification is used to automatically generate the classes and methods found in this library. -You could write a CDP client by connecting a WebSocket and then sending JSON -objects, but this would be tedious and error-prone: the Python interpreter would -not catch any typos in your JSON objects, and you wouldn't get autocomplete for -any parts of the JSON data structure. By providing a set of native Python -wrappers, this project makes it easier and faster to write CDP client code. - ## Installation You can install this library as a dependency on your project with: ``` @@ -92,9 +86,7 @@ d.addCallback(lambda *args: reactor.stop()) reactor.run() ``` -where chrome debugger is listening on `http://localhost:9222` (started by `google-chrome --remote-debugging-port=9222`). - -You also can use just the builtin CDP types with `import pycdp.cdp` on your own client implementation. If you want to try a different CDP version you can build new type wrappers with `cdpgen` command: +You also can use just the builtin CDP type wrappers with `import pycdp.cdp` on your own client implementation. 
If you want to try a different CDP version you can build new type wrappers with `cdpgen` command: ``` usage: cdpgen @@ -108,7 +100,7 @@ optional arguments: JSON file for the javascript protocol --output OUTPUT output path for the generated Python modules -JSON files for the CDP spec can be found at https://github.com/ChromeDevTools/devtools-protocol +JSON files for the CDP spec can be found at https://github.com/ChromeDevTools/devtools-protocol/tree/master/json ``` Example: ```sh @@ -142,10 +134,10 @@ For implementation details check out the [docs][3]. PyCDP is licensed under the MIT License.
-[1]: https://github.com/ChromeDevTools/devtools-protocol/ +[1]: https://chromedevtools.github.io/devtools-protocol/ [2]: https://github.com/ChromeDevTools/devtools-protocol/tree/1b1e643d77dacc9568b5acc1efdeaec19c048a27 [3]: docs/getting_started.rst [4]: https://github.com/hyperiongray/trio-chrome-devtools-protocol [5]: https://python-poetry.org/docs/ [6]: https://pypi.org/project/Twisted/ -[7]: https://pypi.org/project/autobahn/ \ No newline at end of file +[7]: https://pypi.org/project/autobahn/ diff --git a/pycdp/gen/generate.py b/pycdp/gen/generate.py index 6ebf261..3b90ee0 100644 --- a/pycdp/gen/generate.py +++ b/pycdp/gen/generate.py @@ -1038,7 +1038,7 @@ def file_type(path: str): parser = ArgumentParser( usage='%(prog)s ', description='Generate Python types for the Chrome Devtools Protocol (CDP) specification.', - epilog='JSON files for the CDP spec can be found at https://github.com/ChromeDevTools/devtools-protocol' + epilog='JSON files for the CDP spec can be found at https://github.com/ChromeDevTools/devtools-protocol/tree/master/json' ) parser.add_argument( '--browser-protocol', From c3257f9923de6cb2c2969fe7114532e97d750419 Mon Sep 17 00:00:00 2001 From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com> Date: Sat, 18 Mar 2023 15:12:44 -0300 Subject: [PATCH 03/27] fixes #5 --- README.md | 2 +- pycdp/gen/generate.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 184d003..1005745 100644 --- a/README.md +++ b/README.md @@ -109,7 +109,7 @@ cdpgen --browser-protocol browser_protocol.json --js-protocol js_protocol.json - You can then include the `/tmp/cdp` package in your project and import it like the builtin CDP types. ## Implementation of a CDP client -The `pycdp.cdp` package follows same structure of CDP domains, each domain is Python module and each command a function in that module. 
+The `pycdp.cdp` package follows same structure of CDP domains, each domain is a Python module and each command a function in that module. Each function is a generator with a single yield which is a Python dict, on the CDP wire format, containing the message that should be sent to the browser, on resumption the generator receives the message from browser: diff --git a/pycdp/gen/generate.py b/pycdp/gen/generate.py index 3b90ee0..84594b8 100644 --- a/pycdp/gen/generate.py +++ b/pycdp/gen/generate.py @@ -2,6 +2,7 @@ import os import json import typing +import shutil import builtins import logging import operator @@ -1074,6 +1075,7 @@ def file_type(path: str): for domain in domains: logger.info('Generating module: %s → %s/%s.py', domain.domain, output, domain.module) (output / f'{domain.module}.py').write_text(domain.generate_code()) + shutil.copyfile(Path(__file__).parent.parent / 'cdp' / 'util.py', output / 'util.py') generate_init(output / '__init__.py', domains) (output / 'README.md').write_text(GENERATED_PACKAGE_NOTICE) (output / 'py.typed').touch() From 8c46551381a88d01c3f3daa0a3d4084c6464c7b3 Mon Sep 17 00:00:00 2001 From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com> Date: Sat, 18 Mar 2023 15:16:44 -0300 Subject: [PATCH 04/27] add generated package notice in own CDP wrappers --- pycdp/gen/generate.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/pycdp/gen/generate.py b/pycdp/gen/generate.py index 84594b8..5544490 100644 --- a/pycdp/gen/generate.py +++ b/pycdp/gen/generate.py @@ -1020,14 +1020,10 @@ def selfgen(): with module_path.open('w') as module_file: module_file.write(domain.generate_code()) - init_path = output_path / '__init__.py' - generate_init(init_path, domains) - - docs_path = here.parent / 'docs' / 'api' - generate_docs(docs_path, domains) - - py_typed_path = output_path / 'py.typed' - py_typed_path.touch() + generate_init(output_path / '__init__.py', domains) + generate_docs(here.parent / 'docs' / 
'api', domains) + (output_path / 'README.md').write_text(GENERATED_PACKAGE_NOTICE) + (output_path / 'py.typed').touch() def cdpgen(): From 63d1554a24a97751ba49f3b9da6278cc9a858d79 Mon Sep 17 00:00:00 2001 From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com> Date: Sat, 18 Mar 2023 15:31:59 -0300 Subject: [PATCH 05/27] add browser instance leak warning --- pycdp/browser.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pycdp/browser.py b/pycdp/browser.py index 9c22e0c..bab421d 100644 --- a/pycdp/browser.py +++ b/pycdp/browser.py @@ -1,3 +1,4 @@ +import warnings import os import signal import shutil @@ -125,6 +126,10 @@ def _build_launch_env(self): def _configure_profile(self): pass + def __del__(self): + if self._process is not None: + warnings.warn('A BrowserLauncher instance has not closed with .kill(), it will leak') + class ChromeLauncher(BrowserLauncher): From 85a20f4b1ab7c877a2aaa91b84629d359e69c7a2 Mon Sep 17 00:00:00 2001 From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com> Date: Sun, 19 Mar 2023 17:38:58 -0300 Subject: [PATCH 06/27] add safe event listener --- README.md | 13 +++++++------ pycdp/asyncio.py | 26 +++++++++++++++++++++++--- 2 files changed, 30 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 1005745..d355379 100644 --- a/README.md +++ b/README.md @@ -35,13 +35,14 @@ async def main(): target_id = await conn.execute(cdp.target.create_target('about:blank')) target_session = await conn.connect_session(target_id) await target_session.execute(cdp.page.enable()) - await target_session.execute(cdp.page.navigate('https://chromedevtools.github.io/devtools-protocol/')) # you may use "async for target_session.listen()" to listen multiple events, here we listen just a single event. 
- async with target_session.wait_for(cdp.page.DomContentEventFired): - dom = await target_session.execute(cdp.dom.get_document()) - node = await target_session.execute(cdp.dom.query_selector(dom.node_id, 'p')) - js_node = await target_session.execute(cdp.dom.resolve_node(node)) - print((await target_session.execute(cdp.runtime.call_function_on('function() {return this.innerText;}', js_node.object_id, return_by_value=True)))[0].value) + with target_session.safe_wait_for(cdp.page.DomContentEventFired) as navigation: + await target_session.execute(cdp.page.navigate('https://chromedevtools.github.io/devtools-protocol/')) + await navigation + dom = await target_session.execute(cdp.dom.get_document()) + node = await target_session.execute(cdp.dom.query_selector(dom.node_id, 'p')) + js_node = await target_session.execute(cdp.dom.resolve_node(node)) + print((await target_session.execute(cdp.runtime.call_function_on('function() {return this.innerText;}', js_node.object_id, return_by_value=True)))[0].value) await target_session.execute(cdp.page.close()) await conn.close() await asyncio.get_running_loop().run_in_executor(None, chrome.kill) diff --git a/pycdp/asyncio.py b/pycdp/asyncio.py index 14323e9..52e942f 100644 --- a/pycdp/asyncio.py +++ b/pycdp/asyncio.py @@ -4,7 +4,7 @@ import itertools import typing as t from collections import defaultdict -from contextlib import asynccontextmanager +from contextlib import asynccontextmanager, contextmanager from aiohttp import ClientSession from aiohttp.client import ClientWebSocketResponse from aiohttp.http_websocket import WSMsgType, WSCloseCode @@ -118,7 +118,7 @@ def listen(self, *event_types: t.Type[T], buffer_size=100) -> t.AsyncIterator[T] return receiver.__aiter__() @asynccontextmanager - async def wait_for(self, event_type: t.Type[T], buffer_size=100) -> t.AsyncGenerator[T, None]: + async def wait_for(self, event_type: t.Type[T]) -> t.AsyncGenerator[T, None]: ''' Wait for an event of the given type and return it. 
@@ -126,10 +126,30 @@ async def wait_for(self, event_type: t.Type[T], buffer_size=100) -> t.AsyncGener with block. The block will not exit until the indicated event is received. ''' - async for event in self.listen(event_type, buffer_size): + async for event in self.listen(event_type, buffer_size=2): yield event return + @contextmanager + def safe_wait_for(self, event_type: t.Type[T]) -> t.Generator[t.Awaitable[T], None]: + """ + Wait for an asynchronous event. This context manager yields a awaitable that should be + awaited to receive the event. + + Use this context manager to register an event listener before performing the action which will + trigger the event like a page navigation, it avoids the race conditions of wait_for(). + """ + aevent = asyncio.create_task(self._async_wait_for(event_type)) + try: + yield aevent + finally: + if not aevent.done(): + aevent.cancel() + + async def _async_wait_for(self, event_type: t.Type[T]) -> T: + async for event in self.listen(event_type, buffer_size=2): + return event + def close_listeners(self): for listener in itertools.chain.from_iterable(self._listeners.values()): listener.close() From 929680fac065f24229fdf9aac8a7887008d1f025 Mon Sep 17 00:00:00 2001 From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com> Date: Sun, 19 Mar 2023 17:40:08 -0300 Subject: [PATCH 07/27] bump version to 1.3.0 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index b1a77c8..dbc1529 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "python-cdp" -version = "1.2.1" +version = "1.3.0" description = "Python type wrappers for Chrome DevTools Protocol (CDP)" packages = [ {include = "pycdp"} From 42873dd9e5d76da3ae50e33ab76492f5390d94fa Mon Sep 17 00:00:00 2001 From: TurboKach Date: Mon, 20 Mar 2023 19:31:56 +0800 Subject: [PATCH 08/27] updated packages --- .gitignore | 3 +- poetry.lock | 125 
++++++++++++++++++---------------------------------- 2 files changed, 44 insertions(+), 84 deletions(-) diff --git a/.gitignore b/.gitignore index f06f256..12ce418 100644 --- a/.gitignore +++ b/.gitignore @@ -11,4 +11,5 @@ __pycache__ /.tool-versions /test_* /*.log -.idea \ No newline at end of file +.idea +.DS_Store \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index acea45d..3a2d76e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -9,16 +9,14 @@ python-versions = ">=3.6" [package.dependencies] aiosignal = ">=1.1.2" async-timeout = ">=4.0.0a3,<5.0" -asynctest = {version = "0.13.0", markers = "python_version < \"3.8\""} attrs = ">=17.3.0" charset-normalizer = ">=2.0,<3.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" -typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} yarl = ">=1.0,<2.0" [package.extras] -speedups = ["aiodns", "brotli", "cchardet"] +speedups = ["Brotli", "aiodns", "cchardet"] [[package]] name = "aiosignal" @@ -47,17 +45,6 @@ category = "main" optional = false python-versions = ">=3.6" -[package.dependencies] -typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""} - -[[package]] -name = "asynctest" -version = "0.13.0" -description = "Enhance the standard unittest package with features for testing asyncio libraries" -category = "main" -optional = false -python-versions = ">=3.5" - [[package]] name = "atomicwrites" version = "1.4.0" @@ -75,10 +62,10 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] -docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] 
-tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"] +docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"] +tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"] [[package]] name = "babel" @@ -108,7 +95,7 @@ optional = false python-versions = ">=3.5.0" [package.extras] -unicode_backport = ["unicodedata2"] +unicode-backport = ["unicodedata2"] [[package]] name = "colorama" @@ -130,7 +117,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" wrapt = ">=1.10,<2" [package.extras] -dev = ["tox", "bumpversion (<1)", "sphinx (<2)", "PyTest (<5)", "PyTest-Cov (<2.6)", "pytest", "pytest-cov"] +dev = ["PyTest", "PyTest (<5)", "PyTest-Cov", "PyTest-Cov (<2.6)", "bumpversion (<1)", "sphinx (<2)", "tox"] [[package]] name = "docutils" @@ -164,23 +151,6 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -[[package]] -name = "importlib-metadata" -version = "4.11.3" -description = "Read metadata from Python packages" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} -zipp = ">=0.5" - -[package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] -perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf 
(>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] - [[package]] name = "inflection" version = "0.4.0" @@ -270,9 +240,6 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -[package.dependencies] -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} - [package.extras] dev = ["pre-commit", "tox"] @@ -315,7 +282,6 @@ python-versions = ">=3.5" atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} attrs = ">=17.4.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} more-itertools = ">=4.0.0" packaging = "*" pluggy = ">=0.12,<1.0" @@ -350,7 +316,20 @@ urllib3 = ">=1.21.1,<1.27" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<5)"] + +[[package]] +name = "setuptools" +version = "65.7.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = 
["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "snowballstemmer" @@ -378,6 +357,7 @@ Jinja2 = ">=2.3" packaging = "*" Pygments = ">=2.0" requests = ">=2.5.0" +setuptools = "*" snowballstemmer = ">=1.1" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" @@ -388,8 +368,8 @@ sphinxcontrib-serializinghtml = "*" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.800)", "docutils-stubs"] -test = ["pytest", "pytest-cov", "html5lib", "cython", "typed-ast"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.800)"] +test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"] [[package]] name = "sphinx-autodoc-typehints" @@ -403,8 +383,8 @@ python-versions = ">=3.6" Sphinx = ">=3.0" [package.extras] -test = ["pytest (>=3.1.0)", "typing-extensions (>=3.5)", "sphobjinv (>=2.0)", "Sphinx (>=3.2.0)", "dataclasses"] -type_comments = ["typed-ast (>=1.4.0)"] +test = ["Sphinx (>=3.2.0)", "dataclasses", "pytest (>=3.1.0)", "sphobjinv (>=2.0)", "typing-extensions (>=3.5)"] +type-comments = ["typed-ast (>=1.4.0)"] [[package]] name = "sphinx-rtd-theme" @@ -426,7 +406,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] +lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] @@ -438,7 +418,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] +lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] @@ -450,8 +430,8 @@ optional = false python-versions = ">=3.6" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] -test = ["pytest", "html5lib"] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] [[package]] name = "sphinxcontrib-jsmath" @@ -462,7 +442,7 @@ optional = false 
python-versions = ">=3.5" [package.extras] -test = ["pytest", "flake8", "mypy"] +test = ["flake8", "mypy", "pytest"] [[package]] name = "sphinxcontrib-qthelp" @@ -473,7 +453,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] +lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] @@ -485,7 +465,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] +lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] @@ -500,7 +480,7 @@ python-versions = "*" name = "typing-extensions" version = "4.1.1" description = "Backported and Experimental Type Hints for Python 3.6+" -category = "main" +category = "dev" optional = false python-versions = ">=3.6" @@ -513,8 +493,8 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" [package.extras] -brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -544,24 +524,11 @@ python-versions = ">=3.6" [package.dependencies] idna = ">=2.0" multidict = ">=4.0" -typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} - -[[package]] -name = "zipp" -version = "3.7.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] 
[metadata] lock-version = "1.1" -python-versions = "^3.7" -content-hash = "ed29ffc1133f17161446637668c63a01554ba204abab6d4388f9da50df66b182" +python-versions = "^3.8" +content-hash = "f01943eaad90b858f6366f55b6c2d4eab02e20e85fc4df4447611a3387a152a3" [metadata.files] aiohttp = [ @@ -650,10 +617,6 @@ async-timeout = [ {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, ] -asynctest = [ - {file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"}, - {file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"}, -] atomicwrites = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, @@ -755,10 +718,6 @@ imagesize = [ {file = "imagesize-1.3.0-py2.py3-none-any.whl", hash = "sha256:1db2f82529e53c3e929e8926a1fa9235aa82d0bd0c580359c67ec31b2fddaa8c"}, {file = "imagesize-1.3.0.tar.gz", hash = "sha256:cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d"}, ] -importlib-metadata = [ - {file = "importlib_metadata-4.11.3-py3-none-any.whl", hash = "sha256:1208431ca90a8cca1a6b8af391bb53c1a2db74e5d1cef6ddced95d4b2062edc6"}, - {file = "importlib_metadata-4.11.3.tar.gz", hash = "sha256:ea4c597ebf37142f827b8f39299579e31685c31d3a438b59f469406afd0f2539"}, -] inflection = [ {file = "inflection-0.4.0-py2.py3-none-any.whl", hash = "sha256:9a15d3598f01220e93f2207c432cfede50daff53137ce660fb8be838ef1ca6cc"}, {file = "inflection-0.4.0.tar.gz", hash = "sha256:32a5c3341d9583ec319548b9015b7fbdf8c429cbcb575d326c33ae3a0e90d52c"}, @@ -926,6 +885,10 @@ requests = [ {file = 
"requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, ] +setuptools = [ + {file = "setuptools-65.7.0-py3-none-any.whl", hash = "sha256:8ab4f1dbf2b4a65f7eec5ad0c620e84c34111a68d3349833494b9088212214dd"}, + {file = "setuptools-65.7.0.tar.gz", hash = "sha256:4d3c92fac8f1118bb77a22181355e29c239cabfe2b9effdaa665c66b711136d7"}, +] snowballstemmer = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, @@ -1150,7 +1113,3 @@ yarl = [ {file = "yarl-1.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:797c2c412b04403d2da075fb93c123df35239cd7b4cc4e0cd9e5839b73f52c58"}, {file = "yarl-1.7.2.tar.gz", hash = "sha256:45399b46d60c253327a460e99856752009fcee5f5d3c80b2f7c0cae1c38d56dd"}, ] -zipp = [ - {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"}, - {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"}, -] From 3ca893fa755cb6877058fee0af92353466f6cd08 Mon Sep 17 00:00:00 2001 From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com> Date: Fri, 21 Apr 2023 14:18:45 -0300 Subject: [PATCH 09/27] fix docs generator --- docs/api/audits.rst | 13 +++-- docs/api/css.rst | 34 +++++++++++ docs/api/debugger.rst | 9 +++ docs/api/dom.rst | 17 ++++++ docs/api/dom_storage.rst | 5 ++ docs/api/emulation.rst | 4 ++ docs/api/headless_experimental.rst | 9 +-- docs/api/media.rst | 5 ++ docs/api/network.rst | 5 ++ docs/api/page.rst | 14 +++++ docs/api/profiler.rst | 21 ------- docs/api/runtime.rst | 7 +++ docs/api/storage.rst | 90 ++++++++++++++++++++++++++++++ docs/api/system_info.rst | 2 
+ docs/api/target.rst | 10 ++++ docs/api/web_authn.rst | 16 +++++- pycdp/gen/generate.py | 2 +- 17 files changed, 228 insertions(+), 35 deletions(-) diff --git a/docs/api/audits.rst b/docs/api/audits.rst index 0454630..9913afb 100644 --- a/docs/api/audits.rst +++ b/docs/api/audits.rst @@ -34,22 +34,22 @@ arguments to other commands. :undoc-members: :exclude-members: from_json, to_json -.. autoclass:: SameSiteCookieExclusionReason +.. autoclass:: CookieExclusionReason :members: :undoc-members: :exclude-members: from_json, to_json -.. autoclass:: SameSiteCookieWarningReason +.. autoclass:: CookieWarningReason :members: :undoc-members: :exclude-members: from_json, to_json -.. autoclass:: SameSiteCookieOperation +.. autoclass:: CookieOperation :members: :undoc-members: :exclude-members: from_json, to_json -.. autoclass:: SameSiteCookieIssueDetails +.. autoclass:: CookieIssueDetails :members: :undoc-members: :exclude-members: from_json, to_json @@ -174,6 +174,11 @@ arguments to other commands. :undoc-members: :exclude-members: from_json, to_json +.. autoclass:: BounceTrackingIssueDetails + :members: + :undoc-members: + :exclude-members: from_json, to_json + .. autoclass:: ClientHintIssueReason :members: :undoc-members: diff --git a/docs/api/css.rst b/docs/api/css.rst index c68e7fe..c6cce76 100644 --- a/docs/api/css.rst +++ b/docs/api/css.rst @@ -44,6 +44,11 @@ arguments to other commands. :undoc-members: :exclude-members: from_json, to_json +.. autoclass:: InheritedPseudoElementMatches + :members: + :undoc-members: + :exclude-members: from_json, to_json + .. autoclass:: RuleMatch :members: :undoc-members: @@ -124,6 +129,21 @@ arguments to other commands. :undoc-members: :exclude-members: from_json, to_json +.. autoclass:: CSSScope + :members: + :undoc-members: + :exclude-members: from_json, to_json + +.. autoclass:: CSSLayer + :members: + :undoc-members: + :exclude-members: from_json, to_json + +.. 
autoclass:: CSSLayerData + :members: + :undoc-members: + :exclude-members: from_json, to_json + .. autoclass:: PlatformFontUsage :members: :undoc-members: @@ -139,6 +159,16 @@ arguments to other commands. :undoc-members: :exclude-members: from_json, to_json +.. autoclass:: CSSTryRule + :members: + :undoc-members: + :exclude-members: from_json, to_json + +.. autoclass:: CSSPositionFallbackRule + :members: + :undoc-members: + :exclude-members: from_json, to_json + .. autoclass:: CSSKeyframesRule :members: :undoc-members: @@ -184,6 +214,8 @@ to. For more information, see .. autofunction:: get_inline_styles_for_node +.. autofunction:: get_layers_for_node + .. autofunction:: get_matched_styles_for_node .. autofunction:: get_media_queries @@ -204,6 +236,8 @@ to. For more information, see .. autofunction:: set_rule_selector +.. autofunction:: set_scope_text + .. autofunction:: set_style_sheet_text .. autofunction:: set_style_texts diff --git a/docs/api/debugger.rst b/docs/api/debugger.rst index caa81c3..42984e3 100644 --- a/docs/api/debugger.rst +++ b/docs/api/debugger.rst @@ -63,6 +63,11 @@ arguments to other commands. :undoc-members: :exclude-members: from_json, to_json +.. autoclass:: WasmDisassemblyChunk + :members: + :undoc-members: + :exclude-members: from_json, to_json + .. autoclass:: ScriptLanguage :members: :undoc-members: @@ -89,6 +94,8 @@ to. For more information, see .. autofunction:: disable +.. autofunction:: disassemble_wasm_module + .. autofunction:: enable .. autofunction:: evaluate_on_call_frame @@ -101,6 +108,8 @@ to. For more information, see .. autofunction:: get_wasm_bytecode +.. autofunction:: next_wasm_disassembly_chunk + .. autofunction:: pause .. autofunction:: pause_on_async_call diff --git a/docs/api/dom.rst b/docs/api/dom.rst index d629423..85e0eed 100644 --- a/docs/api/dom.rst +++ b/docs/api/dom.rst @@ -53,6 +53,16 @@ arguments to other commands. :undoc-members: :exclude-members: from_json, to_json +.. 
autoclass:: PhysicalAxes + :members: + :undoc-members: + :exclude-members: from_json, to_json + +.. autoclass:: LogicalAxes + :members: + :undoc-members: + :exclude-members: from_json, to_json + .. autoclass:: Node :members: :undoc-members: @@ -144,6 +154,8 @@ to. For more information, see .. autofunction:: get_search_results +.. autofunction:: get_top_layer_elements + .. autofunction:: hide_highlight .. autofunction:: highlight_node @@ -253,6 +265,11 @@ you use the event's attributes. :undoc-members: :exclude-members: from_json, to_json +.. autoclass:: TopLayerElementsUpdated + :members: + :undoc-members: + :exclude-members: from_json, to_json + .. autoclass:: PseudoElementRemoved :members: :undoc-members: diff --git a/docs/api/dom_storage.rst b/docs/api/dom_storage.rst index d0ab1a1..699dbfd 100644 --- a/docs/api/dom_storage.rst +++ b/docs/api/dom_storage.rst @@ -19,6 +19,11 @@ yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands. +.. autoclass:: SerializedStorageKey + :members: + :undoc-members: + :exclude-members: from_json, to_json + .. autoclass:: StorageId :members: :undoc-members: diff --git a/docs/api/emulation.rst b/docs/api/emulation.rst index 5abce21..019e50d 100644 --- a/docs/api/emulation.rst +++ b/docs/api/emulation.rst @@ -76,6 +76,8 @@ to. For more information, see .. autofunction:: set_auto_dark_mode_override +.. autofunction:: set_automation_override + .. autofunction:: set_cpu_throttling_rate .. autofunction:: set_default_background_color_override @@ -96,6 +98,8 @@ to. For more information, see .. autofunction:: set_geolocation_override +.. autofunction:: set_hardware_concurrency_override + .. autofunction:: set_idle_override .. 
autofunction:: set_locale_override diff --git a/docs/api/headless_experimental.rst b/docs/api/headless_experimental.rst index a87a3b2..175051d 100644 --- a/docs/api/headless_experimental.rst +++ b/docs/api/headless_experimental.rst @@ -45,11 +45,4 @@ to. For more information, see Events ------ -Generally, you do not need to instantiate CDP events -yourself. Instead, the API creates events for you and then -you use the event's attributes. - -.. autoclass:: NeedsBeginFramesChanged - :members: - :undoc-members: - :exclude-members: from_json, to_json +*There are no events in this module.* diff --git a/docs/api/media.rst b/docs/api/media.rst index a167f4e..30175fd 100644 --- a/docs/api/media.rst +++ b/docs/api/media.rst @@ -44,6 +44,11 @@ arguments to other commands. :undoc-members: :exclude-members: from_json, to_json +.. autoclass:: PlayerErrorSourceLocation + :members: + :undoc-members: + :exclude-members: from_json, to_json + .. autoclass:: PlayerError :members: :undoc-members: diff --git a/docs/api/network.rst b/docs/api/network.rst index 025d6a3..0347d18 100644 --- a/docs/api/network.rst +++ b/docs/api/network.rst @@ -143,6 +143,11 @@ arguments to other commands. :undoc-members: :exclude-members: from_json, to_json +.. autoclass:: AlternateProtocolUsage + :members: + :undoc-members: + :exclude-members: from_json, to_json + .. autoclass:: Response :members: :undoc-members: diff --git a/docs/api/page.rst b/docs/api/page.rst index 3a8cf5d..79d385d 100644 --- a/docs/api/page.rst +++ b/docs/api/page.rst @@ -37,6 +37,11 @@ arguments to other commands. :undoc-members: :exclude-members: from_json, to_json +.. autoclass:: AdScriptId + :members: + :undoc-members: + :exclude-members: from_json, to_json + .. autoclass:: SecureContextType :members: :undoc-members: @@ -217,6 +222,11 @@ arguments to other commands. :undoc-members: :exclude-members: from_json, to_json +.. autoclass:: AutoResponseMode + :members: + :undoc-members: + :exclude-members: from_json, to_json + .. 
autoclass:: NavigationType :members: :undoc-members: @@ -288,6 +298,8 @@ to. For more information, see .. autofunction:: generate_test_report +.. autofunction:: get_ad_script_id + .. autofunction:: get_app_id .. autofunction:: get_app_manifest @@ -356,6 +368,8 @@ to. For more information, see .. autofunction:: set_lifecycle_events_enabled +.. autofunction:: set_rph_registration_mode + .. autofunction:: set_spc_transaction_mode .. autofunction:: set_touch_emulation_enabled diff --git a/docs/api/profiler.rst b/docs/api/profiler.rst index 02c25d6..6baeafa 100644 --- a/docs/api/profiler.rst +++ b/docs/api/profiler.rst @@ -45,21 +45,6 @@ arguments to other commands. :undoc-members: :exclude-members: from_json, to_json -.. autoclass:: TypeObject - :members: - :undoc-members: - :exclude-members: from_json, to_json - -.. autoclass:: TypeProfileEntry - :members: - :undoc-members: - :exclude-members: from_json, to_json - -.. autoclass:: ScriptTypeProfile - :members: - :undoc-members: - :exclude-members: from_json, to_json - Commands -------- @@ -84,18 +69,12 @@ to. For more information, see .. autofunction:: start_precise_coverage -.. autofunction:: start_type_profile - .. autofunction:: stop .. autofunction:: stop_precise_coverage -.. autofunction:: stop_type_profile - .. autofunction:: take_precise_coverage -.. autofunction:: take_type_profile - Events ------ diff --git a/docs/api/runtime.rst b/docs/api/runtime.rst index 056955e..a2db410 100644 --- a/docs/api/runtime.rst +++ b/docs/api/runtime.rst @@ -26,6 +26,11 @@ arguments to other commands. :undoc-members: :exclude-members: from_json, to_json +.. autoclass:: WebDriverValue + :members: + :undoc-members: + :exclude-members: from_json, to_json + .. autoclass:: RemoteObjectId :members: :undoc-members: @@ -154,6 +159,8 @@ to. For more information, see .. autofunction:: evaluate +.. autofunction:: get_exception_details + .. autofunction:: get_heap_usage .. 
autofunction:: get_isolate_id diff --git a/docs/api/storage.rst b/docs/api/storage.rst index 4989fb9..157a977 100644 --- a/docs/api/storage.rst +++ b/docs/api/storage.rst @@ -17,6 +17,11 @@ yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands. +.. autoclass:: SerializedStorageKey + :members: + :undoc-members: + :exclude-members: from_json, to_json + .. autoclass:: StorageType :members: :undoc-members: @@ -47,6 +52,46 @@ arguments to other commands. :undoc-members: :exclude-members: from_json, to_json +.. autoclass:: SharedStorageAccessType + :members: + :undoc-members: + :exclude-members: from_json, to_json + +.. autoclass:: SharedStorageEntry + :members: + :undoc-members: + :exclude-members: from_json, to_json + +.. autoclass:: SharedStorageMetadata + :members: + :undoc-members: + :exclude-members: from_json, to_json + +.. autoclass:: SharedStorageReportingMetadata + :members: + :undoc-members: + :exclude-members: from_json, to_json + +.. autoclass:: SharedStorageUrlWithMetadata + :members: + :undoc-members: + :exclude-members: from_json, to_json + +.. autoclass:: SharedStorageAccessParams + :members: + :undoc-members: + :exclude-members: from_json, to_json + +.. autoclass:: StorageBucketsDurability + :members: + :undoc-members: + :exclude-members: from_json, to_json + +.. autoclass:: StorageBucketInfo + :members: + :undoc-members: + :exclude-members: from_json, to_json + Commands -------- @@ -63,30 +108,60 @@ to. For more information, see .. autofunction:: clear_data_for_origin +.. autofunction:: clear_data_for_storage_key + +.. autofunction:: clear_shared_storage_entries + .. autofunction:: clear_trust_tokens +.. autofunction:: delete_shared_storage_entry + +.. autofunction:: delete_storage_bucket + .. autofunction:: get_cookies .. autofunction:: get_interest_group_details +.. autofunction:: get_shared_storage_entries + +.. 
autofunction:: get_shared_storage_metadata + +.. autofunction:: get_storage_key_for_frame + .. autofunction:: get_trust_tokens .. autofunction:: get_usage_and_quota .. autofunction:: override_quota_for_origin +.. autofunction:: reset_shared_storage_budget + .. autofunction:: set_cookies .. autofunction:: set_interest_group_tracking +.. autofunction:: set_shared_storage_entry + +.. autofunction:: set_shared_storage_tracking + +.. autofunction:: set_storage_bucket_tracking + .. autofunction:: track_cache_storage_for_origin +.. autofunction:: track_cache_storage_for_storage_key + .. autofunction:: track_indexed_db_for_origin +.. autofunction:: track_indexed_db_for_storage_key + .. autofunction:: untrack_cache_storage_for_origin +.. autofunction:: untrack_cache_storage_for_storage_key + .. autofunction:: untrack_indexed_db_for_origin +.. autofunction:: untrack_indexed_db_for_storage_key + Events ------ @@ -118,3 +193,18 @@ you use the event's attributes. :members: :undoc-members: :exclude-members: from_json, to_json + +.. autoclass:: SharedStorageAccessed + :members: + :undoc-members: + :exclude-members: from_json, to_json + +.. autoclass:: StorageBucketCreatedOrUpdated + :members: + :undoc-members: + :exclude-members: from_json, to_json + +.. autoclass:: StorageBucketDeleted + :members: + :undoc-members: + :exclude-members: from_json, to_json diff --git a/docs/api/system_info.rst b/docs/api/system_info.rst index 1b595c7..5dbbd7b 100644 --- a/docs/api/system_info.rst +++ b/docs/api/system_info.rst @@ -76,6 +76,8 @@ commands, and ``z`` is the return type you should pay attention to. For more information, see :ref:`Getting Started: Commands `. +.. autofunction:: get_feature_state + .. autofunction:: get_info .. autofunction:: get_process_info diff --git a/docs/api/target.rst b/docs/api/target.rst index 263ae8a..38edd3b 100644 --- a/docs/api/target.rst +++ b/docs/api/target.rst @@ -32,6 +32,16 @@ arguments to other commands. 
:undoc-members: :exclude-members: from_json, to_json +.. autoclass:: FilterEntry + :members: + :undoc-members: + :exclude-members: from_json, to_json + +.. autoclass:: TargetFilter + :members: + :undoc-members: + :exclude-members: from_json, to_json + .. autoclass:: RemoteLocation :members: :undoc-members: diff --git a/docs/api/web_authn.rst b/docs/api/web_authn.rst index 8a1a0b9..cc38dd3 100644 --- a/docs/api/web_authn.rst +++ b/docs/api/web_authn.rst @@ -82,9 +82,23 @@ to. For more information, see .. autofunction:: set_automatic_presence_simulation +.. autofunction:: set_response_override_bits + .. autofunction:: set_user_verified Events ------ -*There are no events in this module.* +Generally, you do not need to instantiate CDP events +yourself. Instead, the API creates events for you and then +you use the event's attributes. + +.. autoclass:: CredentialAdded + :members: + :undoc-members: + :exclude-members: from_json, to_json + +.. autoclass:: CredentialAsserted + :members: + :undoc-members: + :exclude-members: from_json, to_json diff --git a/pycdp/gen/generate.py b/pycdp/gen/generate.py index 5544490..e7fc217 100644 --- a/pycdp/gen/generate.py +++ b/pycdp/gen/generate.py @@ -1021,7 +1021,7 @@ def selfgen(): module_file.write(domain.generate_code()) generate_init(output_path / '__init__.py', domains) - generate_docs(here.parent / 'docs' / 'api', domains) + generate_docs(here.parent.parent / 'docs' / 'api', domains) (output_path / 'README.md').write_text(GENERATED_PACKAGE_NOTICE) (output_path / 'py.typed').touch() From 735645db801cf31b67bd70e14dfc2406f311e2a3 Mon Sep 17 00:00:00 2001 From: TurboKach Date: Sat, 22 Apr 2023 02:24:46 +0800 Subject: [PATCH 10/27] updated CDP protocol --- poetry.lock | 1349 ++++++++++++++-------------- pycdp/cdp/__init__.py | 2 +- pycdp/cdp/accessibility.py | 58 +- pycdp/cdp/animation.py | 10 +- pycdp/cdp/audits.py | 261 +++--- pycdp/cdp/background_service.py | 5 + pycdp/cdp/browser.py | 28 +- pycdp/cdp/cache_storage.py | 16 +- 
pycdp/cdp/cast.py | 2 +- pycdp/cdp/console.py | 6 +- pycdp/cdp/css.py | 394 ++++++-- pycdp/cdp/database.py | 6 +- pycdp/cdp/debugger.py | 221 +++-- pycdp/cdp/dom.py | 165 +++- pycdp/cdp/dom_debugger.py | 6 +- pycdp/cdp/dom_snapshot.py | 117 +-- pycdp/cdp/dom_storage.py | 29 +- pycdp/cdp/emulation.py | 59 +- pycdp/cdp/fetch.py | 30 +- pycdp/cdp/headless_experimental.py | 39 +- pycdp/cdp/heap_profiler.py | 28 +- pycdp/cdp/indexed_db.py | 105 ++- pycdp/cdp/input_.py | 24 +- pycdp/cdp/io.py | 2 +- pycdp/cdp/layer_tree.py | 24 +- pycdp/cdp/log.py | 14 +- pycdp/cdp/media.py | 59 +- pycdp/cdp/network.py | 281 +++--- pycdp/cdp/overlay.py | 131 +-- pycdp/cdp/page.py | 242 +++-- pycdp/cdp/performance_timeline.py | 14 +- pycdp/cdp/profiler.py | 130 +-- pycdp/cdp/runtime.py | 190 ++-- pycdp/cdp/security.py | 16 +- pycdp/cdp/service_worker.py | 8 +- pycdp/cdp/storage.py | 660 +++++++++++++- pycdp/cdp/system_info.py | 27 +- pycdp/cdp/target.py | 97 +- pycdp/cdp/tracing.py | 39 +- pycdp/cdp/web_audio.py | 14 +- pycdp/cdp/web_authn.py | 103 ++- pycdp/gen/generate.py | 5 +- pyproject.toml | 2 +- update-cdp.sh | 38 +- 44 files changed, 3398 insertions(+), 1658 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3a2d76e..8fae0f2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,3 +1,5 @@ +# This file is automatically @generated by Poetry and should not be changed by hand. 
+ [[package]] name = "aiohttp" version = "3.8.1" @@ -5,6 +7,80 @@ description = "Async http client/server framework (asyncio)" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1ed0b6477896559f17b9eaeb6d38e07f7f9ffe40b9f0f9627ae8b9926ae260a8"}, + {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7dadf3c307b31e0e61689cbf9e06be7a867c563d5a63ce9dca578f956609abf8"}, + {file = "aiohttp-3.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a79004bb58748f31ae1cbe9fa891054baaa46fb106c2dc7af9f8e3304dc30316"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12de6add4038df8f72fac606dff775791a60f113a725c960f2bab01d8b8e6b15"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f0d5f33feb5f69ddd57a4a4bd3d56c719a141080b445cbf18f238973c5c9923"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eaba923151d9deea315be1f3e2b31cc39a6d1d2f682f942905951f4e40200922"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:099ebd2c37ac74cce10a3527d2b49af80243e2a4fa39e7bce41617fbc35fa3c1"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e5d962cf7e1d426aa0e528a7e198658cdc8aa4fe87f781d039ad75dcd52c516"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fa0ffcace9b3aa34d205d8130f7873fcfefcb6a4dd3dd705b0dab69af6712642"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:61bfc23df345d8c9716d03717c2ed5e27374e0fe6f659ea64edcd27b4b044cf7"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:31560d268ff62143e92423ef183680b9829b1b482c011713ae941997921eebc8"}, + 
{file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:01d7bdb774a9acc838e6b8f1d114f45303841b89b95984cbb7d80ea41172a9e3"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97ef77eb6b044134c0b3a96e16abcb05ecce892965a2124c566af0fd60f717e2"}, + {file = "aiohttp-3.8.1-cp310-cp310-win32.whl", hash = "sha256:c2aef4703f1f2ddc6df17519885dbfa3514929149d3ff900b73f45998f2532fa"}, + {file = "aiohttp-3.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:713ac174a629d39b7c6a3aa757b337599798da4c1157114a314e4e391cd28e32"}, + {file = "aiohttp-3.8.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:473d93d4450880fe278696549f2e7aed8cd23708c3c1997981464475f32137db"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b5eeae8e019e7aad8af8bb314fb908dd2e028b3cdaad87ec05095394cce632"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3af642b43ce56c24d063325dd2cf20ee012d2b9ba4c3c008755a301aaea720ad"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3630c3ef435c0a7c549ba170a0633a56e92629aeed0e707fec832dee313fb7a"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4a4a4e30bf1edcad13fb0804300557aedd07a92cabc74382fdd0ba6ca2661091"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6f8b01295e26c68b3a1b90efb7a89029110d3a4139270b24fda961893216c440"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a25fa703a527158aaf10dafd956f7d42ac6d30ec80e9a70846253dd13e2f067b"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5bfde62d1d2641a1f5173b8c8c2d96ceb4854f54a44c23102e2ccc7e02f003ec"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = 
"sha256:51467000f3647d519272392f484126aa716f747859794ac9924a7aafa86cd411"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:03a6d5349c9ee8f79ab3ff3694d6ce1cfc3ced1c9d36200cb8f08ba06bd3b782"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:102e487eeb82afac440581e5d7f8f44560b36cf0bdd11abc51a46c1cd88914d4"}, + {file = "aiohttp-3.8.1-cp36-cp36m-win32.whl", hash = "sha256:4aed991a28ea3ce320dc8ce655875e1e00a11bdd29fe9444dd4f88c30d558602"}, + {file = "aiohttp-3.8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b0e20cddbd676ab8a64c774fefa0ad787cc506afd844de95da56060348021e96"}, + {file = "aiohttp-3.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:37951ad2f4a6df6506750a23f7cbabad24c73c65f23f72e95897bb2cecbae676"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c23b1ad869653bc818e972b7a3a79852d0e494e9ab7e1a701a3decc49c20d51"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15b09b06dae900777833fe7fc4b4aa426556ce95847a3e8d7548e2d19e34edb8"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:477c3ea0ba410b2b56b7efb072c36fa91b1e6fc331761798fa3f28bb224830dd"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2f2f69dca064926e79997f45b2f34e202b320fd3782f17a91941f7eb85502ee2"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ef9612483cb35171d51d9173647eed5d0069eaa2ee812793a75373447d487aa4"}, + {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6d69f36d445c45cda7b3b26afef2fc34ef5ac0cdc75584a87ef307ee3c8c6d00"}, + {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:55c3d1072704d27401c92339144d199d9de7b52627f724a949fc7d5fc56d8b93"}, + {file = 
"aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b9d00268fcb9f66fbcc7cd9fe423741d90c75ee029a1d15c09b22d23253c0a44"}, + {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:07b05cd3305e8a73112103c834e91cd27ce5b4bd07850c4b4dbd1877d3f45be7"}, + {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c34dc4958b232ef6188c4318cb7b2c2d80521c9a56c52449f8f93ab7bc2a8a1c"}, + {file = "aiohttp-3.8.1-cp37-cp37m-win32.whl", hash = "sha256:d2f9b69293c33aaa53d923032fe227feac867f81682f002ce33ffae978f0a9a9"}, + {file = "aiohttp-3.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6ae828d3a003f03ae31915c31fa684b9890ea44c9c989056fea96e3d12a9fa17"}, + {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0c7ebbbde809ff4e970824b2b6cb7e4222be6b95a296e46c03cf050878fc1785"}, + {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b7ef7cbd4fec9a1e811a5de813311ed4f7ac7d93e0fda233c9b3e1428f7dd7b"}, + {file = "aiohttp-3.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c3d6a4d0619e09dcd61021debf7059955c2004fa29f48788a3dfaf9c9901a7cd"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:718626a174e7e467f0558954f94af117b7d4695d48eb980146016afa4b580b2e"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:589c72667a5febd36f1315aa6e5f56dd4aa4862df295cb51c769d16142ddd7cd"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ed076098b171573161eb146afcb9129b5ff63308960aeca4b676d9d3c35e700"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:086f92daf51a032d062ec5f58af5ca6a44d082c35299c96376a41cbb33034675"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:11691cf4dc5b94236ccc609b70fec991234e7ef8d4c02dd0c9668d1e486f5abf"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:31d1e1c0dbf19ebccbfd62eff461518dcb1e307b195e93bba60c965a4dcf1ba0"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:11a67c0d562e07067c4e86bffc1553f2cf5b664d6111c894671b2b8712f3aba5"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:bb01ba6b0d3f6c68b89fce7305080145d4877ad3acaed424bae4d4ee75faa950"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:44db35a9e15d6fe5c40d74952e803b1d96e964f683b5a78c3cc64eb177878155"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:844a9b460871ee0a0b0b68a64890dae9c415e513db0f4a7e3cab41a0f2fedf33"}, + {file = "aiohttp-3.8.1-cp38-cp38-win32.whl", hash = "sha256:7d08744e9bae2ca9c382581f7dce1273fe3c9bae94ff572c3626e8da5b193c6a"}, + {file = "aiohttp-3.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:04d48b8ce6ab3cf2097b1855e1505181bdd05586ca275f2505514a6e274e8e75"}, + {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f5315a2eb0239185af1bddb1abf472d877fede3cc8d143c6cddad37678293237"}, + {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a996d01ca39b8dfe77440f3cd600825d05841088fd6bc0144cc6c2ec14cc5f74"}, + {file = "aiohttp-3.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:13487abd2f761d4be7c8ff9080de2671e53fff69711d46de703c310c4c9317ca"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea302f34477fda3f85560a06d9ebdc7fa41e82420e892fc50b577e35fc6a50b2"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2f635ce61a89c5732537a7896b6319a8fcfa23ba09bec36e1b1ac0ab31270d2"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e999f2d0e12eea01caeecb17b653f3713d758f6dcc770417cf29ef08d3931421"}, 
+ {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0770e2806a30e744b4e21c9d73b7bee18a1cfa3c47991ee2e5a65b887c49d5cf"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d15367ce87c8e9e09b0f989bfd72dc641bcd04ba091c68cd305312d00962addd"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6c7cefb4b0640703eb1069835c02486669312bf2f12b48a748e0a7756d0de33d"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:71927042ed6365a09a98a6377501af5c9f0a4d38083652bcd2281a06a5976724"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:28d490af82bc6b7ce53ff31337a18a10498303fe66f701ab65ef27e143c3b0ef"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:b6613280ccedf24354406caf785db748bebbddcf31408b20c0b48cb86af76866"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81e3d8c34c623ca4e36c46524a3530e99c0bc95ed068fd6e9b55cb721d408fb2"}, + {file = "aiohttp-3.8.1-cp39-cp39-win32.whl", hash = "sha256:7187a76598bdb895af0adbd2fb7474d7f6025d170bc0a1130242da817ce9e7d1"}, + {file = "aiohttp-3.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:1c182cb873bc91b411e184dab7a2b664d4fea2743df0e4d57402f7f3fa644bac"}, + {file = "aiohttp-3.8.1.tar.gz", hash = "sha256:fc5471e1a54de15ef71c1bc6ebe80d4dc681ea600e68bfd1cbce40427f0b7578"}, +] [package.dependencies] aiosignal = ">=1.1.2" @@ -20,22 +96,30 @@ speedups = ["Brotli", "aiodns", "cchardet"] [[package]] name = "aiosignal" -version = "1.2.0" +version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = 
"aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] [package.dependencies] frozenlist = ">=1.1.0" [[package]] name = "alabaster" -version = "0.7.12" +version = "0.7.13" description = "A configurable sidebar-enabled Sphinx theme" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" +files = [ + {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, + {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, +] [[package]] name = "async-timeout" @@ -44,66 +128,94 @@ description = "Timeout context manager for asyncio programs" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, + {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, +] [[package]] name = "atomicwrites" -version = "1.4.0" +version = "1.4.1" description = "Atomic file writes." 
category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] [[package]] name = "attrs" -version = "21.4.0" +version = "23.1.0" description = "Classes Without Boilerplate" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.7" +files = [ + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] [package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] [[package]] name = "babel" -version = "2.9.1" +version = "2.12.1" description = "Internationalization utilities" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.7" +files = [ + {file = 
"Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, + {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, +] [package.dependencies] -pytz = ">=2015.7" +pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} [[package]] name = "certifi" -version = "2021.10.8" +version = "2022.12.7" description = "Python package for providing Mozilla's CA Bundle." category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" +files = [ + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, +] [[package]] name = "charset-normalizer" -version = "2.0.12" +version = "2.1.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false -python-versions = ">=3.5.0" +python-versions = ">=3.6.0" +files = [ + {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, + {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, +] [package.extras] unicode-backport = ["unicodedata2"] [[package]] name = "colorama" -version = "0.4.4" +version = "0.4.6" description = "Cross-platform colored terminal text." 
category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] [[package]] name = "deprecated" @@ -112,6 +224,10 @@ description = "Python @deprecated decorator to deprecate old python classes, fun category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.9-py2.py3-none-any.whl", hash = "sha256:55b41a15bda04c6a2c0d27dd4c2b7b81ffa6348c9cad8f077ac1978c59927ab9"}, + {file = "Deprecated-1.2.9.tar.gz", hash = "sha256:0cf37d293a96805c6afd8b5fc525cb40f23a2cac9b2d066ac3bd4b04e72ceccc"}, +] [package.dependencies] wrapt = ">=1.10,<2" @@ -126,30 +242,118 @@ description = "Docutils -- Python Documentation Utilities" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "docutils-0.16-py2.py3-none-any.whl", hash = "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af"}, + {file = "docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"}, +] [[package]] name = "frozenlist" -version = "1.3.0" +version = "1.3.3" description = "A list-like structure which implements collections.abc.MutableSequence" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"}, + {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"}, + {file = 
"frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"}, + {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"}, + {file = 
"frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"}, + {file = 
"frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"}, + {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"}, + {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"}, + {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"}, + {file = 
"frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"}, + {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"}, + {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"}, + {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"}, + {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"}, + {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"}, + {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"}, + {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"}, +] [[package]] name = "idna" -version = "3.3" +version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = 
"sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] [[package]] name = "imagesize" -version = "1.3.0" +version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] [[package]] name = "inflection" @@ -158,14 +362,22 @@ description = "A port of Ruby on Rails inflector to Python" category = "main" optional = false python-versions = ">=3.5" +files = [ + {file = "inflection-0.4.0-py2.py3-none-any.whl", hash = "sha256:9a15d3598f01220e93f2207c432cfede50daff53137ce660fb8be838ef1ca6cc"}, + {file = "inflection-0.4.0.tar.gz", hash = "sha256:32a5c3341d9583ec319548b9015b7fbdf8c429cbcb575d326c33ae3a0e90d52c"}, +] [[package]] name = "jinja2" -version = "3.1.1" +version = "3.1.2" description = "A very fast and expressive template engine." category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] [package.dependencies] MarkupSafe = ">=2.0" @@ -175,27 +387,159 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "markupsafe" -version = "2.1.1" +version = "2.1.2" description = "Safely add untrusted strings to HTML/XML markup." 
category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, + {file = 
"MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, + {file = 
"MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, + {file = 
"MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, + {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, +] [[package]] name = "more-itertools" -version = "8.12.0" +version 
= "9.1.0" description = "More routines for operating on iterables, beyond itertools" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" +files = [ + {file = "more-itertools-9.1.0.tar.gz", hash = "sha256:cabaa341ad0389ea83c17a94566a53ae4c9d07349861ecb14dc6d0345cf9ac5d"}, + {file = "more_itertools-9.1.0-py3-none-any.whl", hash = "sha256:d2bc7f02446e86a68911e58ded76d6561eea00cddfb2a91e7019bbb586c799f3"}, +] [[package]] name = "multidict" -version = "6.0.2" +version = "6.0.4" description = "multidict implementation" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, + {file = 
"multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, + {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, + {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, + {file = 
"multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, + {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, + {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, + {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, + {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, + {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, + {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, + {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", 
hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, + {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, + {file = 
"multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, + {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, +] [[package]] name = "mypy" @@ -204,6 +548,22 @@ description = "Optional static typing for Python" category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "mypy-0.770-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:a34b577cdf6313bf24755f7a0e3f3c326d5c1f4fe7422d1d06498eb25ad0c600"}, + {file = "mypy-0.770-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:86c857510a9b7c3104cf4cde1568f4921762c8f9842e987bc03ed4f160925754"}, + {file = "mypy-0.770-cp35-cp35m-win_amd64.whl", hash = "sha256:a8ffcd53cb5dfc131850851cc09f1c44689c2812d0beb954d8138d4f5fc17f65"}, + {file = "mypy-0.770-cp36-cp36m-macosx_10_6_x86_64.whl", hash = "sha256:7687f6455ec3ed7649d1ae574136835a4272b65b3ddcf01ab8704ac65616c5ce"}, + {file = "mypy-0.770-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3beff56b453b6ef94ecb2996bea101a08f1f8a9771d3cbf4988a61e4d9973761"}, + {file = "mypy-0.770-cp36-cp36m-win_amd64.whl", hash = "sha256:15b948e1302682e3682f11f50208b726a246ab4e6c1b39f9264a8796bb416aa2"}, + {file = "mypy-0.770-cp37-cp37m-macosx_10_6_x86_64.whl", hash = "sha256:b90928f2d9eb2f33162405f32dde9f6dcead63a0971ca8a1b50eb4ca3e35ceb8"}, + {file = "mypy-0.770-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:c56ffe22faa2e51054c5f7a3bc70a370939c2ed4de308c690e7949230c995913"}, + {file = "mypy-0.770-cp37-cp37m-win_amd64.whl", hash = "sha256:8dfb69fbf9f3aeed18afffb15e319ca7f8da9642336348ddd6cab2713ddcf8f9"}, + {file = "mypy-0.770-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:219a3116ecd015f8dca7b5d2c366c973509dfb9a8fc97ef044a36e3da66144a1"}, + {file = "mypy-0.770-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7ec45a70d40ede1ec7ad7f95b3c94c9cf4c186a32f6bacb1795b60abd2f9ef27"}, + {file = "mypy-0.770-cp38-cp38-win_amd64.whl", hash = 
"sha256:f91c7ae919bbc3f96cd5e5b2e786b2b108343d1d7972ea130f7de27fdd547cf3"}, + {file = "mypy-0.770-py3-none-any.whl", hash = "sha256:3b1fc683fb204c6b4403a1ef23f0b1fac8e4477091585e0c8c54cbdf7d7bb164"}, + {file = "mypy-0.770.tar.gz", hash = "sha256:8a627507ef9b307b46a1fea9513d5c98680ba09591253082b4c48697ba05a4ae"}, +] [package.dependencies] mypy-extensions = ">=0.4.3,<0.5.0" @@ -215,22 +575,26 @@ dmypy = ["psutil (>=4.0)"] [[package]] name = "mypy-extensions" -version = "0.4.3" +version = "0.4.4" description = "Experimental type system extensions for programs checked with the mypy typechecker." category = "dev" optional = false -python-versions = "*" +python-versions = ">=2.7" +files = [ + {file = "mypy_extensions-0.4.4.tar.gz", hash = "sha256:c8b707883a96efe9b4bb3aaf0dcc07e7e217d7d8368eec4db4049ee9e142f4fd"}, +] [[package]] name = "packaging" -version = "21.3" +version = "23.1" description = "Core utilities for Python packages" category = "dev" optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +python-versions = ">=3.7" +files = [ + {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, + {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, +] [[package]] name = "pluggy" @@ -239,6 +603,10 @@ description = "plugin and hook calling mechanisms for python" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, + {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, +] [package.extras] dev = ["pre-commit", "tox"] @@ -250,25 +618,25 @@ description = "library with cross-python path, ini-parsing, io, code, log facili category = "dev" optional = 
false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] [[package]] name = "pygments" -version = "2.11.2" +version = "2.15.1" description = "Pygments is a syntax highlighting package written in Python." category = "dev" optional = false -python-versions = ">=3.5" - -[[package]] -name = "pyparsing" -version = "3.0.7" -description = "Python parsing module" -category = "dev" -optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, + {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, +] [package.extras] -diagrams = ["jinja2", "railroad-diagrams"] +plugins = ["importlib-metadata"] [[package]] name = "pytest" @@ -277,6 +645,10 @@ description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"}, + {file = "pytest-5.4.3.tar.gz", hash = "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"}, +] [package.dependencies] atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} @@ -294,37 +666,49 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytz" -version = "2022.1" +version = "2023.3" description = "World timezone definitions, modern and historical" category = "dev" optional = false python-versions = "*" +files = [ + {file = "pytz-2023.3-py2.py3-none-any.whl", hash = 
"sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, + {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, +] [[package]] name = "requests" -version = "2.27.1" +version = "2.28.2" description = "Python HTTP for Humans." category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.7, <4" +files = [ + {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, + {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, +] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} -idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" urllib3 = ">=1.21.1,<1.27" [package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<5)"] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "setuptools" -version = "65.7.0" +version = "67.7.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "setuptools-67.7.1-py3-none-any.whl", hash = "sha256:6f0839fbdb7e3cfef1fc38d7954f5c1c26bf4eebb155a55c9bf8faf997b9fb67"}, + {file = "setuptools-67.7.1.tar.gz", hash = "sha256:bb16732e8eb928922eabaa022f881ae2b7cdcfaf9993ef1f5e841a96d32b8e0c"}, +] [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", 
"sphinxcontrib-towncrier"] @@ -338,6 +722,10 @@ description = "This package provides 29 stemmers for 28 languages generated from category = "dev" optional = false python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] [[package]] name = "sphinx" @@ -346,6 +734,10 @@ description = "Python documentation generator" category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "Sphinx-3.5.4-py3-none-any.whl", hash = "sha256:2320d4e994a191f4b4be27da514e46b3d6b420f2ff895d064f52415d342461e8"}, + {file = "Sphinx-3.5.4.tar.gz", hash = "sha256:19010b7b9fa0dc7756a6e105b2aacd3a80f798af3c25c273be64d7beeb482cb1"}, +] [package.dependencies] alabaster = ">=0.7,<0.8" @@ -378,6 +770,10 @@ description = "Type hints (PEP 484) support for the Sphinx autodoc extension" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "sphinx-autodoc-typehints-1.12.0.tar.gz", hash = "sha256:193617d9dbe0847281b1399d369e74e34cd959c82e02c7efde077fca908a9f52"}, + {file = "sphinx_autodoc_typehints-1.12.0-py3-none-any.whl", hash = "sha256:5e81776ec422dd168d688ab60f034fccfafbcd94329e9537712c93003bddc04a"}, +] [package.dependencies] Sphinx = ">=3.0" @@ -393,17 +789,25 @@ description = "Read the Docs theme for Sphinx" category = "dev" optional = false python-versions = "*" +files = [ + {file = "sphinx_rtd_theme-0.4.3-py2.py3-none-any.whl", hash = "sha256:00cf895504a7895ee433807c62094cf1e95f065843bf3acd17037c3e9a2becd4"}, + {file = "sphinx_rtd_theme-0.4.3.tar.gz", hash = "sha256:728607e34d60456d736cc7991fd236afb828b21b82f956c5ea75f94c8414040a"}, +] [package.dependencies] sphinx = "*" [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.2" -description = "sphinxcontrib-applehelp is a sphinx extension which outputs 
Apple help books" +version = "1.0.4" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, + {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, +] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -416,6 +820,10 @@ description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -423,11 +831,15 @@ test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.0" +version = "2.0.1" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, + {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, +] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -440,6 +852,10 @@ description = "A sphinx extension which renders display math in HTML via JavaScr category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = 
"sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] [package.extras] test = ["flake8", "mypy", "pytest"] @@ -451,6 +867,10 @@ description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp d category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -463,6 +883,10 @@ description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -475,641 +899,254 @@ description = "a fork of Python 2 and 3 ast modules with type comment support" category = "dev" optional = false python-versions = "*" +files = [ + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = 
"sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, + {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, + {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, + {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, + {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, + {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, + {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, + {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, + {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, + {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, +] [[package]] name = "typing-extensions" -version = "4.1.1" -description = "Backported and Experimental Type Hints for Python 3.6+" +version = "4.5.0" +description = "Backported and Experimental Type Hints for Python 3.7+" category = "dev" optional = 
false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, + {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, +] [[package]] name = "urllib3" -version = "1.26.9" +version = "1.26.15" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, + {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, +] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "wcwidth" -version = "0.2.5" +version = "0.2.6" description = "Measures the displayed width of unicode strings in a terminal" category = "dev" optional = false python-versions = "*" +files = [ + {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, + {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, +] [[package]] name = "wrapt" -version = "1.14.0" +version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." 
category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +files = [ + {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"}, + {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"}, + {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"}, + {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"}, + {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"}, + {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"}, + {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"}, + 
{file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"}, + {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"}, + {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"}, + {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"}, + {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"}, + {file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"}, + {file = 
"wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"}, + {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"}, + {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"}, + {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"}, + {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"}, + {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"}, + {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"}, + {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"}, + {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"}, + {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"}, + {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"}, + {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"}, + {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"}, + {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"}, + {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"}, + {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, +] [[package]] name = "yarl" -version = "1.7.2" +version = "1.9.1" description = "Yet another URL library" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:e124b283a04cc06d22443cae536f93d86cd55108fa369f22b8fe1f2288b2fe1c"}, + {file = "yarl-1.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:56956b13ec275de31fe4fb991510b735c4fb3e1b01600528c952b9ac90464430"}, + {file = "yarl-1.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ecaa5755a39f6f26079bf13f336c67af589c222d76b53cd3824d3b684b84d1f1"}, + {file = "yarl-1.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92a101f6d5a9464e86092adc36cd40ef23d18a25bfb1eb32eaeb62edc22776bb"}, + {file = "yarl-1.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92e37999e36f9f3ded78e9d839face6baa2abdf9344ea8ed2735f495736159de"}, + {file = "yarl-1.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef7e2f6c47c41e234600a02e1356b799761485834fe35d4706b0094cb3a587ee"}, + {file = "yarl-1.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d7a0075a55380b19aa43b9e8056e128b058460d71d75018a4f9d60ace01e78c"}, + {file = "yarl-1.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f01351b7809182822b21061d2a4728b7b9e08f4585ba90ee4c5c4d3faa0812"}, + {file = "yarl-1.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6cf47fe9df9b1ededc77e492581cdb6890a975ad96b4172e1834f1b8ba0fc3ba"}, + {file = "yarl-1.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:098bdc06ffb4db39c73883325b8c738610199f5f12e85339afedf07e912a39af"}, + {file = "yarl-1.9.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:6cdb47cbbacae8e1d7941b0d504d0235d686090eef5212ca2450525905e9cf02"}, + {file = "yarl-1.9.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:73a4b46689f2d59c8ec6b71c9a0cdced4e7863dd6eb98a8c30ea610e191f9e1c"}, + {file = "yarl-1.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:65d952e464df950eed32bb5dcbc1b4443c7c2de4d7abd7265b45b1b3b27f5fa2"}, + {file = "yarl-1.9.1-cp310-cp310-win32.whl", hash = 
"sha256:39a7a9108e9fc633ae381562f8f0355bb4ba00355218b5fb19cf5263fcdbfa68"}, + {file = "yarl-1.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:b63d41e0eecf3e3070d44f97456cf351fff7cb960e97ecb60a936b877ff0b4f6"}, + {file = "yarl-1.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4295790981630c4dab9d6de7b0f555a4c8defe3ed7704a8e9e595a321e59a0f5"}, + {file = "yarl-1.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b2b2382d59dec0f1fdca18ea429c4c4cee280d5e0dbc841180abb82e188cf6e9"}, + {file = "yarl-1.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:575975d28795a61e82c85f114c02333ca54cbd325fd4e4b27598c9832aa732e7"}, + {file = "yarl-1.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bb794882818fae20ff65348985fdf143ea6dfaf6413814db1848120db8be33e"}, + {file = "yarl-1.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89da1fd6068553e3a333011cc17ad91c414b2100c32579ddb51517edc768b49c"}, + {file = "yarl-1.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d817593d345fefda2fae877accc8a0d9f47ada57086da6125fa02a62f6d1a94"}, + {file = "yarl-1.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85aa6fd779e194901386709e0eedd45710b68af2709f82a84839c44314b68c10"}, + {file = "yarl-1.9.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eed9827033b7f67ad12cb70bd0cb59d36029144a7906694317c2dbf5c9eb5ddd"}, + {file = "yarl-1.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:df747104ef27ab1aa9a1145064fa9ea26ad8cf24bfcbdba7db7abf0f8b3676b9"}, + {file = "yarl-1.9.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:efec77851231410125cb5be04ec96fa4a075ca637f415a1f2d2c900b09032a8a"}, + {file = "yarl-1.9.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d5c407e530cf2979ea383885516ae79cc4f3c3530623acf5e42daf521f5c2564"}, + {file = "yarl-1.9.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = 
"sha256:f76edb386178a54ea7ceffa798cb830c3c22ab50ea10dfb25dc952b04848295f"}, + {file = "yarl-1.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:75676110bce59944dd48fd18d0449bd37eaeb311b38a0c768f7670864b5f8b68"}, + {file = "yarl-1.9.1-cp311-cp311-win32.whl", hash = "sha256:9ba5a18c4fbd408fe49dc5da85478a76bc75c1ce912d7fd7b43ed5297c4403e1"}, + {file = "yarl-1.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:b20a5ddc4e243cbaa54886bfe9af6ffc4ba4ef58f17f1bb691e973eb65bba84d"}, + {file = "yarl-1.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:791357d537a09a194f92b834f28c98d074e7297bac0a8f1d5b458a906cafa17c"}, + {file = "yarl-1.9.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89099c887338608da935ba8bee027564a94f852ac40e472de15d8309517ad5fe"}, + {file = "yarl-1.9.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:395ea180257a3742d09dcc5071739682a95f7874270ebe3982d6696caec75be0"}, + {file = "yarl-1.9.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:90ebaf448b5f048352ec7c76cb8d452df30c27cb6b8627dfaa9cf742a14f141a"}, + {file = "yarl-1.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f878a78ed2ccfbd973cab46dd0933ecd704787724db23979e5731674d76eb36f"}, + {file = "yarl-1.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74390c2318d066962500045aa145f5412169bce842e734b8c3e6e3750ad5b817"}, + {file = "yarl-1.9.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f8e73f526140c1c32f5fca4cd0bc3b511a1abcd948f45b2a38a95e4edb76ca72"}, + {file = "yarl-1.9.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ac8e593df1fbea820da7676929f821a0c7c2cecb8477d010254ce8ed54328ea8"}, + {file = "yarl-1.9.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:01cf88cb80411978a14aa49980968c1aeb7c18a90ac978c778250dd234d8e0ba"}, + {file = "yarl-1.9.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:97d76a3128f48fa1c721ef8a50e2c2f549296b2402dc8a8cde12ff60ed922f53"}, + {file = "yarl-1.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:01a073c9175481dfed6b40704a1b67af5a9435fc4a58a27d35fd6b303469b0c7"}, + {file = "yarl-1.9.1-cp37-cp37m-win32.whl", hash = "sha256:ecad20c3ef57c513dce22f58256361d10550a89e8eaa81d5082f36f8af305375"}, + {file = "yarl-1.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f5bcb80006efe9bf9f49ae89711253dd06df8053ff814622112a9219346566a7"}, + {file = "yarl-1.9.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e7ddebeabf384099814353a2956ed3ab5dbaa6830cc7005f985fcb03b5338f05"}, + {file = "yarl-1.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:13a1ad1f35839b3bb5226f59816b71e243d95d623f5b392efaf8820ddb2b3cd5"}, + {file = "yarl-1.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f0cd87949d619157a0482c6c14e5011f8bf2bc0b91cb5087414d9331f4ef02dd"}, + {file = "yarl-1.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d21887cbcf6a3cc5951662d8222bc9c04e1b1d98eebe3bb659c3a04ed49b0eec"}, + {file = "yarl-1.9.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4764114e261fe49d5df9b316b3221493d177247825c735b2aae77bc2e340d800"}, + {file = "yarl-1.9.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3abe37fd89a93ebe0010417ca671f422fa6fcffec54698f623b09f46b4d4a512"}, + {file = "yarl-1.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fe3a1c073ab80a28a06f41d2b623723046709ed29faf2c56bea41848597d86"}, + {file = "yarl-1.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3b5f8da07a21f2e57551f88a6709c2d340866146cf7351e5207623cfe8aad16"}, + {file = "yarl-1.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f6413ff5edfb9609e2769e32ce87a62353e66e75d264bf0eaad26fb9daa8f2"}, + {file = "yarl-1.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:b5d5fb6c94b620a7066a3adb7c246c87970f453813979818e4707ac32ce4d7bd"}, + {file = "yarl-1.9.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f206adb89424dca4a4d0b31981869700e44cd62742527e26d6b15a510dd410a2"}, + {file = "yarl-1.9.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:44fa6158e6b4b8ccfa2872c3900a226b29e8ce543ce3e48aadc99816afa8874d"}, + {file = "yarl-1.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:08c8599d6aa8a24425f8635f6c06fa8726afe3be01c8e53e236f519bcfa5db5b"}, + {file = "yarl-1.9.1-cp38-cp38-win32.whl", hash = "sha256:6b09cce412386ea9b4dda965d8e78d04ac5b5792b2fa9cced3258ec69c7d1c16"}, + {file = "yarl-1.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:09c56a32c26e24ef98d5757c5064e252836f621f9a8b42737773aa92936b8e08"}, + {file = "yarl-1.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b86e98c3021b7e2740d8719bf074301361bf2f51221ca2765b7a58afbfbd9042"}, + {file = "yarl-1.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5faf3ec98747318cb980aaf9addf769da68a66431fc203a373d95d7ee9c1fbb4"}, + {file = "yarl-1.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a21789bdf28549d4eb1de6910cabc762c9f6ae3eef85efc1958197c1c6ef853b"}, + {file = "yarl-1.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8b8d4b478a9862447daef4cafc89d87ea4ed958672f1d11db7732b77ead49cc"}, + {file = "yarl-1.9.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:307a782736ebf994e7600dcaeea3b3113083584da567272f2075f1540919d6b3"}, + {file = "yarl-1.9.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:46c4010de941e2e1365c07fb4418ddca10fcff56305a6067f5ae857f8c98f3a7"}, + {file = "yarl-1.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bab67d041c78e305ff3eef5e549304d843bd9b603c8855b68484ee663374ce15"}, + {file = "yarl-1.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1baf8cdaaab65d9ccedbf8748d626ad648b74b0a4d033e356a2f3024709fb82f"}, + {file = "yarl-1.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:27efc2e324f72df02818cd72d7674b1f28b80ab49f33a94f37c6473c8166ce49"}, + {file = "yarl-1.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ca14b84091700ae7c1fcd3a6000bd4ec1a3035009b8bcb94f246741ca840bb22"}, + {file = "yarl-1.9.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c3ca8d71b23bdf164b36d06df2298ec8a5bd3de42b17bf3e0e8e6a7489195f2c"}, + {file = "yarl-1.9.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:8c72a1dc7e2ea882cd3df0417c808ad3b69e559acdc43f3b096d67f2fb801ada"}, + {file = "yarl-1.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d966cd59df9a4b218480562e8daab39e87e746b78a96add51a3ab01636fc4291"}, + {file = "yarl-1.9.1-cp39-cp39-win32.whl", hash = "sha256:518a92a34c741836a315150460b5c1c71ae782d569eabd7acf53372e437709f7"}, + {file = "yarl-1.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:78755ce43b6e827e65ec0c68be832f86d059fcf05d4b33562745ebcfa91b26b1"}, + {file = "yarl-1.9.1.tar.gz", hash = "sha256:5ce0bcab7ec759062c818d73837644cde567ab8aa1e0d6c45db38dfb7c284441"}, +] [package.dependencies] idna = ">=2.0" multidict = ">=4.0" [metadata] -lock-version = "1.1" -python-versions = "^3.8" -content-hash = "f01943eaad90b858f6366f55b6c2d4eab02e20e85fc4df4447611a3387a152a3" - -[metadata.files] -aiohttp = [ - {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1ed0b6477896559f17b9eaeb6d38e07f7f9ffe40b9f0f9627ae8b9926ae260a8"}, - {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7dadf3c307b31e0e61689cbf9e06be7a867c563d5a63ce9dca578f956609abf8"}, - {file = "aiohttp-3.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a79004bb58748f31ae1cbe9fa891054baaa46fb106c2dc7af9f8e3304dc30316"}, - {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:12de6add4038df8f72fac606dff775791a60f113a725c960f2bab01d8b8e6b15"}, - {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f0d5f33feb5f69ddd57a4a4bd3d56c719a141080b445cbf18f238973c5c9923"}, - {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eaba923151d9deea315be1f3e2b31cc39a6d1d2f682f942905951f4e40200922"}, - {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:099ebd2c37ac74cce10a3527d2b49af80243e2a4fa39e7bce41617fbc35fa3c1"}, - {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e5d962cf7e1d426aa0e528a7e198658cdc8aa4fe87f781d039ad75dcd52c516"}, - {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fa0ffcace9b3aa34d205d8130f7873fcfefcb6a4dd3dd705b0dab69af6712642"}, - {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:61bfc23df345d8c9716d03717c2ed5e27374e0fe6f659ea64edcd27b4b044cf7"}, - {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:31560d268ff62143e92423ef183680b9829b1b482c011713ae941997921eebc8"}, - {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:01d7bdb774a9acc838e6b8f1d114f45303841b89b95984cbb7d80ea41172a9e3"}, - {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97ef77eb6b044134c0b3a96e16abcb05ecce892965a2124c566af0fd60f717e2"}, - {file = "aiohttp-3.8.1-cp310-cp310-win32.whl", hash = "sha256:c2aef4703f1f2ddc6df17519885dbfa3514929149d3ff900b73f45998f2532fa"}, - {file = "aiohttp-3.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:713ac174a629d39b7c6a3aa757b337599798da4c1157114a314e4e391cd28e32"}, - {file = "aiohttp-3.8.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:473d93d4450880fe278696549f2e7aed8cd23708c3c1997981464475f32137db"}, - {file = 
"aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b5eeae8e019e7aad8af8bb314fb908dd2e028b3cdaad87ec05095394cce632"}, - {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3af642b43ce56c24d063325dd2cf20ee012d2b9ba4c3c008755a301aaea720ad"}, - {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3630c3ef435c0a7c549ba170a0633a56e92629aeed0e707fec832dee313fb7a"}, - {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4a4a4e30bf1edcad13fb0804300557aedd07a92cabc74382fdd0ba6ca2661091"}, - {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6f8b01295e26c68b3a1b90efb7a89029110d3a4139270b24fda961893216c440"}, - {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a25fa703a527158aaf10dafd956f7d42ac6d30ec80e9a70846253dd13e2f067b"}, - {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5bfde62d1d2641a1f5173b8c8c2d96ceb4854f54a44c23102e2ccc7e02f003ec"}, - {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:51467000f3647d519272392f484126aa716f747859794ac9924a7aafa86cd411"}, - {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:03a6d5349c9ee8f79ab3ff3694d6ce1cfc3ced1c9d36200cb8f08ba06bd3b782"}, - {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:102e487eeb82afac440581e5d7f8f44560b36cf0bdd11abc51a46c1cd88914d4"}, - {file = "aiohttp-3.8.1-cp36-cp36m-win32.whl", hash = "sha256:4aed991a28ea3ce320dc8ce655875e1e00a11bdd29fe9444dd4f88c30d558602"}, - {file = "aiohttp-3.8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b0e20cddbd676ab8a64c774fefa0ad787cc506afd844de95da56060348021e96"}, - {file = "aiohttp-3.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:37951ad2f4a6df6506750a23f7cbabad24c73c65f23f72e95897bb2cecbae676"}, - {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c23b1ad869653bc818e972b7a3a79852d0e494e9ab7e1a701a3decc49c20d51"}, - {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15b09b06dae900777833fe7fc4b4aa426556ce95847a3e8d7548e2d19e34edb8"}, - {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:477c3ea0ba410b2b56b7efb072c36fa91b1e6fc331761798fa3f28bb224830dd"}, - {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2f2f69dca064926e79997f45b2f34e202b320fd3782f17a91941f7eb85502ee2"}, - {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ef9612483cb35171d51d9173647eed5d0069eaa2ee812793a75373447d487aa4"}, - {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6d69f36d445c45cda7b3b26afef2fc34ef5ac0cdc75584a87ef307ee3c8c6d00"}, - {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:55c3d1072704d27401c92339144d199d9de7b52627f724a949fc7d5fc56d8b93"}, - {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b9d00268fcb9f66fbcc7cd9fe423741d90c75ee029a1d15c09b22d23253c0a44"}, - {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:07b05cd3305e8a73112103c834e91cd27ce5b4bd07850c4b4dbd1877d3f45be7"}, - {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c34dc4958b232ef6188c4318cb7b2c2d80521c9a56c52449f8f93ab7bc2a8a1c"}, - {file = "aiohttp-3.8.1-cp37-cp37m-win32.whl", hash = "sha256:d2f9b69293c33aaa53d923032fe227feac867f81682f002ce33ffae978f0a9a9"}, - {file = "aiohttp-3.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6ae828d3a003f03ae31915c31fa684b9890ea44c9c989056fea96e3d12a9fa17"}, - {file = 
"aiohttp-3.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0c7ebbbde809ff4e970824b2b6cb7e4222be6b95a296e46c03cf050878fc1785"}, - {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b7ef7cbd4fec9a1e811a5de813311ed4f7ac7d93e0fda233c9b3e1428f7dd7b"}, - {file = "aiohttp-3.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c3d6a4d0619e09dcd61021debf7059955c2004fa29f48788a3dfaf9c9901a7cd"}, - {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:718626a174e7e467f0558954f94af117b7d4695d48eb980146016afa4b580b2e"}, - {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:589c72667a5febd36f1315aa6e5f56dd4aa4862df295cb51c769d16142ddd7cd"}, - {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ed076098b171573161eb146afcb9129b5ff63308960aeca4b676d9d3c35e700"}, - {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:086f92daf51a032d062ec5f58af5ca6a44d082c35299c96376a41cbb33034675"}, - {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:11691cf4dc5b94236ccc609b70fec991234e7ef8d4c02dd0c9668d1e486f5abf"}, - {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:31d1e1c0dbf19ebccbfd62eff461518dcb1e307b195e93bba60c965a4dcf1ba0"}, - {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:11a67c0d562e07067c4e86bffc1553f2cf5b664d6111c894671b2b8712f3aba5"}, - {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:bb01ba6b0d3f6c68b89fce7305080145d4877ad3acaed424bae4d4ee75faa950"}, - {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:44db35a9e15d6fe5c40d74952e803b1d96e964f683b5a78c3cc64eb177878155"}, - {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:844a9b460871ee0a0b0b68a64890dae9c415e513db0f4a7e3cab41a0f2fedf33"}, - {file = "aiohttp-3.8.1-cp38-cp38-win32.whl", hash = "sha256:7d08744e9bae2ca9c382581f7dce1273fe3c9bae94ff572c3626e8da5b193c6a"}, - {file = "aiohttp-3.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:04d48b8ce6ab3cf2097b1855e1505181bdd05586ca275f2505514a6e274e8e75"}, - {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f5315a2eb0239185af1bddb1abf472d877fede3cc8d143c6cddad37678293237"}, - {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a996d01ca39b8dfe77440f3cd600825d05841088fd6bc0144cc6c2ec14cc5f74"}, - {file = "aiohttp-3.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:13487abd2f761d4be7c8ff9080de2671e53fff69711d46de703c310c4c9317ca"}, - {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea302f34477fda3f85560a06d9ebdc7fa41e82420e892fc50b577e35fc6a50b2"}, - {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2f635ce61a89c5732537a7896b6319a8fcfa23ba09bec36e1b1ac0ab31270d2"}, - {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e999f2d0e12eea01caeecb17b653f3713d758f6dcc770417cf29ef08d3931421"}, - {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0770e2806a30e744b4e21c9d73b7bee18a1cfa3c47991ee2e5a65b887c49d5cf"}, - {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d15367ce87c8e9e09b0f989bfd72dc641bcd04ba091c68cd305312d00962addd"}, - {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6c7cefb4b0640703eb1069835c02486669312bf2f12b48a748e0a7756d0de33d"}, - {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:71927042ed6365a09a98a6377501af5c9f0a4d38083652bcd2281a06a5976724"}, - {file = 
"aiohttp-3.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:28d490af82bc6b7ce53ff31337a18a10498303fe66f701ab65ef27e143c3b0ef"}, - {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:b6613280ccedf24354406caf785db748bebbddcf31408b20c0b48cb86af76866"}, - {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81e3d8c34c623ca4e36c46524a3530e99c0bc95ed068fd6e9b55cb721d408fb2"}, - {file = "aiohttp-3.8.1-cp39-cp39-win32.whl", hash = "sha256:7187a76598bdb895af0adbd2fb7474d7f6025d170bc0a1130242da817ce9e7d1"}, - {file = "aiohttp-3.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:1c182cb873bc91b411e184dab7a2b664d4fea2743df0e4d57402f7f3fa644bac"}, - {file = "aiohttp-3.8.1.tar.gz", hash = "sha256:fc5471e1a54de15ef71c1bc6ebe80d4dc681ea600e68bfd1cbce40427f0b7578"}, -] -aiosignal = [ - {file = "aiosignal-1.2.0-py3-none-any.whl", hash = "sha256:26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a"}, - {file = "aiosignal-1.2.0.tar.gz", hash = "sha256:78ed67db6c7b7ced4f98e495e572106d5c432a93e1ddd1bf475e1dc05f5b7df2"}, -] -alabaster = [ - {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, - {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, -] -async-timeout = [ - {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, - {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, -] -atomicwrites = [ - {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, - {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, -] -attrs = [ - {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = 
"sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, - {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, -] -babel = [ - {file = "Babel-2.9.1-py2.py3-none-any.whl", hash = "sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9"}, - {file = "Babel-2.9.1.tar.gz", hash = "sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0"}, -] -certifi = [ - {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, - {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, -] -charset-normalizer = [ - {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, - {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, -] -colorama = [ - {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, - {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, -] -deprecated = [ - {file = "Deprecated-1.2.9-py2.py3-none-any.whl", hash = "sha256:55b41a15bda04c6a2c0d27dd4c2b7b81ffa6348c9cad8f077ac1978c59927ab9"}, - {file = "Deprecated-1.2.9.tar.gz", hash = "sha256:0cf37d293a96805c6afd8b5fc525cb40f23a2cac9b2d066ac3bd4b04e72ceccc"}, -] -docutils = [ - {file = "docutils-0.16-py2.py3-none-any.whl", hash = "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af"}, - {file = "docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"}, -] -frozenlist = [ - {file = "frozenlist-1.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2257aaba9660f78c7b1d8fea963b68f3feffb1a9d5d05a18401ca9eb3e8d0a3"}, - 
{file = "frozenlist-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4a44ebbf601d7bac77976d429e9bdb5a4614f9f4027777f9e54fd765196e9d3b"}, - {file = "frozenlist-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:45334234ec30fc4ea677f43171b18a27505bfb2dba9aca4398a62692c0ea8868"}, - {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47be22dc27ed933d55ee55845d34a3e4e9f6fee93039e7f8ebadb0c2f60d403f"}, - {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03a7dd1bfce30216a3f51a84e6dd0e4a573d23ca50f0346634916ff105ba6e6b"}, - {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:691ddf6dc50480ce49f68441f1d16a4c3325887453837036e0fb94736eae1e58"}, - {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bde99812f237f79eaf3f04ebffd74f6718bbd216101b35ac7955c2d47c17da02"}, - {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a202458d1298ced3768f5a7d44301e7c86defac162ace0ab7434c2e961166e8"}, - {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9e3e9e365991f8cc5f5edc1fd65b58b41d0514a6a7ad95ef5c7f34eb49b3d3e"}, - {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:04cb491c4b1c051734d41ea2552fde292f5f3a9c911363f74f39c23659c4af78"}, - {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:436496321dad302b8b27ca955364a439ed1f0999311c393dccb243e451ff66aa"}, - {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:754728d65f1acc61e0f4df784456106e35afb7bf39cfe37227ab00436fb38676"}, - {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6eb275c6385dd72594758cbe96c07cdb9bd6becf84235f4a594bdf21e3596c9d"}, - {file = 
"frozenlist-1.3.0-cp310-cp310-win32.whl", hash = "sha256:e30b2f9683812eb30cf3f0a8e9f79f8d590a7999f731cf39f9105a7c4a39489d"}, - {file = "frozenlist-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f7353ba3367473d1d616ee727945f439e027f0bb16ac1a750219a8344d1d5d3c"}, - {file = "frozenlist-1.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88aafd445a233dbbf8a65a62bc3249a0acd0d81ab18f6feb461cc5a938610d24"}, - {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4406cfabef8f07b3b3af0f50f70938ec06d9f0fc26cbdeaab431cbc3ca3caeaa"}, - {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cf829bd2e2956066dd4de43fd8ec881d87842a06708c035b37ef632930505a2"}, - {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:603b9091bd70fae7be28bdb8aa5c9990f4241aa33abb673390a7f7329296695f"}, - {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25af28b560e0c76fa41f550eacb389905633e7ac02d6eb3c09017fa1c8cdfde1"}, - {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c7a8a9fc9383b52c410a2ec952521906d355d18fccc927fca52ab575ee8b93"}, - {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:65bc6e2fece04e2145ab6e3c47428d1bbc05aede61ae365b2c1bddd94906e478"}, - {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3f7c935c7b58b0d78c0beea0c7358e165f95f1fd8a7e98baa40d22a05b4a8141"}, - {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd89acd1b8bb4f31b47072615d72e7f53a948d302b7c1d1455e42622de180eae"}, - {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:6983a31698490825171be44ffbafeaa930ddf590d3f051e397143a5045513b01"}, - {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", 
hash = "sha256:adac9700675cf99e3615eb6a0eb5e9f5a4143c7d42c05cea2e7f71c27a3d0846"}, - {file = "frozenlist-1.3.0-cp37-cp37m-win32.whl", hash = "sha256:0c36e78b9509e97042ef869c0e1e6ef6429e55817c12d78245eb915e1cca7468"}, - {file = "frozenlist-1.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:57f4d3f03a18facacb2a6bcd21bccd011e3b75d463dc49f838fd699d074fabd1"}, - {file = "frozenlist-1.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8c905a5186d77111f02144fab5b849ab524f1e876a1e75205cd1386a9be4b00a"}, - {file = "frozenlist-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b5009062d78a8c6890d50b4e53b0ddda31841b3935c1937e2ed8c1bda1c7fb9d"}, - {file = "frozenlist-1.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2fdc3cd845e5a1f71a0c3518528bfdbfe2efaf9886d6f49eacc5ee4fd9a10953"}, - {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e650bd09b5dda929523b9f8e7f99b24deac61240ecc1a32aeba487afcd970f"}, - {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40dff8962b8eba91fd3848d857203f0bd704b5f1fa2b3fc9af64901a190bba08"}, - {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:768efd082074bb203c934e83a61654ed4931ef02412c2fbdecea0cff7ecd0274"}, - {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:006d3595e7d4108a12025ddf415ae0f6c9e736e726a5db0183326fd191b14c5e"}, - {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:871d42623ae15eb0b0e9df65baeee6976b2e161d0ba93155411d58ff27483ad8"}, - {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aff388be97ef2677ae185e72dc500d19ecaf31b698986800d3fc4f399a5e30a5"}, - {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:9f892d6a94ec5c7b785e548e42722e6f3a52f5f32a8461e82ac3e67a3bd073f1"}, - {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:e982878792c971cbd60ee510c4ee5bf089a8246226dea1f2138aa0bb67aff148"}, - {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c6c321dd013e8fc20735b92cb4892c115f5cdb82c817b1e5b07f6b95d952b2f0"}, - {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:30530930410855c451bea83f7b272fb1c495ed9d5cc72895ac29e91279401db3"}, - {file = "frozenlist-1.3.0-cp38-cp38-win32.whl", hash = "sha256:40ec383bc194accba825fbb7d0ef3dda5736ceab2375462f1d8672d9f6b68d07"}, - {file = "frozenlist-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:f20baa05eaa2bcd5404c445ec51aed1c268d62600362dc6cfe04fae34a424bd9"}, - {file = "frozenlist-1.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0437fe763fb5d4adad1756050cbf855bbb2bf0d9385c7bb13d7a10b0dd550486"}, - {file = "frozenlist-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b684c68077b84522b5c7eafc1dc735bfa5b341fb011d5552ebe0968e22ed641c"}, - {file = "frozenlist-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93641a51f89473837333b2f8100f3f89795295b858cd4c7d4a1f18e299dc0a4f"}, - {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6d32ff213aef0fd0bcf803bffe15cfa2d4fde237d1d4838e62aec242a8362fa"}, - {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31977f84828b5bb856ca1eb07bf7e3a34f33a5cddce981d880240ba06639b94d"}, - {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c62964192a1c0c30b49f403495911298810bada64e4f03249ca35a33ca0417a"}, - {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4eda49bea3602812518765810af732229b4291d2695ed24a0a20e098c45a707b"}, - {file = 
"frozenlist-1.3.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acb267b09a509c1df5a4ca04140da96016f40d2ed183cdc356d237286c971b51"}, - {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e1e26ac0a253a2907d654a37e390904426d5ae5483150ce3adedb35c8c06614a"}, - {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f96293d6f982c58ebebb428c50163d010c2f05de0cde99fd681bfdc18d4b2dc2"}, - {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e84cb61b0ac40a0c3e0e8b79c575161c5300d1d89e13c0e02f76193982f066ed"}, - {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:ff9310f05b9d9c5c4dd472983dc956901ee6cb2c3ec1ab116ecdde25f3ce4951"}, - {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d26b650b71fdc88065b7a21f8ace70175bcf3b5bdba5ea22df4bfd893e795a3b"}, - {file = "frozenlist-1.3.0-cp39-cp39-win32.whl", hash = "sha256:01a73627448b1f2145bddb6e6c2259988bb8aee0fb361776ff8604b99616cd08"}, - {file = "frozenlist-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:772965f773757a6026dea111a15e6e2678fbd6216180f82a48a40b27de1ee2ab"}, - {file = "frozenlist-1.3.0.tar.gz", hash = "sha256:ce6f2ba0edb7b0c1d8976565298ad2deba6f8064d2bebb6ffce2ca896eb35b0b"}, -] -idna = [ - {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, - {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, -] -imagesize = [ - {file = "imagesize-1.3.0-py2.py3-none-any.whl", hash = "sha256:1db2f82529e53c3e929e8926a1fa9235aa82d0bd0c580359c67ec31b2fddaa8c"}, - {file = "imagesize-1.3.0.tar.gz", hash = "sha256:cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d"}, -] -inflection = [ - {file = "inflection-0.4.0-py2.py3-none-any.whl", hash = "sha256:9a15d3598f01220e93f2207c432cfede50daff53137ce660fb8be838ef1ca6cc"}, - 
{file = "inflection-0.4.0.tar.gz", hash = "sha256:32a5c3341d9583ec319548b9015b7fbdf8c429cbcb575d326c33ae3a0e90d52c"}, -] -jinja2 = [ - {file = "Jinja2-3.1.1-py3-none-any.whl", hash = "sha256:539835f51a74a69f41b848a9645dbdc35b4f20a3b601e2d9a7e22947b15ff119"}, - {file = "Jinja2-3.1.1.tar.gz", hash = "sha256:640bed4bb501cbd17194b3cace1dc2126f5b619cf068a726b98192a0fde74ae9"}, -] -markupsafe = [ - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, - {file = 
"MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, - {file = 
"MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, - {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, -] -more-itertools = [ - {file = "more-itertools-8.12.0.tar.gz", hash = "sha256:7dc6ad46f05f545f900dd59e8dfb4e84a4827b97b3cfecb175ea0c7d247f6064"}, - {file = "more_itertools-8.12.0-py3-none-any.whl", hash = "sha256:43e6dd9942dffd72661a2c4ef383ad7da1e6a3e968a927ad7a6083ab410a688b"}, -] -multidict = [ - {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b9e95a740109c6047602f4db4da9949e6c5945cefbad34a1299775ddc9a62e2"}, - {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac0e27844758d7177989ce406acc6a83c16ed4524ebc363c1f748cba184d89d3"}, - {file = "multidict-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:041b81a5f6b38244b34dc18c7b6aba91f9cdaf854d9a39e5ff0b58e2b5773b9c"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fdda29a3c7e76a064f2477c9aab1ba96fd94e02e386f1e665bca1807fc5386f"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3368bf2398b0e0fcbf46d85795adc4c259299fec50c1416d0f77c0a843a3eed9"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4f052ee022928d34fe1f4d2bc743f32609fb79ed9c49a1710a5ad6b2198db20"}, - {file = 
"multidict-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:225383a6603c086e6cef0f2f05564acb4f4d5f019a4e3e983f572b8530f70c88"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50bd442726e288e884f7be9071016c15a8742eb689a593a0cac49ea093eef0a7"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:47e6a7e923e9cada7c139531feac59448f1f47727a79076c0b1ee80274cd8eee"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0556a1d4ea2d949efe5fd76a09b4a82e3a4a30700553a6725535098d8d9fb672"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:626fe10ac87851f4cffecee161fc6f8f9853f0f6f1035b59337a51d29ff3b4f9"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8064b7c6f0af936a741ea1efd18690bacfbae4078c0c385d7c3f611d11f0cf87"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2d36e929d7f6a16d4eb11b250719c39560dd70545356365b494249e2186bc389"}, - {file = "multidict-6.0.2-cp310-cp310-win32.whl", hash = "sha256:fcb91630817aa8b9bc4a74023e4198480587269c272c58b3279875ed7235c293"}, - {file = "multidict-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:8cbf0132f3de7cc6c6ce00147cc78e6439ea736cee6bca4f068bcf892b0fd658"}, - {file = "multidict-6.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:05f6949d6169878a03e607a21e3b862eaf8e356590e8bdae4227eedadacf6e51"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2c2e459f7050aeb7c1b1276763364884595d47000c1cddb51764c0d8976e608"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0509e469d48940147e1235d994cd849a8f8195e0bca65f8f5439c56e17872a3"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:514fe2b8d750d6cdb4712346a2c5084a80220821a3e91f3f71eec11cf8d28fd4"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19adcfc2a7197cdc3987044e3f415168fc5dc1f720c932eb1ef4f71a2067e08b"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9d153e7f1f9ba0b23ad1568b3b9e17301e23b042c23870f9ee0522dc5cc79e8"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:aef9cc3d9c7d63d924adac329c33835e0243b5052a6dfcbf7732a921c6e918ba"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4571f1beddff25f3e925eea34268422622963cd8dc395bb8778eb28418248e43"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:d48b8ee1d4068561ce8033d2c344cf5232cb29ee1a0206a7b828c79cbc5982b8"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:45183c96ddf61bf96d2684d9fbaf6f3564d86b34cb125761f9a0ef9e36c1d55b"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:75bdf08716edde767b09e76829db8c1e5ca9d8bb0a8d4bd94ae1eafe3dac5e15"}, - {file = "multidict-6.0.2-cp37-cp37m-win32.whl", hash = "sha256:a45e1135cb07086833ce969555df39149680e5471c04dfd6a915abd2fc3f6dbc"}, - {file = "multidict-6.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6f3cdef8a247d1eafa649085812f8a310e728bdf3900ff6c434eafb2d443b23a"}, - {file = "multidict-6.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0327292e745a880459ef71be14e709aaea2f783f3537588fb4ed09b6c01bca60"}, - {file = "multidict-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e875b6086e325bab7e680e4316d667fc0e5e174bb5611eb16b3ea121c8951b86"}, - {file = "multidict-6.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feea820722e69451743a3d56ad74948b68bf456984d63c1a92e8347b7b88452d"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9cc57c68cb9139c7cd6fc39f211b02198e69fb90ce4bc4a094cf5fe0d20fd8b0"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:497988d6b6ec6ed6f87030ec03280b696ca47dbf0648045e4e1d28b80346560d"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:89171b2c769e03a953d5969b2f272efa931426355b6c0cb508022976a17fd376"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684133b1e1fe91eda8fa7447f137c9490a064c6b7f392aa857bba83a28cfb693"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd9fc9c4849a07f3635ccffa895d57abce554b467d611a5009ba4f39b78a8849"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e07c8e79d6e6fd37b42f3250dba122053fddb319e84b55dd3a8d6446e1a7ee49"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4070613ea2227da2bfb2c35a6041e4371b0af6b0be57f424fe2318b42a748516"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:47fbeedbf94bed6547d3aa632075d804867a352d86688c04e606971595460227"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5774d9218d77befa7b70d836004a768fb9aa4fdb53c97498f4d8d3f67bb9cfa9"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2957489cba47c2539a8eb7ab32ff49101439ccf78eab724c828c1a54ff3ff98d"}, - {file = "multidict-6.0.2-cp38-cp38-win32.whl", hash = "sha256:e5b20e9599ba74391ca0cfbd7b328fcc20976823ba19bc573983a25b32e92b57"}, - {file = "multidict-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:8004dca28e15b86d1b1372515f32eb6f814bdf6f00952699bdeb541691091f96"}, - {file = "multidict-6.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2e4a0785b84fb59e43c18a015ffc575ba93f7d1dbd272b4cdad9f5134b8a006c"}, - {file = "multidict-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", 
hash = "sha256:6701bf8a5d03a43375909ac91b6980aea74b0f5402fbe9428fc3f6edf5d9677e"}, - {file = "multidict-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a007b1638e148c3cfb6bf0bdc4f82776cef0ac487191d093cdc316905e504071"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07a017cfa00c9890011628eab2503bee5872f27144936a52eaab449be5eaf032"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c207fff63adcdf5a485969131dc70e4b194327666b7e8a87a97fbc4fd80a53b2"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:373ba9d1d061c76462d74e7de1c0c8e267e9791ee8cfefcf6b0b2495762c370c"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfba7c6d5d7c9099ba21f84662b037a0ffd4a5e6b26ac07d19e423e6fdf965a9"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19d9bad105dfb34eb539c97b132057a4e709919ec4dd883ece5838bcbf262b80"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:de989b195c3d636ba000ee4281cd03bb1234635b124bf4cd89eeee9ca8fcb09d"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7c40b7bbece294ae3a87c1bc2abff0ff9beef41d14188cda94ada7bcea99b0fb"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:d16cce709ebfadc91278a1c005e3c17dd5f71f5098bfae1035149785ea6e9c68"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:a2c34a93e1d2aa35fbf1485e5010337c72c6791407d03aa5f4eed920343dd360"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:feba80698173761cddd814fa22e88b0661e98cb810f9f986c54aa34d281e4937"}, - {file = "multidict-6.0.2-cp39-cp39-win32.whl", hash = "sha256:23b616fdc3c74c9fe01d76ce0d1ce872d2d396d8fa8e4899398ad64fb5aa214a"}, - {file = 
"multidict-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:4bae31803d708f6f15fd98be6a6ac0b6958fcf68fda3c77a048a4f9073704aae"}, - {file = "multidict-6.0.2.tar.gz", hash = "sha256:5ff3bd75f38e4c43f1f470f2df7a4d430b821c4ce22be384e1459cb57d6bb013"}, -] -mypy = [ - {file = "mypy-0.770-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:a34b577cdf6313bf24755f7a0e3f3c326d5c1f4fe7422d1d06498eb25ad0c600"}, - {file = "mypy-0.770-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:86c857510a9b7c3104cf4cde1568f4921762c8f9842e987bc03ed4f160925754"}, - {file = "mypy-0.770-cp35-cp35m-win_amd64.whl", hash = "sha256:a8ffcd53cb5dfc131850851cc09f1c44689c2812d0beb954d8138d4f5fc17f65"}, - {file = "mypy-0.770-cp36-cp36m-macosx_10_6_x86_64.whl", hash = "sha256:7687f6455ec3ed7649d1ae574136835a4272b65b3ddcf01ab8704ac65616c5ce"}, - {file = "mypy-0.770-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3beff56b453b6ef94ecb2996bea101a08f1f8a9771d3cbf4988a61e4d9973761"}, - {file = "mypy-0.770-cp36-cp36m-win_amd64.whl", hash = "sha256:15b948e1302682e3682f11f50208b726a246ab4e6c1b39f9264a8796bb416aa2"}, - {file = "mypy-0.770-cp37-cp37m-macosx_10_6_x86_64.whl", hash = "sha256:b90928f2d9eb2f33162405f32dde9f6dcead63a0971ca8a1b50eb4ca3e35ceb8"}, - {file = "mypy-0.770-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:c56ffe22faa2e51054c5f7a3bc70a370939c2ed4de308c690e7949230c995913"}, - {file = "mypy-0.770-cp37-cp37m-win_amd64.whl", hash = "sha256:8dfb69fbf9f3aeed18afffb15e319ca7f8da9642336348ddd6cab2713ddcf8f9"}, - {file = "mypy-0.770-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:219a3116ecd015f8dca7b5d2c366c973509dfb9a8fc97ef044a36e3da66144a1"}, - {file = "mypy-0.770-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7ec45a70d40ede1ec7ad7f95b3c94c9cf4c186a32f6bacb1795b60abd2f9ef27"}, - {file = "mypy-0.770-cp38-cp38-win_amd64.whl", hash = "sha256:f91c7ae919bbc3f96cd5e5b2e786b2b108343d1d7972ea130f7de27fdd547cf3"}, - {file = "mypy-0.770-py3-none-any.whl", hash = 
"sha256:3b1fc683fb204c6b4403a1ef23f0b1fac8e4477091585e0c8c54cbdf7d7bb164"}, - {file = "mypy-0.770.tar.gz", hash = "sha256:8a627507ef9b307b46a1fea9513d5c98680ba09591253082b4c48697ba05a4ae"}, -] -mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, -] -packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, -] -pluggy = [ - {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, - {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, -] -py = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] -pygments = [ - {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"}, - {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"}, -] -pyparsing = [ - {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, - {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, -] -pytest = [ - {file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"}, - {file = "pytest-5.4.3.tar.gz", hash = 
"sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"}, -] -pytz = [ - {file = "pytz-2022.1-py2.py3-none-any.whl", hash = "sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c"}, - {file = "pytz-2022.1.tar.gz", hash = "sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7"}, -] -requests = [ - {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, - {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, -] -setuptools = [ - {file = "setuptools-65.7.0-py3-none-any.whl", hash = "sha256:8ab4f1dbf2b4a65f7eec5ad0c620e84c34111a68d3349833494b9088212214dd"}, - {file = "setuptools-65.7.0.tar.gz", hash = "sha256:4d3c92fac8f1118bb77a22181355e29c239cabfe2b9effdaa665c66b711136d7"}, -] -snowballstemmer = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] -sphinx = [ - {file = "Sphinx-3.5.4-py3-none-any.whl", hash = "sha256:2320d4e994a191f4b4be27da514e46b3d6b420f2ff895d064f52415d342461e8"}, - {file = "Sphinx-3.5.4.tar.gz", hash = "sha256:19010b7b9fa0dc7756a6e105b2aacd3a80f798af3c25c273be64d7beeb482cb1"}, -] -sphinx-autodoc-typehints = [ - {file = "sphinx-autodoc-typehints-1.12.0.tar.gz", hash = "sha256:193617d9dbe0847281b1399d369e74e34cd959c82e02c7efde077fca908a9f52"}, - {file = "sphinx_autodoc_typehints-1.12.0-py3-none-any.whl", hash = "sha256:5e81776ec422dd168d688ab60f034fccfafbcd94329e9537712c93003bddc04a"}, -] -sphinx-rtd-theme = [ - {file = "sphinx_rtd_theme-0.4.3-py2.py3-none-any.whl", hash = "sha256:00cf895504a7895ee433807c62094cf1e95f065843bf3acd17037c3e9a2becd4"}, - {file = "sphinx_rtd_theme-0.4.3.tar.gz", hash = 
"sha256:728607e34d60456d736cc7991fd236afb828b21b82f956c5ea75f94c8414040a"}, -] -sphinxcontrib-applehelp = [ - {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, - {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, -] -sphinxcontrib-devhelp = [ - {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, - {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, -] -sphinxcontrib-htmlhelp = [ - {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, - {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, -] -sphinxcontrib-jsmath = [ - {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, - {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, -] -sphinxcontrib-qthelp = [ - {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, - {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, -] -sphinxcontrib-serializinghtml = [ - {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, - {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, -] -typed-ast = [ - {file = 
"typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, - {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, - {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, - {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, - {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, - {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, - {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, - {file = 
"typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, - {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, - {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, - {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, - {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, - {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, - {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, - {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, - {file = 
"typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, - {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, -] -typing-extensions = [ - {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"}, - {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, -] -urllib3 = [ - {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, - {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, -] -wcwidth = [ - {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, - {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, -] -wrapt = [ - {file = "wrapt-1.14.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:5a9a1889cc01ed2ed5f34574c90745fab1dd06ec2eee663e8ebeefe363e8efd7"}, - {file = "wrapt-1.14.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:9a3ff5fb015f6feb78340143584d9f8a0b91b6293d6b5cf4295b3e95d179b88c"}, - {file = "wrapt-1.14.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:4b847029e2d5e11fd536c9ac3136ddc3f54bc9488a75ef7d040a3900406a91eb"}, - {file = "wrapt-1.14.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:9a5a544861b21e0e7575b6023adebe7a8c6321127bb1d238eb40d99803a0e8bd"}, - {file = "wrapt-1.14.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:88236b90dda77f0394f878324cfbae05ae6fde8a84d548cfe73a75278d760291"}, - {file = "wrapt-1.14.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f0408e2dbad9e82b4c960274214af533f856a199c9274bd4aff55d4634dedc33"}, - {file = "wrapt-1.14.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = 
"sha256:9d8c68c4145041b4eeae96239802cfdfd9ef927754a5be3f50505f09f309d8c6"}, - {file = "wrapt-1.14.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:22626dca56fd7f55a0733e604f1027277eb0f4f3d95ff28f15d27ac25a45f71b"}, - {file = "wrapt-1.14.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:65bf3eb34721bf18b5a021a1ad7aa05947a1767d1aa272b725728014475ea7d5"}, - {file = "wrapt-1.14.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09d16ae7a13cff43660155383a2372b4aa09109c7127aa3f24c3cf99b891c330"}, - {file = "wrapt-1.14.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:debaf04f813ada978d7d16c7dfa16f3c9c2ec9adf4656efdc4defdf841fc2f0c"}, - {file = "wrapt-1.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:748df39ed634851350efa87690c2237a678ed794fe9ede3f0d79f071ee042561"}, - {file = "wrapt-1.14.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1807054aa7b61ad8d8103b3b30c9764de2e9d0c0978e9d3fc337e4e74bf25faa"}, - {file = "wrapt-1.14.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:763a73ab377390e2af26042f685a26787c402390f682443727b847e9496e4a2a"}, - {file = "wrapt-1.14.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8529b07b49b2d89d6917cfa157d3ea1dfb4d319d51e23030664a827fe5fd2131"}, - {file = "wrapt-1.14.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:68aeefac31c1f73949662ba8affaf9950b9938b712fb9d428fa2a07e40ee57f8"}, - {file = "wrapt-1.14.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59d7d92cee84a547d91267f0fea381c363121d70fe90b12cd88241bd9b0e1763"}, - {file = "wrapt-1.14.0-cp310-cp310-win32.whl", hash = "sha256:3a88254881e8a8c4784ecc9cb2249ff757fd94b911d5df9a5984961b96113fff"}, - {file = "wrapt-1.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:9a242871b3d8eecc56d350e5e03ea1854de47b17f040446da0e47dc3e0b9ad4d"}, - {file = "wrapt-1.14.0-cp35-cp35m-manylinux1_i686.whl", hash = 
"sha256:a65bffd24409454b889af33b6c49d0d9bcd1a219b972fba975ac935f17bdf627"}, - {file = "wrapt-1.14.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9d9fcd06c952efa4b6b95f3d788a819b7f33d11bea377be6b8980c95e7d10775"}, - {file = "wrapt-1.14.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:db6a0ddc1282ceb9032e41853e659c9b638789be38e5b8ad7498caac00231c23"}, - {file = "wrapt-1.14.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:14e7e2c5f5fca67e9a6d5f753d21f138398cad2b1159913ec9e9a67745f09ba3"}, - {file = "wrapt-1.14.0-cp35-cp35m-win32.whl", hash = "sha256:6d9810d4f697d58fd66039ab959e6d37e63ab377008ef1d63904df25956c7db0"}, - {file = "wrapt-1.14.0-cp35-cp35m-win_amd64.whl", hash = "sha256:d808a5a5411982a09fef6b49aac62986274ab050e9d3e9817ad65b2791ed1425"}, - {file = "wrapt-1.14.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b77159d9862374da213f741af0c361720200ab7ad21b9f12556e0eb95912cd48"}, - {file = "wrapt-1.14.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36a76a7527df8583112b24adc01748cd51a2d14e905b337a6fefa8b96fc708fb"}, - {file = "wrapt-1.14.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0057b5435a65b933cbf5d859cd4956624df37b8bf0917c71756e4b3d9958b9e"}, - {file = "wrapt-1.14.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a0a4ca02752ced5f37498827e49c414d694ad7cf451ee850e3ff160f2bee9d3"}, - {file = "wrapt-1.14.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8c6be72eac3c14baa473620e04f74186c5d8f45d80f8f2b4eda6e1d18af808e8"}, - {file = "wrapt-1.14.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:21b1106bff6ece8cb203ef45b4f5778d7226c941c83aaaa1e1f0f4f32cc148cd"}, - {file = "wrapt-1.14.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:493da1f8b1bb8a623c16552fb4a1e164c0200447eb83d3f68b44315ead3f9036"}, - {file = "wrapt-1.14.0-cp36-cp36m-win32.whl", hash = 
"sha256:89ba3d548ee1e6291a20f3c7380c92f71e358ce8b9e48161401e087e0bc740f8"}, - {file = "wrapt-1.14.0-cp36-cp36m-win_amd64.whl", hash = "sha256:729d5e96566f44fccac6c4447ec2332636b4fe273f03da128fff8d5559782b06"}, - {file = "wrapt-1.14.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:891c353e95bb11abb548ca95c8b98050f3620a7378332eb90d6acdef35b401d4"}, - {file = "wrapt-1.14.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23f96134a3aa24cc50614920cc087e22f87439053d886e474638c68c8d15dc80"}, - {file = "wrapt-1.14.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6807bcee549a8cb2f38f73f469703a1d8d5d990815c3004f21ddb68a567385ce"}, - {file = "wrapt-1.14.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6915682f9a9bc4cf2908e83caf5895a685da1fbd20b6d485dafb8e218a338279"}, - {file = "wrapt-1.14.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f2f3bc7cd9c9fcd39143f11342eb5963317bd54ecc98e3650ca22704b69d9653"}, - {file = "wrapt-1.14.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3a71dbd792cc7a3d772ef8cd08d3048593f13d6f40a11f3427c000cf0a5b36a0"}, - {file = "wrapt-1.14.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5a0898a640559dec00f3614ffb11d97a2666ee9a2a6bad1259c9facd01a1d4d9"}, - {file = "wrapt-1.14.0-cp37-cp37m-win32.whl", hash = "sha256:167e4793dc987f77fd476862d32fa404d42b71f6a85d3b38cbce711dba5e6b68"}, - {file = "wrapt-1.14.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d066ffc5ed0be00cd0352c95800a519cf9e4b5dd34a028d301bdc7177c72daf3"}, - {file = "wrapt-1.14.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d9bdfa74d369256e4218000a629978590fd7cb6cf6893251dad13d051090436d"}, - {file = "wrapt-1.14.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2498762814dd7dd2a1d0248eda2afbc3dd9c11537bc8200a4b21789b6df6cd38"}, - {file = "wrapt-1.14.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5f24ca7953f2643d59a9c87d6e272d8adddd4a53bb62b9208f36db408d7aafc7"}, - {file = "wrapt-1.14.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b835b86bd5a1bdbe257d610eecab07bf685b1af2a7563093e0e69180c1d4af1"}, - {file = "wrapt-1.14.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b21650fa6907e523869e0396c5bd591cc326e5c1dd594dcdccac089561cacfb8"}, - {file = "wrapt-1.14.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:354d9fc6b1e44750e2a67b4b108841f5f5ea08853453ecbf44c81fdc2e0d50bd"}, - {file = "wrapt-1.14.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f83e9c21cd5275991076b2ba1cd35418af3504667affb4745b48937e214bafe"}, - {file = "wrapt-1.14.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:61e1a064906ccba038aa3c4a5a82f6199749efbbb3cef0804ae5c37f550eded0"}, - {file = "wrapt-1.14.0-cp38-cp38-win32.whl", hash = "sha256:28c659878f684365d53cf59dc9a1929ea2eecd7ac65da762be8b1ba193f7e84f"}, - {file = "wrapt-1.14.0-cp38-cp38-win_amd64.whl", hash = "sha256:b0ed6ad6c9640671689c2dbe6244680fe8b897c08fd1fab2228429b66c518e5e"}, - {file = "wrapt-1.14.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b3f7e671fb19734c872566e57ce7fc235fa953d7c181bb4ef138e17d607dc8a1"}, - {file = "wrapt-1.14.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87fa943e8bbe40c8c1ba4086971a6fefbf75e9991217c55ed1bcb2f1985bd3d4"}, - {file = "wrapt-1.14.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4775a574e9d84e0212f5b18886cace049a42e13e12009bb0491562a48bb2b758"}, - {file = "wrapt-1.14.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d57677238a0c5411c76097b8b93bdebb02eb845814c90f0b01727527a179e4d"}, - {file = "wrapt-1.14.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:00108411e0f34c52ce16f81f1d308a571df7784932cc7491d1e94be2ee93374b"}, - {file = "wrapt-1.14.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d332eecf307fca852d02b63f35a7872de32d5ba8b4ec32da82f45df986b39ff6"}, - {file = "wrapt-1.14.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:01f799def9b96a8ec1ef6b9c1bbaf2bbc859b87545efbecc4a78faea13d0e3a0"}, - {file = "wrapt-1.14.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47045ed35481e857918ae78b54891fac0c1d197f22c95778e66302668309336c"}, - {file = "wrapt-1.14.0-cp39-cp39-win32.whl", hash = "sha256:2eca15d6b947cfff51ed76b2d60fd172c6ecd418ddab1c5126032d27f74bc350"}, - {file = "wrapt-1.14.0-cp39-cp39-win_amd64.whl", hash = "sha256:bb36fbb48b22985d13a6b496ea5fb9bb2a076fea943831643836c9f6febbcfdc"}, - {file = "wrapt-1.14.0.tar.gz", hash = "sha256:8323a43bd9c91f62bb7d4be74cc9ff10090e7ef820e27bfe8815c57e68261311"}, -] -yarl = [ - {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2a8508f7350512434e41065684076f640ecce176d262a7d54f0da41d99c5a95"}, - {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da6df107b9ccfe52d3a48165e48d72db0eca3e3029b5b8cb4fe6ee3cb870ba8b"}, - {file = "yarl-1.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1d0894f238763717bdcfea74558c94e3bc34aeacd3351d769460c1a586a8b05"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4b95b7e00c6635a72e2d00b478e8a28bfb122dc76349a06e20792eb53a523"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c145ab54702334c42237a6c6c4cc08703b6aa9b94e2f227ceb3d477d20c36c63"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ca56f002eaf7998b5fcf73b2421790da9d2586331805f38acd9997743114e98"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:1d3d5ad8ea96bd6d643d80c7b8d5977b4e2fb1bab6c9da7322616fd26203d125"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:167ab7f64e409e9bdd99333fe8c67b5574a1f0495dcfd905bc7454e766729b9e"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:95a1873b6c0dd1c437fb3bb4a4aaa699a48c218ac7ca1e74b0bee0ab16c7d60d"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6152224d0a1eb254f97df3997d79dadd8bb2c1a02ef283dbb34b97d4f8492d23"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bb7d54b8f61ba6eee541fba4b83d22b8a046b4ef4d8eb7f15a7e35db2e1e245"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:9c1f083e7e71b2dd01f7cd7434a5f88c15213194df38bc29b388ccdf1492b739"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f44477ae29025d8ea87ec308539f95963ffdc31a82f42ca9deecf2d505242e72"}, - {file = "yarl-1.7.2-cp310-cp310-win32.whl", hash = "sha256:cff3ba513db55cc6a35076f32c4cdc27032bd075c9faef31fec749e64b45d26c"}, - {file = "yarl-1.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:c9c6d927e098c2d360695f2e9d38870b2e92e0919be07dbe339aefa32a090265"}, - {file = "yarl-1.7.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9b4c77d92d56a4c5027572752aa35082e40c561eec776048330d2907aead891d"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c01a89a44bb672c38f42b49cdb0ad667b116d731b3f4c896f72302ff77d71656"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c19324a1c5399b602f3b6e7db9478e5b1adf5cf58901996fc973fe4fccd73eed"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3abddf0b8e41445426d29f955b24aeecc83fa1072be1be4e0d194134a7d9baee"}, - {file = 
"yarl-1.7.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6a1a9fe17621af43e9b9fcea8bd088ba682c8192d744b386ee3c47b56eaabb2c"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8b0915ee85150963a9504c10de4e4729ae700af11df0dc5550e6587ed7891e92"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:29e0656d5497733dcddc21797da5a2ab990c0cb9719f1f969e58a4abac66234d"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:bf19725fec28452474d9887a128e98dd67eee7b7d52e932e6949c532d820dc3b"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d6f3d62e16c10e88d2168ba2d065aa374e3c538998ed04996cd373ff2036d64c"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ac10bbac36cd89eac19f4e51c032ba6b412b3892b685076f4acd2de18ca990aa"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aa32aaa97d8b2ed4e54dc65d241a0da1c627454950f7d7b1f95b13985afd6c5d"}, - {file = "yarl-1.7.2-cp36-cp36m-win32.whl", hash = "sha256:87f6e082bce21464857ba58b569370e7b547d239ca22248be68ea5d6b51464a1"}, - {file = "yarl-1.7.2-cp36-cp36m-win_amd64.whl", hash = "sha256:ac35ccde589ab6a1870a484ed136d49a26bcd06b6a1c6397b1967ca13ceb3913"}, - {file = "yarl-1.7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a467a431a0817a292121c13cbe637348b546e6ef47ca14a790aa2fa8cc93df63"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ab0c3274d0a846840bf6c27d2c60ba771a12e4d7586bf550eefc2df0b56b3b4"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d260d4dc495c05d6600264a197d9d6f7fc9347f21d2594926202fd08cf89a8ba"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:fc4dd8b01a8112809e6b636b00f487846956402834a7fd59d46d4f4267181c41"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c1164a2eac148d85bbdd23e07dfcc930f2e633220f3eb3c3e2a25f6148c2819e"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:67e94028817defe5e705079b10a8438b8cb56e7115fa01640e9c0bb3edf67332"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:89ccbf58e6a0ab89d487c92a490cb5660d06c3a47ca08872859672f9c511fc52"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8cce6f9fa3df25f55521fbb5c7e4a736683148bcc0c75b21863789e5185f9185"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:211fcd65c58bf250fb994b53bc45a442ddc9f441f6fec53e65de8cba48ded986"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c10ea1e80a697cf7d80d1ed414b5cb8f1eec07d618f54637067ae3c0334133c4"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:52690eb521d690ab041c3919666bea13ab9fbff80d615ec16fa81a297131276b"}, - {file = "yarl-1.7.2-cp37-cp37m-win32.whl", hash = "sha256:695ba021a9e04418507fa930d5f0704edbce47076bdcfeeaba1c83683e5649d1"}, - {file = "yarl-1.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c17965ff3706beedafd458c452bf15bac693ecd146a60a06a214614dc097a271"}, - {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fce78593346c014d0d986b7ebc80d782b7f5e19843ca798ed62f8e3ba8728576"}, - {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c2a1ac41a6aa980db03d098a5531f13985edcb451bcd9d00670b03129922cd0d"}, - {file = "yarl-1.7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:39d5493c5ecd75c8093fa7700a2fb5c94fe28c839c8e40144b7ab7ccba6938c8"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1eb6480ef366d75b54c68164094a6a560c247370a68c02dddb11f20c4c6d3c9d"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ba63585a89c9885f18331a55d25fe81dc2d82b71311ff8bd378fc8004202ff6"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e39378894ee6ae9f555ae2de332d513a5763276a9265f8e7cbaeb1b1ee74623a"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c0910c6b6c31359d2f6184828888c983d54d09d581a4a23547a35f1d0b9484b1"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6feca8b6bfb9eef6ee057628e71e1734caf520a907b6ec0d62839e8293e945c0"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8300401dc88cad23f5b4e4c1226f44a5aa696436a4026e456fe0e5d2f7f486e6"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:788713c2896f426a4e166b11f4ec538b5736294ebf7d5f654ae445fd44270832"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fd547ec596d90c8676e369dd8a581a21227fe9b4ad37d0dc7feb4ccf544c2d59"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:737e401cd0c493f7e3dd4db72aca11cfe069531c9761b8ea474926936b3c57c8"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf81561f2972fb895e7844882898bda1eef4b07b5b385bcd308d2098f1a767b"}, - {file = "yarl-1.7.2-cp38-cp38-win32.whl", hash = "sha256:ede3b46cdb719c794427dcce9d8beb4abe8b9aa1e97526cc20de9bd6583ad1ef"}, - {file = "yarl-1.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:cc8b7a7254c0fc3187d43d6cb54b5032d2365efd1df0cd1749c0c4df5f0ad45f"}, - {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:580c1f15500e137a8c37053e4cbf6058944d4c114701fa59944607505c2fe3a0"}, - {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:3ec1d9a0d7780416e657f1e405ba35ec1ba453a4f1511eb8b9fbab81cb8b3ce1"}, - {file = "yarl-1.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3bf8cfe8856708ede6a73907bf0501f2dc4e104085e070a41f5d88e7faf237f3"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1be4bbb3d27a4e9aa5f3df2ab61e3701ce8fcbd3e9846dbce7c033a7e8136746"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:534b047277a9a19d858cde163aba93f3e1677d5acd92f7d10ace419d478540de"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6ddcd80d79c96eb19c354d9dca95291589c5954099836b7c8d29278a7ec0bda"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9bfcd43c65fbb339dc7086b5315750efa42a34eefad0256ba114cd8ad3896f4b"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f64394bd7ceef1237cc604b5a89bf748c95982a84bcd3c4bbeb40f685c810794"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044daf3012e43d4b3538562da94a88fb12a6490652dbc29fb19adfa02cf72eac"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:368bcf400247318382cc150aaa632582d0780b28ee6053cd80268c7e72796dec"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:bab827163113177aee910adb1f48ff7af31ee0289f434f7e22d10baf624a6dfe"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0cba38120db72123db7c58322fa69e3c0efa933040ffb586c3a87c063ec7cae8"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:59218fef177296451b23214c91ea3aba7858b4ae3306dde120224cfe0f7a6ee8"}, - {file = "yarl-1.7.2-cp39-cp39-win32.whl", hash = "sha256:1edc172dcca3f11b38a9d5c7505c83c1913c0addc99cd28e993efeaafdfaa18d"}, - {file = "yarl-1.7.2-cp39-cp39-win_amd64.whl", hash = 
"sha256:797c2c412b04403d2da075fb93c123df35239cd7b4cc4e0cd9e5839b73f52c58"}, - {file = "yarl-1.7.2.tar.gz", hash = "sha256:45399b46d60c253327a460e99856752009fcee5f5d3c80b2f7c0cae1c38d56dd"}, -] +lock-version = "2.0" +python-versions = "^3.8, <3.11" +content-hash = "6411dd12806f87ff9b5989dec3c75be90fb91482635b9a0af5ea60331b5fb3d0" diff --git a/pycdp/cdp/__init__.py b/pycdp/cdp/__init__.py index bfead84..97c6897 100644 --- a/pycdp/cdp/__init__.py +++ b/pycdp/cdp/__init__.py @@ -3,4 +3,4 @@ # This file is generated from the CDP specification. If you need to make # changes, edit the generator and regenerate all of the modules. -from . import (accessibility, animation, audits, background_service, browser, css, cache_storage, cast, console, dom, dom_debugger, dom_snapshot, dom_storage, database, debugger, device_orientation, emulation, event_breakpoints, fetch, headless_experimental, heap_profiler, io, indexed_db, input_, inspector, layer_tree, log, media, memory, network, overlay, page, performance, performance_timeline, profiler, runtime, schema, security, service_worker, storage, system_info, target, tethering, tracing, web_audio, web_authn) \ No newline at end of file +from . 
import (accessibility, animation, audits, background_service, browser, css, cache_storage, cast, console, dom, dom_debugger, dom_snapshot, dom_storage, database, debugger, device_access, device_orientation, emulation, event_breakpoints, fed_cm, fetch, headless_experimental, heap_profiler, io, indexed_db, input_, inspector, layer_tree, log, media, memory, network, overlay, page, performance, performance_timeline, preload, profiler, runtime, schema, security, service_worker, storage, system_info, target, tethering, tracing, web_audio, web_authn) \ No newline at end of file diff --git a/pycdp/cdp/accessibility.py b/pycdp/cdp/accessibility.py index 43cfa7f..3f3c5d4 100644 --- a/pycdp/cdp/accessibility.py +++ b/pycdp/cdp/accessibility.py @@ -160,14 +160,14 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> AXValueSource: return cls( type_=AXValueSourceType.from_json(json['type']), - value=AXValue.from_json(json['value']) if 'value' in json else None, - attribute=str(json['attribute']) if 'attribute' in json else None, - attribute_value=AXValue.from_json(json['attributeValue']) if 'attributeValue' in json else None, - superseded=bool(json['superseded']) if 'superseded' in json else None, - native_source=AXValueNativeSourceType.from_json(json['nativeSource']) if 'nativeSource' in json else None, - native_source_value=AXValue.from_json(json['nativeSourceValue']) if 'nativeSourceValue' in json else None, - invalid=bool(json['invalid']) if 'invalid' in json else None, - invalid_reason=str(json['invalidReason']) if 'invalidReason' in json else None, + value=AXValue.from_json(json['value']) if json.get('value', None) is not None else None, + attribute=str(json['attribute']) if json.get('attribute', None) is not None else None, + attribute_value=AXValue.from_json(json['attributeValue']) if json.get('attributeValue', None) is not None else None, + superseded=bool(json['superseded']) if json.get('superseded', None) is not None else None, + 
native_source=AXValueNativeSourceType.from_json(json['nativeSource']) if json.get('nativeSource', None) is not None else None, + native_source_value=AXValue.from_json(json['nativeSourceValue']) if json.get('nativeSourceValue', None) is not None else None, + invalid=bool(json['invalid']) if json.get('invalid', None) is not None else None, + invalid_reason=str(json['invalidReason']) if json.get('invalidReason', None) is not None else None, ) @@ -195,8 +195,8 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> AXRelatedNode: return cls( backend_dom_node_id=dom.BackendNodeId.from_json(json['backendDOMNodeId']), - idref=str(json['idref']) if 'idref' in json else None, - text=str(json['text']) if 'text' in json else None, + idref=str(json['idref']) if json.get('idref', None) is not None else None, + text=str(json['text']) if json.get('text', None) is not None else None, ) @@ -254,9 +254,9 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> AXValue: return cls( type_=AXValueType.from_json(json['type']), - value=json['value'] if 'value' in json else None, - related_nodes=[AXRelatedNode.from_json(i) for i in json['relatedNodes']] if 'relatedNodes' in json else None, - sources=[AXValueSource.from_json(i) for i in json['sources']] if 'sources' in json else None, + value=json['value'] if json.get('value', None) is not None else None, + related_nodes=[AXRelatedNode.from_json(i) for i in json['relatedNodes']] if json.get('relatedNodes', None) is not None else None, + sources=[AXValueSource.from_json(i) for i in json['sources']] if json.get('sources', None) is not None else None, ) @@ -334,6 +334,9 @@ class AXNode: #: This ``Node``'s role, whether explicit or implicit. role: typing.Optional[AXValue] = None + #: This ``Node``'s Chrome raw role. + chrome_role: typing.Optional[AXValue] = None + #: The accessible name for this ``Node``. 
name: typing.Optional[AXValue] = None @@ -366,6 +369,8 @@ def to_json(self) -> T_JSON_DICT: json['ignoredReasons'] = [i.to_json() for i in self.ignored_reasons] if self.role is not None: json['role'] = self.role.to_json() + if self.chrome_role is not None: + json['chromeRole'] = self.chrome_role.to_json() if self.name is not None: json['name'] = self.name.to_json() if self.description is not None: @@ -389,16 +394,17 @@ def from_json(cls, json: T_JSON_DICT) -> AXNode: return cls( node_id=AXNodeId.from_json(json['nodeId']), ignored=bool(json['ignored']), - ignored_reasons=[AXProperty.from_json(i) for i in json['ignoredReasons']] if 'ignoredReasons' in json else None, - role=AXValue.from_json(json['role']) if 'role' in json else None, - name=AXValue.from_json(json['name']) if 'name' in json else None, - description=AXValue.from_json(json['description']) if 'description' in json else None, - value=AXValue.from_json(json['value']) if 'value' in json else None, - properties=[AXProperty.from_json(i) for i in json['properties']] if 'properties' in json else None, - parent_id=AXNodeId.from_json(json['parentId']) if 'parentId' in json else None, - child_ids=[AXNodeId.from_json(i) for i in json['childIds']] if 'childIds' in json else None, - backend_dom_node_id=dom.BackendNodeId.from_json(json['backendDOMNodeId']) if 'backendDOMNodeId' in json else None, - frame_id=page.FrameId.from_json(json['frameId']) if 'frameId' in json else None, + ignored_reasons=[AXProperty.from_json(i) for i in json['ignoredReasons']] if json.get('ignoredReasons', None) is not None else None, + role=AXValue.from_json(json['role']) if json.get('role', None) is not None else None, + chrome_role=AXValue.from_json(json['chromeRole']) if json.get('chromeRole', None) is not None else None, + name=AXValue.from_json(json['name']) if json.get('name', None) is not None else None, + description=AXValue.from_json(json['description']) if json.get('description', None) is not None else None, + 
value=AXValue.from_json(json['value']) if json.get('value', None) is not None else None, + properties=[AXProperty.from_json(i) for i in json['properties']] if json.get('properties', None) is not None else None, + parent_id=AXNodeId.from_json(json['parentId']) if json.get('parentId', None) is not None else None, + child_ids=[AXNodeId.from_json(i) for i in json['childIds']] if json.get('childIds', None) is not None else None, + backend_dom_node_id=dom.BackendNodeId.from_json(json['backendDOMNodeId']) if json.get('backendDOMNodeId', None) is not None else None, + frame_id=page.FrameId.from_json(json['frameId']) if json.get('frameId', None) is not None else None, ) @@ -437,7 +443,7 @@ def get_partial_ax_tree( :param node_id: *(Optional)* Identifier of the node to get the partial accessibility tree for. :param backend_node_id: *(Optional)* Identifier of the backend node to get the partial accessibility tree for. :param object_id: *(Optional)* JavaScript object id of the node wrapper to get the partial accessibility tree for. - :param fetch_relatives: *(Optional)* Whether to fetch this nodes ancestors, siblings and children. Defaults to true. + :param fetch_relatives: *(Optional)* Whether to fetch this node's ancestors, siblings and children. Defaults to true. :returns: The ``Accessibility.AXNode`` for this DOM node, if it exists, plus its ancestors, siblings and children, if requested. ''' params: T_JSON_DICT = dict() @@ -459,7 +465,6 @@ def get_partial_ax_tree( def get_full_ax_tree( depth: typing.Optional[int] = None, - max_depth: typing.Optional[int] = None, frame_id: typing.Optional[page.FrameId] = None ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[AXNode]]: ''' @@ -468,15 +473,12 @@ def get_full_ax_tree( **EXPERIMENTAL** :param depth: *(Optional)* The maximum depth at which descendants of the root node should be retrieved. If omitted, the full tree is returned. - :param max_depth: **(DEPRECATED)** *(Optional)* Deprecated. 
This parameter has been renamed to ```depth```. If depth is not provided, max_depth will be used. :param frame_id: *(Optional)* The frame for whose document the AX tree should be retrieved. If omited, the root frame is used. :returns: ''' params: T_JSON_DICT = dict() if depth is not None: params['depth'] = depth - if max_depth is not None: - params['max_depth'] = max_depth if frame_id is not None: params['frameId'] = frame_id.to_json() cmd_dict: T_JSON_DICT = { diff --git a/pycdp/cdp/animation.py b/pycdp/cdp/animation.py index d4c625d..a967832 100644 --- a/pycdp/cdp/animation.py +++ b/pycdp/cdp/animation.py @@ -78,8 +78,8 @@ def from_json(cls, json: T_JSON_DICT) -> Animation: start_time=float(json['startTime']), current_time=float(json['currentTime']), type_=str(json['type']), - source=AnimationEffect.from_json(json['source']) if 'source' in json else None, - css_id=str(json['cssId']) if 'cssId' in json else None, + source=AnimationEffect.from_json(json['source']) if json.get('source', None) is not None else None, + css_id=str(json['cssId']) if json.get('cssId', None) is not None else None, ) @@ -145,8 +145,8 @@ def from_json(cls, json: T_JSON_DICT) -> AnimationEffect: direction=str(json['direction']), fill=str(json['fill']), easing=str(json['easing']), - backend_node_id=dom.BackendNodeId.from_json(json['backendNodeId']) if 'backendNodeId' in json else None, - keyframes_rule=KeyframesRule.from_json(json['keyframesRule']) if 'keyframesRule' in json else None, + backend_node_id=dom.BackendNodeId.from_json(json['backendNodeId']) if json.get('backendNodeId', None) is not None else None, + keyframes_rule=KeyframesRule.from_json(json['keyframesRule']) if json.get('keyframesRule', None) is not None else None, ) @@ -172,7 +172,7 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> KeyframesRule: return cls( keyframes=[KeyframeStyle.from_json(i) for i in json['keyframes']], - name=str(json['name']) if 'name' in json else None, + 
name=str(json['name']) if json.get('name', None) is not None else None, ) diff --git a/pycdp/cdp/audits.py b/pycdp/cdp/audits.py index 0443193..2666e89 100644 --- a/pycdp/cdp/audits.py +++ b/pycdp/cdp/audits.py @@ -66,7 +66,7 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> AffectedRequest: return cls( request_id=network.RequestId.from_json(json['requestId']), - url=str(json['url']) if 'url' in json else None, + url=str(json['url']) if json.get('url', None) is not None else None, ) @@ -89,23 +89,25 @@ def from_json(cls, json: T_JSON_DICT) -> AffectedFrame: ) -class SameSiteCookieExclusionReason(enum.Enum): +class CookieExclusionReason(enum.Enum): EXCLUDE_SAME_SITE_UNSPECIFIED_TREATED_AS_LAX = "ExcludeSameSiteUnspecifiedTreatedAsLax" EXCLUDE_SAME_SITE_NONE_INSECURE = "ExcludeSameSiteNoneInsecure" EXCLUDE_SAME_SITE_LAX = "ExcludeSameSiteLax" EXCLUDE_SAME_SITE_STRICT = "ExcludeSameSiteStrict" EXCLUDE_INVALID_SAME_PARTY = "ExcludeInvalidSameParty" EXCLUDE_SAME_PARTY_CROSS_PARTY_CONTEXT = "ExcludeSamePartyCrossPartyContext" + EXCLUDE_DOMAIN_NON_ASCII = "ExcludeDomainNonASCII" + EXCLUDE_THIRD_PARTY_COOKIE_BLOCKED_IN_FIRST_PARTY_SET = "ExcludeThirdPartyCookieBlockedInFirstPartySet" def to_json(self) -> str: return self.value @classmethod - def from_json(cls, json: str) -> SameSiteCookieExclusionReason: + def from_json(cls, json: str) -> CookieExclusionReason: return cls(json) -class SameSiteCookieWarningReason(enum.Enum): +class CookieWarningReason(enum.Enum): WARN_SAME_SITE_UNSPECIFIED_CROSS_SITE_CONTEXT = "WarnSameSiteUnspecifiedCrossSiteContext" WARN_SAME_SITE_NONE_INSECURE = "WarnSameSiteNoneInsecure" WARN_SAME_SITE_UNSPECIFIED_LAX_ALLOW_UNSAFE = "WarnSameSiteUnspecifiedLaxAllowUnsafe" @@ -114,16 +116,18 @@ class SameSiteCookieWarningReason(enum.Enum): WARN_SAME_SITE_STRICT_CROSS_DOWNGRADE_LAX = "WarnSameSiteStrictCrossDowngradeLax" WARN_SAME_SITE_LAX_CROSS_DOWNGRADE_STRICT = "WarnSameSiteLaxCrossDowngradeStrict" 
WARN_SAME_SITE_LAX_CROSS_DOWNGRADE_LAX = "WarnSameSiteLaxCrossDowngradeLax" + WARN_ATTRIBUTE_VALUE_EXCEEDS_MAX_SIZE = "WarnAttributeValueExceedsMaxSize" + WARN_DOMAIN_NON_ASCII = "WarnDomainNonASCII" def to_json(self) -> str: return self.value @classmethod - def from_json(cls, json: str) -> SameSiteCookieWarningReason: + def from_json(cls, json: str) -> CookieWarningReason: return cls(json) -class SameSiteCookieOperation(enum.Enum): +class CookieOperation(enum.Enum): SET_COOKIE = "SetCookie" READ_COOKIE = "ReadCookie" @@ -131,24 +135,24 @@ def to_json(self) -> str: return self.value @classmethod - def from_json(cls, json: str) -> SameSiteCookieOperation: + def from_json(cls, json: str) -> CookieOperation: return cls(json) @dataclass -class SameSiteCookieIssueDetails: +class CookieIssueDetails: ''' This information is currently necessary, as the front-end has a difficult time finding a specific cookie. With this, we can convey specific error information without the cookie. ''' - cookie_warning_reasons: typing.List[SameSiteCookieWarningReason] + cookie_warning_reasons: typing.List[CookieWarningReason] - cookie_exclusion_reasons: typing.List[SameSiteCookieExclusionReason] + cookie_exclusion_reasons: typing.List[CookieExclusionReason] #: Optionally identifies the site-for-cookies and the cookie url, which #: may be used by the front-end as additional context. - operation: SameSiteCookieOperation + operation: CookieOperation #: If AffectedCookie is not set then rawCookieLine contains the raw #: Set-Cookie header string. 
This hints at a problem where the @@ -182,16 +186,16 @@ def to_json(self) -> T_JSON_DICT: return json @classmethod - def from_json(cls, json: T_JSON_DICT) -> SameSiteCookieIssueDetails: + def from_json(cls, json: T_JSON_DICT) -> CookieIssueDetails: return cls( - cookie_warning_reasons=[SameSiteCookieWarningReason.from_json(i) for i in json['cookieWarningReasons']], - cookie_exclusion_reasons=[SameSiteCookieExclusionReason.from_json(i) for i in json['cookieExclusionReasons']], - operation=SameSiteCookieOperation.from_json(json['operation']), - cookie=AffectedCookie.from_json(json['cookie']) if 'cookie' in json else None, - raw_cookie_line=str(json['rawCookieLine']) if 'rawCookieLine' in json else None, - site_for_cookies=str(json['siteForCookies']) if 'siteForCookies' in json else None, - cookie_url=str(json['cookieUrl']) if 'cookieUrl' in json else None, - request=AffectedRequest.from_json(json['request']) if 'request' in json else None, + cookie_warning_reasons=[CookieWarningReason.from_json(i) for i in json['cookieWarningReasons']], + cookie_exclusion_reasons=[CookieExclusionReason.from_json(i) for i in json['cookieExclusionReasons']], + operation=CookieOperation.from_json(json['operation']), + cookie=AffectedCookie.from_json(json['cookie']) if json.get('cookie', None) is not None else None, + raw_cookie_line=str(json['rawCookieLine']) if json.get('rawCookieLine', None) is not None else None, + site_for_cookies=str(json['siteForCookies']) if json.get('siteForCookies', None) is not None else None, + cookie_url=str(json['cookieUrl']) if json.get('cookieUrl', None) is not None else None, + request=AffectedRequest.from_json(json['request']) if json.get('request', None) is not None else None, ) @@ -209,6 +213,7 @@ def from_json(cls, json: str) -> MixedContentResolutionStatus: class MixedContentResourceType(enum.Enum): + ATTRIBUTION_SRC = "AttributionSrc" AUDIO = "Audio" BEACON = "Beacon" CSP_REPORT = "CSPReport" @@ -287,9 +292,9 @@ def from_json(cls, json: 
T_JSON_DICT) -> MixedContentIssueDetails: resolution_status=MixedContentResolutionStatus.from_json(json['resolutionStatus']), insecure_url=str(json['insecureURL']), main_resource_url=str(json['mainResourceURL']), - resource_type=MixedContentResourceType.from_json(json['resourceType']) if 'resourceType' in json else None, - request=AffectedRequest.from_json(json['request']) if 'request' in json else None, - frame=AffectedFrame.from_json(json['frame']) if 'frame' in json else None, + resource_type=MixedContentResourceType.from_json(json['resourceType']) if json.get('resourceType', None) is not None else None, + request=AffectedRequest.from_json(json['request']) if json.get('request', None) is not None else None, + frame=AffectedFrame.from_json(json['frame']) if json.get('frame', None) is not None else None, ) @@ -342,8 +347,8 @@ def from_json(cls, json: T_JSON_DICT) -> BlockedByResponseIssueDetails: return cls( request=AffectedRequest.from_json(json['request']), reason=BlockedByResponseReason.from_json(json['reason']), - parent_frame=AffectedFrame.from_json(json['parentFrame']) if 'parentFrame' in json else None, - blocked_frame=AffectedFrame.from_json(json['blockedFrame']) if 'blockedFrame' in json else None, + parent_frame=AffectedFrame.from_json(json['parentFrame']) if json.get('parentFrame', None) is not None else None, + blocked_frame=AffectedFrame.from_json(json['blockedFrame']) if json.get('blockedFrame', None) is not None else None, ) @@ -440,7 +445,7 @@ def from_json(cls, json: T_JSON_DICT) -> SourceCodeLocation: url=str(json['url']), line_number=int(json['lineNumber']), column_number=int(json['columnNumber']), - script_id=runtime.ScriptId.from_json(json['scriptId']) if 'scriptId' in json else None, + script_id=runtime.ScriptId.from_json(json['scriptId']) if json.get('scriptId', None) is not None else None, ) @@ -483,10 +488,10 @@ def from_json(cls, json: T_JSON_DICT) -> ContentSecurityPolicyIssueDetails: violated_directive=str(json['violatedDirective']), 
is_report_only=bool(json['isReportOnly']), content_security_policy_violation_type=ContentSecurityPolicyViolationType.from_json(json['contentSecurityPolicyViolationType']), - blocked_url=str(json['blockedURL']) if 'blockedURL' in json else None, - frame_ancestor=AffectedFrame.from_json(json['frameAncestor']) if 'frameAncestor' in json else None, - source_code_location=SourceCodeLocation.from_json(json['sourceCodeLocation']) if 'sourceCodeLocation' in json else None, - violating_node_id=dom.BackendNodeId.from_json(json['violatingNodeId']) if 'violatingNodeId' in json else None, + blocked_url=str(json['blockedURL']) if json.get('blockedURL', None) is not None else None, + frame_ancestor=AffectedFrame.from_json(json['frameAncestor']) if json.get('frameAncestor', None) is not None else None, + source_code_location=SourceCodeLocation.from_json(json['sourceCodeLocation']) if json.get('sourceCodeLocation', None) is not None else None, + violating_node_id=dom.BackendNodeId.from_json(json['violatingNodeId']) if json.get('violatingNodeId', None) is not None else None, ) @@ -577,9 +582,9 @@ def from_json(cls, json: T_JSON_DICT) -> TrustedWebActivityIssueDetails: return cls( url=str(json['url']), violation_type=TwaQualityEnforcementViolationType.from_json(json['violationType']), - http_status_code=int(json['httpStatusCode']) if 'httpStatusCode' in json else None, - package_name=str(json['packageName']) if 'packageName' in json else None, - signature=str(json['signature']) if 'signature' in json else None, + http_status_code=int(json['httpStatusCode']) if json.get('httpStatusCode', None) is not None else None, + package_name=str(json['packageName']) if json.get('packageName', None) is not None else None, + signature=str(json['signature']) if json.get('signature', None) is not None else None, ) @@ -664,26 +669,28 @@ def from_json(cls, json: T_JSON_DICT) -> CorsIssueDetails: cors_error_status=network.CorsErrorStatus.from_json(json['corsErrorStatus']), 
is_warning=bool(json['isWarning']), request=AffectedRequest.from_json(json['request']), - location=SourceCodeLocation.from_json(json['location']) if 'location' in json else None, - initiator_origin=str(json['initiatorOrigin']) if 'initiatorOrigin' in json else None, - resource_ip_address_space=network.IPAddressSpace.from_json(json['resourceIPAddressSpace']) if 'resourceIPAddressSpace' in json else None, - client_security_state=network.ClientSecurityState.from_json(json['clientSecurityState']) if 'clientSecurityState' in json else None, + location=SourceCodeLocation.from_json(json['location']) if json.get('location', None) is not None else None, + initiator_origin=str(json['initiatorOrigin']) if json.get('initiatorOrigin', None) is not None else None, + resource_ip_address_space=network.IPAddressSpace.from_json(json['resourceIPAddressSpace']) if json.get('resourceIPAddressSpace', None) is not None else None, + client_security_state=network.ClientSecurityState.from_json(json['clientSecurityState']) if json.get('clientSecurityState', None) is not None else None, ) class AttributionReportingIssueType(enum.Enum): PERMISSION_POLICY_DISABLED = "PermissionPolicyDisabled" - INVALID_ATTRIBUTION_SOURCE_EVENT_ID = "InvalidAttributionSourceEventId" - INVALID_ATTRIBUTION_DATA = "InvalidAttributionData" - ATTRIBUTION_SOURCE_UNTRUSTWORTHY_ORIGIN = "AttributionSourceUntrustworthyOrigin" - ATTRIBUTION_UNTRUSTWORTHY_ORIGIN = "AttributionUntrustworthyOrigin" - ATTRIBUTION_TRIGGER_DATA_TOO_LARGE = "AttributionTriggerDataTooLarge" - ATTRIBUTION_EVENT_SOURCE_TRIGGER_DATA_TOO_LARGE = "AttributionEventSourceTriggerDataTooLarge" - INVALID_ATTRIBUTION_SOURCE_EXPIRY = "InvalidAttributionSourceExpiry" - INVALID_ATTRIBUTION_SOURCE_PRIORITY = "InvalidAttributionSourcePriority" - INVALID_EVENT_SOURCE_TRIGGER_DATA = "InvalidEventSourceTriggerData" - INVALID_TRIGGER_PRIORITY = "InvalidTriggerPriority" - INVALID_TRIGGER_DEDUP_KEY = "InvalidTriggerDedupKey" + UNTRUSTWORTHY_REPORTING_ORIGIN = 
"UntrustworthyReportingOrigin" + INSECURE_CONTEXT = "InsecureContext" + INVALID_HEADER = "InvalidHeader" + INVALID_REGISTER_TRIGGER_HEADER = "InvalidRegisterTriggerHeader" + INVALID_ELIGIBLE_HEADER = "InvalidEligibleHeader" + SOURCE_AND_TRIGGER_HEADERS = "SourceAndTriggerHeaders" + SOURCE_IGNORED = "SourceIgnored" + TRIGGER_IGNORED = "TriggerIgnored" + OS_SOURCE_IGNORED = "OsSourceIgnored" + OS_TRIGGER_IGNORED = "OsTriggerIgnored" + INVALID_REGISTER_OS_SOURCE_HEADER = "InvalidRegisterOsSourceHeader" + INVALID_REGISTER_OS_TRIGGER_HEADER = "InvalidRegisterOsTriggerHeader" + WEB_AND_OS_HEADERS = "WebAndOsHeaders" def to_json(self) -> str: return self.value @@ -697,12 +704,10 @@ def from_json(cls, json: str) -> AttributionReportingIssueType: class AttributionReportingIssueDetails: ''' Details for issues around "Attribution Reporting API" usage. - Explainer: https://github.com/WICG/conversion-measurement-api + Explainer: https://github.com/WICG/attribution-reporting-api ''' violation_type: AttributionReportingIssueType - frame: typing.Optional[AffectedFrame] = None - request: typing.Optional[AffectedRequest] = None violating_node_id: typing.Optional[dom.BackendNodeId] = None @@ -712,8 +717,6 @@ class AttributionReportingIssueDetails: def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() json['violationType'] = self.violation_type.to_json() - if self.frame is not None: - json['frame'] = self.frame.to_json() if self.request is not None: json['request'] = self.request.to_json() if self.violating_node_id is not None: @@ -726,10 +729,9 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> AttributionReportingIssueDetails: return cls( violation_type=AttributionReportingIssueType.from_json(json['violationType']), - frame=AffectedFrame.from_json(json['frame']) if 'frame' in json else None, - request=AffectedRequest.from_json(json['request']) if 'request' in json else None, - violating_node_id=dom.BackendNodeId.from_json(json['violatingNodeId']) if 
'violatingNodeId' in json else None, - invalid_parameter=str(json['invalidParameter']) if 'invalidParameter' in json else None, + request=AffectedRequest.from_json(json['request']) if json.get('request', None) is not None else None, + violating_node_id=dom.BackendNodeId.from_json(json['violatingNodeId']) if json.get('violatingNodeId', None) is not None else None, + invalid_parameter=str(json['invalidParameter']) if json.get('invalidParameter', None) is not None else None, ) @@ -788,12 +790,22 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> NavigatorUserAgentIssueDetails: return cls( url=str(json['url']), - location=SourceCodeLocation.from_json(json['location']) if 'location' in json else None, + location=SourceCodeLocation.from_json(json['location']) if json.get('location', None) is not None else None, ) class GenericIssueErrorType(enum.Enum): CROSS_ORIGIN_PORTAL_POST_MESSAGE_ERROR = "CrossOriginPortalPostMessageError" + FORM_LABEL_FOR_NAME_ERROR = "FormLabelForNameError" + FORM_DUPLICATE_ID_FOR_INPUT_ERROR = "FormDuplicateIdForInputError" + FORM_INPUT_WITH_NO_LABEL_ERROR = "FormInputWithNoLabelError" + FORM_AUTOCOMPLETE_ATTRIBUTE_EMPTY_ERROR = "FormAutocompleteAttributeEmptyError" + FORM_EMPTY_ID_AND_NAME_ATTRIBUTES_FOR_INPUT_ERROR = "FormEmptyIdAndNameAttributesForInputError" + FORM_ARIA_LABELLED_BY_TO_NON_EXISTING_ID = "FormAriaLabelledByToNonExistingId" + FORM_INPUT_ASSIGNED_AUTOCOMPLETE_VALUE_TO_ID_OR_NAME_ATTRIBUTE_ERROR = "FormInputAssignedAutocompleteValueToIdOrNameAttributeError" + FORM_LABEL_HAS_NEITHER_FOR_NOR_NESTED_INPUT = "FormLabelHasNeitherForNorNestedInput" + FORM_LABEL_FOR_MATCHES_NON_EXISTING_ID_ERROR = "FormLabelForMatchesNonExistingIdError" + FORM_INPUT_HAS_WRONG_BUT_WELL_INTENDED_AUTOCOMPLETE_VALUE_ERROR = "FormInputHasWrongButWellIntendedAutocompleteValueError" def to_json(self) -> str: return self.value @@ -813,18 +825,28 @@ class GenericIssueDetails: frame_id: typing.Optional[page.FrameId] = None + 
violating_node_id: typing.Optional[dom.BackendNodeId] = None + + violating_node_attribute: typing.Optional[str] = None + def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() json['errorType'] = self.error_type.to_json() if self.frame_id is not None: json['frameId'] = self.frame_id.to_json() + if self.violating_node_id is not None: + json['violatingNodeId'] = self.violating_node_id.to_json() + if self.violating_node_attribute is not None: + json['violatingNodeAttribute'] = self.violating_node_attribute return json @classmethod def from_json(cls, json: T_JSON_DICT) -> GenericIssueDetails: return cls( error_type=GenericIssueErrorType.from_json(json['errorType']), - frame_id=page.FrameId.from_json(json['frameId']) if 'frameId' in json else None, + frame_id=page.FrameId.from_json(json['frameId']) if json.get('frameId', None) is not None else None, + violating_node_id=dom.BackendNodeId.from_json(json['violatingNodeId']) if json.get('violatingNodeId', None) is not None else None, + violating_node_attribute=str(json['violatingNodeAttribute']) if json.get('violatingNodeAttribute', None) is not None else None, ) @@ -832,41 +854,52 @@ def from_json(cls, json: T_JSON_DICT) -> GenericIssueDetails: class DeprecationIssueDetails: ''' This issue tracks information needed to print a deprecation message. 
- The formatting is inherited from the old console.log version, see more at: - https://source.chromium.org/chromium/chromium/src/+/main:third_party/blink/renderer/core/frame/deprecation.cc - TODO(crbug.com/1264960): Re-work format to add i18n support per: - https://source.chromium.org/chromium/chromium/src/+/main:third_party/blink/public/devtools_protocol/README.md + https://source.chromium.org/chromium/chromium/src/+/main:third_party/blink/renderer/core/frame/third_party/blink/renderer/core/frame/deprecation/README.md ''' source_code_location: SourceCodeLocation - deprecation_type: str + #: One of the deprecation names from third_party/blink/renderer/core/frame/deprecation/deprecation.json5 + type_: str affected_frame: typing.Optional[AffectedFrame] = None - #: The content of the deprecation issue (this won't be translated), - #: e.g. "window.inefficientLegacyStorageMethod will be removed in M97, - #: around January 2022. Please use Web Storage or Indexed Database - #: instead. This standard was abandoned in January, 1970. See - #: https://www.chromestatus.com/feature/5684870116278272 for more details." 
- message: typing.Optional[str] = None - def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() json['sourceCodeLocation'] = self.source_code_location.to_json() - json['deprecationType'] = self.deprecation_type + json['type'] = self.type_ if self.affected_frame is not None: json['affectedFrame'] = self.affected_frame.to_json() - if self.message is not None: - json['message'] = self.message return json @classmethod def from_json(cls, json: T_JSON_DICT) -> DeprecationIssueDetails: return cls( source_code_location=SourceCodeLocation.from_json(json['sourceCodeLocation']), - deprecation_type=str(json['deprecationType']), - affected_frame=AffectedFrame.from_json(json['affectedFrame']) if 'affectedFrame' in json else None, - message=str(json['message']) if 'message' in json else None, + type_=str(json['type']), + affected_frame=AffectedFrame.from_json(json['affectedFrame']) if json.get('affectedFrame', None) is not None else None, + ) + + +@dataclass +class BounceTrackingIssueDetails: + ''' + This issue warns about sites in the redirect chain of a finished navigation + that may be flagged as trackers and have their state cleared if they don't + receive a user interaction. Note that in this context 'site' means eTLD+1. + For example, if the URL ``https://example.test:80/bounce`` was in the + redirect chain, the site reported would be ``example.test``. + ''' + tracking_sites: typing.List[str] + + def to_json(self) -> T_JSON_DICT: + json: T_JSON_DICT = dict() + json['trackingSites'] = [i for i in self.tracking_sites] + return json + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> BounceTrackingIssueDetails: + return cls( + tracking_sites=[str(i) for i in json['trackingSites']], ) @@ -902,28 +935,42 @@ class FederatedAuthRequestIssueReason(enum.Enum): ''' Represents the failure reason when a federated authentication reason fails. 
Should be updated alongside RequestIdTokenStatus in - third_party/blink/public/mojom/webid/federated_auth_request.mojom to include + third_party/blink/public/mojom/devtools/inspector_issue.mojom to include all cases except for success. ''' - APPROVAL_DECLINED = "ApprovalDeclined" + SHOULD_EMBARGO = "ShouldEmbargo" TOO_MANY_REQUESTS = "TooManyRequests" - MANIFEST_HTTP_NOT_FOUND = "ManifestHttpNotFound" - MANIFEST_NO_RESPONSE = "ManifestNoResponse" - MANIFEST_INVALID_RESPONSE = "ManifestInvalidResponse" + WELL_KNOWN_HTTP_NOT_FOUND = "WellKnownHttpNotFound" + WELL_KNOWN_NO_RESPONSE = "WellKnownNoResponse" + WELL_KNOWN_INVALID_RESPONSE = "WellKnownInvalidResponse" + WELL_KNOWN_LIST_EMPTY = "WellKnownListEmpty" + WELL_KNOWN_INVALID_CONTENT_TYPE = "WellKnownInvalidContentType" + CONFIG_NOT_IN_WELL_KNOWN = "ConfigNotInWellKnown" + WELL_KNOWN_TOO_BIG = "WellKnownTooBig" + CONFIG_HTTP_NOT_FOUND = "ConfigHttpNotFound" + CONFIG_NO_RESPONSE = "ConfigNoResponse" + CONFIG_INVALID_RESPONSE = "ConfigInvalidResponse" + CONFIG_INVALID_CONTENT_TYPE = "ConfigInvalidContentType" CLIENT_METADATA_HTTP_NOT_FOUND = "ClientMetadataHttpNotFound" CLIENT_METADATA_NO_RESPONSE = "ClientMetadataNoResponse" CLIENT_METADATA_INVALID_RESPONSE = "ClientMetadataInvalidResponse" + CLIENT_METADATA_INVALID_CONTENT_TYPE = "ClientMetadataInvalidContentType" + DISABLED_IN_SETTINGS = "DisabledInSettings" ERROR_FETCHING_SIGNIN = "ErrorFetchingSignin" INVALID_SIGNIN_RESPONSE = "InvalidSigninResponse" ACCOUNTS_HTTP_NOT_FOUND = "AccountsHttpNotFound" ACCOUNTS_NO_RESPONSE = "AccountsNoResponse" ACCOUNTS_INVALID_RESPONSE = "AccountsInvalidResponse" + ACCOUNTS_LIST_EMPTY = "AccountsListEmpty" + ACCOUNTS_INVALID_CONTENT_TYPE = "AccountsInvalidContentType" ID_TOKEN_HTTP_NOT_FOUND = "IdTokenHttpNotFound" ID_TOKEN_NO_RESPONSE = "IdTokenNoResponse" ID_TOKEN_INVALID_RESPONSE = "IdTokenInvalidResponse" ID_TOKEN_INVALID_REQUEST = "IdTokenInvalidRequest" + ID_TOKEN_INVALID_CONTENT_TYPE = "IdTokenInvalidContentType" 
ERROR_ID_TOKEN = "ErrorIdToken" CANCELED = "Canceled" + RP_PAGE_NOT_VISIBLE = "RpPageNotVisible" def to_json(self) -> str: return self.value @@ -963,7 +1010,7 @@ class InspectorIssueCode(enum.Enum): optional fields in InspectorIssueDetails to convey more specific information about the kind of issue. ''' - SAME_SITE_COOKIE_ISSUE = "SameSiteCookieIssue" + COOKIE_ISSUE = "CookieIssue" MIXED_CONTENT_ISSUE = "MixedContentIssue" BLOCKED_BY_RESPONSE_ISSUE = "BlockedByResponseIssue" HEAVY_AD_ISSUE = "HeavyAdIssue" @@ -979,6 +1026,7 @@ class InspectorIssueCode(enum.Enum): DEPRECATION_ISSUE = "DeprecationIssue" CLIENT_HINT_ISSUE = "ClientHintIssue" FEDERATED_AUTH_REQUEST_ISSUE = "FederatedAuthRequestIssue" + BOUNCE_TRACKING_ISSUE = "BounceTrackingIssue" def to_json(self) -> str: return self.value @@ -995,7 +1043,7 @@ class InspectorIssueDetails: specific to the kind of issue. When adding a new issue code, please also add a new optional field to this type. ''' - same_site_cookie_issue_details: typing.Optional[SameSiteCookieIssueDetails] = None + cookie_issue_details: typing.Optional[CookieIssueDetails] = None mixed_content_issue_details: typing.Optional[MixedContentIssueDetails] = None @@ -1027,10 +1075,12 @@ class InspectorIssueDetails: federated_auth_request_issue_details: typing.Optional[FederatedAuthRequestIssueDetails] = None + bounce_tracking_issue_details: typing.Optional[BounceTrackingIssueDetails] = None + def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() - if self.same_site_cookie_issue_details is not None: - json['sameSiteCookieIssueDetails'] = self.same_site_cookie_issue_details.to_json() + if self.cookie_issue_details is not None: + json['cookieIssueDetails'] = self.cookie_issue_details.to_json() if self.mixed_content_issue_details is not None: json['mixedContentIssueDetails'] = self.mixed_content_issue_details.to_json() if self.blocked_by_response_issue_details is not None: @@ -1061,27 +1111,30 @@ def to_json(self) -> T_JSON_DICT: 
json['clientHintIssueDetails'] = self.client_hint_issue_details.to_json() if self.federated_auth_request_issue_details is not None: json['federatedAuthRequestIssueDetails'] = self.federated_auth_request_issue_details.to_json() + if self.bounce_tracking_issue_details is not None: + json['bounceTrackingIssueDetails'] = self.bounce_tracking_issue_details.to_json() return json @classmethod def from_json(cls, json: T_JSON_DICT) -> InspectorIssueDetails: return cls( - same_site_cookie_issue_details=SameSiteCookieIssueDetails.from_json(json['sameSiteCookieIssueDetails']) if 'sameSiteCookieIssueDetails' in json else None, - mixed_content_issue_details=MixedContentIssueDetails.from_json(json['mixedContentIssueDetails']) if 'mixedContentIssueDetails' in json else None, - blocked_by_response_issue_details=BlockedByResponseIssueDetails.from_json(json['blockedByResponseIssueDetails']) if 'blockedByResponseIssueDetails' in json else None, - heavy_ad_issue_details=HeavyAdIssueDetails.from_json(json['heavyAdIssueDetails']) if 'heavyAdIssueDetails' in json else None, - content_security_policy_issue_details=ContentSecurityPolicyIssueDetails.from_json(json['contentSecurityPolicyIssueDetails']) if 'contentSecurityPolicyIssueDetails' in json else None, - shared_array_buffer_issue_details=SharedArrayBufferIssueDetails.from_json(json['sharedArrayBufferIssueDetails']) if 'sharedArrayBufferIssueDetails' in json else None, - twa_quality_enforcement_details=TrustedWebActivityIssueDetails.from_json(json['twaQualityEnforcementDetails']) if 'twaQualityEnforcementDetails' in json else None, - low_text_contrast_issue_details=LowTextContrastIssueDetails.from_json(json['lowTextContrastIssueDetails']) if 'lowTextContrastIssueDetails' in json else None, - cors_issue_details=CorsIssueDetails.from_json(json['corsIssueDetails']) if 'corsIssueDetails' in json else None, - attribution_reporting_issue_details=AttributionReportingIssueDetails.from_json(json['attributionReportingIssueDetails']) if 
'attributionReportingIssueDetails' in json else None, - quirks_mode_issue_details=QuirksModeIssueDetails.from_json(json['quirksModeIssueDetails']) if 'quirksModeIssueDetails' in json else None, - navigator_user_agent_issue_details=NavigatorUserAgentIssueDetails.from_json(json['navigatorUserAgentIssueDetails']) if 'navigatorUserAgentIssueDetails' in json else None, - generic_issue_details=GenericIssueDetails.from_json(json['genericIssueDetails']) if 'genericIssueDetails' in json else None, - deprecation_issue_details=DeprecationIssueDetails.from_json(json['deprecationIssueDetails']) if 'deprecationIssueDetails' in json else None, - client_hint_issue_details=ClientHintIssueDetails.from_json(json['clientHintIssueDetails']) if 'clientHintIssueDetails' in json else None, - federated_auth_request_issue_details=FederatedAuthRequestIssueDetails.from_json(json['federatedAuthRequestIssueDetails']) if 'federatedAuthRequestIssueDetails' in json else None, + cookie_issue_details=CookieIssueDetails.from_json(json['cookieIssueDetails']) if json.get('cookieIssueDetails', None) is not None else None, + mixed_content_issue_details=MixedContentIssueDetails.from_json(json['mixedContentIssueDetails']) if json.get('mixedContentIssueDetails', None) is not None else None, + blocked_by_response_issue_details=BlockedByResponseIssueDetails.from_json(json['blockedByResponseIssueDetails']) if json.get('blockedByResponseIssueDetails', None) is not None else None, + heavy_ad_issue_details=HeavyAdIssueDetails.from_json(json['heavyAdIssueDetails']) if json.get('heavyAdIssueDetails', None) is not None else None, + content_security_policy_issue_details=ContentSecurityPolicyIssueDetails.from_json(json['contentSecurityPolicyIssueDetails']) if json.get('contentSecurityPolicyIssueDetails', None) is not None else None, + shared_array_buffer_issue_details=SharedArrayBufferIssueDetails.from_json(json['sharedArrayBufferIssueDetails']) if json.get('sharedArrayBufferIssueDetails', None) is not None else None, 
+ twa_quality_enforcement_details=TrustedWebActivityIssueDetails.from_json(json['twaQualityEnforcementDetails']) if json.get('twaQualityEnforcementDetails', None) is not None else None, + low_text_contrast_issue_details=LowTextContrastIssueDetails.from_json(json['lowTextContrastIssueDetails']) if json.get('lowTextContrastIssueDetails', None) is not None else None, + cors_issue_details=CorsIssueDetails.from_json(json['corsIssueDetails']) if json.get('corsIssueDetails', None) is not None else None, + attribution_reporting_issue_details=AttributionReportingIssueDetails.from_json(json['attributionReportingIssueDetails']) if json.get('attributionReportingIssueDetails', None) is not None else None, + quirks_mode_issue_details=QuirksModeIssueDetails.from_json(json['quirksModeIssueDetails']) if json.get('quirksModeIssueDetails', None) is not None else None, + navigator_user_agent_issue_details=NavigatorUserAgentIssueDetails.from_json(json['navigatorUserAgentIssueDetails']) if json.get('navigatorUserAgentIssueDetails', None) is not None else None, + generic_issue_details=GenericIssueDetails.from_json(json['genericIssueDetails']) if json.get('genericIssueDetails', None) is not None else None, + deprecation_issue_details=DeprecationIssueDetails.from_json(json['deprecationIssueDetails']) if json.get('deprecationIssueDetails', None) is not None else None, + client_hint_issue_details=ClientHintIssueDetails.from_json(json['clientHintIssueDetails']) if json.get('clientHintIssueDetails', None) is not None else None, + federated_auth_request_issue_details=FederatedAuthRequestIssueDetails.from_json(json['federatedAuthRequestIssueDetails']) if json.get('federatedAuthRequestIssueDetails', None) is not None else None, + bounce_tracking_issue_details=BounceTrackingIssueDetails.from_json(json['bounceTrackingIssueDetails']) if json.get('bounceTrackingIssueDetails', None) is not None else None, ) @@ -1127,7 +1180,7 @@ def from_json(cls, json: T_JSON_DICT) -> InspectorIssue: return cls( 
code=InspectorIssueCode.from_json(json['code']), details=InspectorIssueDetails.from_json(json['details']), - issue_id=IssueId.from_json(json['issueId']) if 'issueId' in json else None, + issue_id=IssueId.from_json(json['issueId']) if json.get('issueId', None) is not None else None, ) @@ -1164,7 +1217,7 @@ def get_encoded_response( } json = yield cmd_dict return ( - str(json['body']) if 'body' in json else None, + str(json['body']) if json.get('body', None) is not None else None, int(json['originalSize']), int(json['encodedSize']) ) diff --git a/pycdp/cdp/background_service.py b/pycdp/cdp/background_service.py index f73331f..8fa4e98 100644 --- a/pycdp/cdp/background_service.py +++ b/pycdp/cdp/background_service.py @@ -82,6 +82,9 @@ class BackgroundServiceEvent: #: A list of event-specific information. event_metadata: typing.List[EventMetadata] + #: Storage key this event belongs to. + storage_key: str + def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() json['timestamp'] = self.timestamp.to_json() @@ -91,6 +94,7 @@ def to_json(self) -> T_JSON_DICT: json['eventName'] = self.event_name json['instanceId'] = self.instance_id json['eventMetadata'] = [i.to_json() for i in self.event_metadata] + json['storageKey'] = self.storage_key return json @classmethod @@ -103,6 +107,7 @@ def from_json(cls, json: T_JSON_DICT) -> BackgroundServiceEvent: event_name=str(json['eventName']), instance_id=str(json['instanceId']), event_metadata=[EventMetadata.from_json(i) for i in json['eventMetadata']], + storage_key=str(json['storageKey']), ) diff --git a/pycdp/cdp/browser.py b/pycdp/cdp/browser.py index 1646a1d..ee45c7d 100644 --- a/pycdp/cdp/browser.py +++ b/pycdp/cdp/browser.py @@ -93,11 +93,11 @@ def to_json(self) -> T_JSON_DICT: @classmethod def from_json(cls, json: T_JSON_DICT) -> Bounds: return cls( - left=int(json['left']) if 'left' in json else None, - top=int(json['top']) if 'top' in json else None, - width=int(json['width']) if 'width' in json else None, - 
height=int(json['height']) if 'height' in json else None, - window_state=WindowState.from_json(json['windowState']) if 'windowState' in json else None, + left=int(json['left']) if json.get('left', None) is not None else None, + top=int(json['top']) if json.get('top', None) is not None else None, + width=int(json['width']) if json.get('width', None) is not None else None, + height=int(json['height']) if json.get('height', None) is not None else None, + window_state=WindowState.from_json(json['windowState']) if json.get('windowState', None) is not None else None, ) @@ -112,6 +112,8 @@ class PermissionType(enum.Enum): DURABLE_STORAGE = "durableStorage" FLASH = "flash" GEOLOCATION = "geolocation" + IDLE_DETECTION = "idleDetection" + LOCAL_FONTS = "localFonts" MIDI = "midi" MIDI_SYSEX = "midiSysex" NFC = "nfc" @@ -120,11 +122,13 @@ class PermissionType(enum.Enum): PERIODIC_BACKGROUND_SYNC = "periodicBackgroundSync" PROTECTED_MEDIA_IDENTIFIER = "protectedMediaIdentifier" SENSORS = "sensors" + STORAGE_ACCESS = "storageAccess" + TOP_LEVEL_STORAGE_ACCESS = "topLevelStorageAccess" VIDEO_CAPTURE = "videoCapture" VIDEO_CAPTURE_PAN_TILT_ZOOM = "videoCapturePanTiltZoom" - IDLE_DETECTION = "idleDetection" WAKE_LOCK_SCREEN = "wakeLockScreen" WAKE_LOCK_SYSTEM = "wakeLockSystem" + WINDOW_MANAGEMENT = "windowManagement" def to_json(self) -> str: return self.value @@ -187,10 +191,10 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> PermissionDescriptor: return cls( name=str(json['name']), - sysex=bool(json['sysex']) if 'sysex' in json else None, - user_visible_only=bool(json['userVisibleOnly']) if 'userVisibleOnly' in json else None, - allow_without_sanitization=bool(json['allowWithoutSanitization']) if 'allowWithoutSanitization' in json else None, - pan_tilt_zoom=bool(json['panTiltZoom']) if 'panTiltZoom' in json else None, + sysex=bool(json['sysex']) if json.get('sysex', None) is not None else None, + user_visible_only=bool(json['userVisibleOnly']) if 
json.get('userVisibleOnly', None) is not None else None, + allow_without_sanitization=bool(json['allowWithoutSanitization']) if json.get('allowWithoutSanitization', None) is not None else None, + pan_tilt_zoom=bool(json['panTiltZoom']) if json.get('panTiltZoom', None) is not None else None, ) @@ -490,7 +494,7 @@ def get_histograms( **EXPERIMENTAL** :param query: *(Optional)* Requested substring in name. Only histograms which have query as a substring in their name are extracted. An empty or absent query returns all histograms. - :param delta: *(Optional)* If true, retrieve delta since last call. + :param delta: *(Optional)* If true, retrieve delta since last delta call. :returns: Histograms. ''' params: T_JSON_DICT = dict() @@ -516,7 +520,7 @@ def get_histogram( **EXPERIMENTAL** :param name: Requested histogram name. - :param delta: *(Optional)* If true, retrieve delta since last call. + :param delta: *(Optional)* If true, retrieve delta since last delta call. :returns: Histogram. ''' params: T_JSON_DICT = dict() diff --git a/pycdp/cdp/cache_storage.py b/pycdp/cdp/cache_storage.py index 0110ac7..9f7e533 100644 --- a/pycdp/cdp/cache_storage.py +++ b/pycdp/cdp/cache_storage.py @@ -112,6 +112,9 @@ class Cache: #: Security origin of the cache. security_origin: str + #: Storage key of the cache. + storage_key: str + #: The name of the cache. 
cache_name: str @@ -119,6 +122,7 @@ def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() json['cacheId'] = self.cache_id.to_json() json['securityOrigin'] = self.security_origin + json['storageKey'] = self.storage_key json['cacheName'] = self.cache_name return json @@ -127,6 +131,7 @@ def from_json(cls, json: T_JSON_DICT) -> Cache: return cls( cache_id=CacheId.from_json(json['cacheId']), security_origin=str(json['securityOrigin']), + storage_key=str(json['storageKey']), cache_name=str(json['cacheName']), ) @@ -209,16 +214,21 @@ def delete_entry( def request_cache_names( - security_origin: str + security_origin: typing.Optional[str] = None, + storage_key: typing.Optional[str] = None ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[Cache]]: ''' Requests cache names. - :param security_origin: Security origin. + :param security_origin: *(Optional)* At least and at most one of securityOrigin, storageKey must be specified. Security origin. + :param storage_key: *(Optional)* Storage key. :returns: Caches for the security origin. 
''' params: T_JSON_DICT = dict() - params['securityOrigin'] = security_origin + if security_origin is not None: + params['securityOrigin'] = security_origin + if storage_key is not None: + params['storageKey'] = storage_key cmd_dict: T_JSON_DICT = { 'method': 'CacheStorage.requestCacheNames', 'params': params, diff --git a/pycdp/cdp/cast.py b/pycdp/cdp/cast.py index a334b30..9ff2019 100644 --- a/pycdp/cdp/cast.py +++ b/pycdp/cdp/cast.py @@ -35,7 +35,7 @@ def from_json(cls, json: T_JSON_DICT) -> Sink: return cls( name=str(json['name']), id_=str(json['id']), - session=str(json['session']) if 'session' in json else None, + session=str(json['session']) if json.get('session', None) is not None else None, ) diff --git a/pycdp/cdp/console.py b/pycdp/cdp/console.py index ba67f39..3c4addb 100644 --- a/pycdp/cdp/console.py +++ b/pycdp/cdp/console.py @@ -54,9 +54,9 @@ def from_json(cls, json: T_JSON_DICT) -> ConsoleMessage: source=str(json['source']), level=str(json['level']), text=str(json['text']), - url=str(json['url']) if 'url' in json else None, - line=int(json['line']) if 'line' in json else None, - column=int(json['column']) if 'column' in json else None, + url=str(json['url']) if json.get('url', None) is not None else None, + line=int(json['line']) if json.get('line', None) is not None else None, + column=int(json['column']) if json.get('column', None) is not None else None, ) diff --git a/pycdp/cdp/css.py b/pycdp/cdp/css.py index 3c4181d..d4b8ed9 100644 --- a/pycdp/cdp/css.py +++ b/pycdp/cdp/css.py @@ -57,10 +57,15 @@ class PseudoElementMatches: #: Matches of CSS rules applicable to the pseudo style. matches: typing.List[RuleMatch] + #: Pseudo element custom ident. 
+ pseudo_identifier: typing.Optional[str] = None + def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() json['pseudoType'] = self.pseudo_type.to_json() json['matches'] = [i.to_json() for i in self.matches] + if self.pseudo_identifier is not None: + json['pseudoIdentifier'] = self.pseudo_identifier return json @classmethod @@ -68,6 +73,7 @@ def from_json(cls, json: T_JSON_DICT) -> PseudoElementMatches: return cls( pseudo_type=dom.PseudoType.from_json(json['pseudoType']), matches=[RuleMatch.from_json(i) for i in json['matches']], + pseudo_identifier=str(json['pseudoIdentifier']) if json.get('pseudoIdentifier', None) is not None else None, ) @@ -93,7 +99,27 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> InheritedStyleEntry: return cls( matched_css_rules=[RuleMatch.from_json(i) for i in json['matchedCSSRules']], - inline_style=CSSStyle.from_json(json['inlineStyle']) if 'inlineStyle' in json else None, + inline_style=CSSStyle.from_json(json['inlineStyle']) if json.get('inlineStyle', None) is not None else None, + ) + + +@dataclass +class InheritedPseudoElementMatches: + ''' + Inherited pseudo element matches from pseudos of an ancestor node. + ''' + #: Matches of pseudo styles from the pseudos of an ancestor node. 
+ pseudo_elements: typing.List[PseudoElementMatches] + + def to_json(self) -> T_JSON_DICT: + json: T_JSON_DICT = dict() + json['pseudoElements'] = [i.to_json() for i in self.pseudo_elements] + return json + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> InheritedPseudoElementMatches: + return cls( + pseudo_elements=[PseudoElementMatches.from_json(i) for i in json['pseudoElements']], ) @@ -144,7 +170,7 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> Value: return cls( text=str(json['text']), - range_=SourceRange.from_json(json['range']) if 'range' in json else None, + range_=SourceRange.from_json(json['range']) if json.get('range', None) is not None else None, ) @@ -236,6 +262,9 @@ class CSSStyleSheetHeader: #: Whether the sourceURL field value comes from the sourceURL comment. has_source_url: typing.Optional[bool] = None + #: If the style sheet was loaded from a network resource, this indicates when the resource failed to load + loading_failed: typing.Optional[bool] = None + def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() json['styleSheetId'] = self.style_sheet_id.to_json() @@ -258,6 +287,8 @@ def to_json(self) -> T_JSON_DICT: json['ownerNode'] = self.owner_node.to_json() if self.has_source_url is not None: json['hasSourceURL'] = self.has_source_url + if self.loading_failed is not None: + json['loadingFailed'] = self.loading_failed return json @classmethod @@ -277,9 +308,10 @@ def from_json(cls, json: T_JSON_DICT) -> CSSStyleSheetHeader: length=float(json['length']), end_line=float(json['endLine']), end_column=float(json['endColumn']), - source_map_url=str(json['sourceMapURL']) if 'sourceMapURL' in json else None, - owner_node=dom.BackendNodeId.from_json(json['ownerNode']) if 'ownerNode' in json else None, - has_source_url=bool(json['hasSourceURL']) if 'hasSourceURL' in json else None, + source_map_url=str(json['sourceMapURL']) if json.get('sourceMapURL', None) is not None else None, + 
owner_node=dom.BackendNodeId.from_json(json['ownerNode']) if json.get('ownerNode', None) is not None else None, + has_source_url=bool(json['hasSourceURL']) if json.get('hasSourceURL', None) is not None else None, + loading_failed=bool(json['loadingFailed']) if json.get('loadingFailed', None) is not None else None, ) @@ -301,6 +333,9 @@ class CSSRule: #: stylesheet rules) this rule came from. style_sheet_id: typing.Optional[StyleSheetId] = None + #: Array of selectors from ancestor style rules, sorted by distance from the current rule. + nesting_selectors: typing.Optional[typing.List[str]] = None + #: Media list array (for rules involving media queries). The array enumerates media queries #: starting with the innermost one, going outwards. media: typing.Optional[typing.List[CSSMedia]] = None @@ -313,6 +348,14 @@ class CSSRule: #: The array enumerates @supports at-rules starting with the innermost one, going outwards. supports: typing.Optional[typing.List[CSSSupports]] = None + #: Cascade layer array. Contains the layer hierarchy that this rule belongs to starting + #: with the innermost layer and going outwards. + layers: typing.Optional[typing.List[CSSLayer]] = None + + #: @scope CSS at-rule array. + #: The array enumerates @scope at-rules starting with the innermost one, going outwards. 
+ scopes: typing.Optional[typing.List[CSSScope]] = None + def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() json['selectorList'] = self.selector_list.to_json() @@ -320,12 +363,18 @@ def to_json(self) -> T_JSON_DICT: json['style'] = self.style.to_json() if self.style_sheet_id is not None: json['styleSheetId'] = self.style_sheet_id.to_json() + if self.nesting_selectors is not None: + json['nestingSelectors'] = [i for i in self.nesting_selectors] if self.media is not None: json['media'] = [i.to_json() for i in self.media] if self.container_queries is not None: json['containerQueries'] = [i.to_json() for i in self.container_queries] if self.supports is not None: json['supports'] = [i.to_json() for i in self.supports] + if self.layers is not None: + json['layers'] = [i.to_json() for i in self.layers] + if self.scopes is not None: + json['scopes'] = [i.to_json() for i in self.scopes] return json @classmethod @@ -334,10 +383,13 @@ def from_json(cls, json: T_JSON_DICT) -> CSSRule: selector_list=SelectorList.from_json(json['selectorList']), origin=StyleSheetOrigin.from_json(json['origin']), style=CSSStyle.from_json(json['style']), - style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if 'styleSheetId' in json else None, - media=[CSSMedia.from_json(i) for i in json['media']] if 'media' in json else None, - container_queries=[CSSContainerQuery.from_json(i) for i in json['containerQueries']] if 'containerQueries' in json else None, - supports=[CSSSupports.from_json(i) for i in json['supports']] if 'supports' in json else None, + style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if json.get('styleSheetId', None) is not None else None, + nesting_selectors=[str(i) for i in json['nestingSelectors']] if json.get('nestingSelectors', None) is not None else None, + media=[CSSMedia.from_json(i) for i in json['media']] if json.get('media', None) is not None else None, + container_queries=[CSSContainerQuery.from_json(i) for i in json['containerQueries']] if 
json.get('containerQueries', None) is not None else None, + supports=[CSSSupports.from_json(i) for i in json['supports']] if json.get('supports', None) is not None else None, + layers=[CSSLayer.from_json(i) for i in json['layers']] if json.get('layers', None) is not None else None, + scopes=[CSSScope.from_json(i) for i in json['scopes']] if json.get('scopes', None) is not None else None, ) @@ -436,7 +488,7 @@ def from_json(cls, json: T_JSON_DICT) -> ShorthandEntry: return cls( name=str(json['name']), value=str(json['value']), - important=bool(json['important']) if 'important' in json else None, + important=bool(json['important']) if json.get('important', None) is not None else None, ) @@ -500,9 +552,9 @@ def from_json(cls, json: T_JSON_DICT) -> CSSStyle: return cls( css_properties=[CSSProperty.from_json(i) for i in json['cssProperties']], shorthand_entries=[ShorthandEntry.from_json(i) for i in json['shorthandEntries']], - style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if 'styleSheetId' in json else None, - css_text=str(json['cssText']) if 'cssText' in json else None, - range_=SourceRange.from_json(json['range']) if 'range' in json else None, + style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if json.get('styleSheetId', None) is not None else None, + css_text=str(json['cssText']) if json.get('cssText', None) is not None else None, + range_=SourceRange.from_json(json['range']) if json.get('range', None) is not None else None, ) @@ -535,6 +587,10 @@ class CSSProperty: #: The entire property range in the enclosing style declaration (if available). range_: typing.Optional[SourceRange] = None + #: Parsed longhand components of this property if it is a shorthand. + #: This field will be empty if the given property is not a shorthand. 
+ longhand_properties: typing.Optional[typing.List[CSSProperty]] = None + def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() json['name'] = self.name @@ -551,6 +607,8 @@ def to_json(self) -> T_JSON_DICT: json['disabled'] = self.disabled if self.range_ is not None: json['range'] = self.range_.to_json() + if self.longhand_properties is not None: + json['longhandProperties'] = [i.to_json() for i in self.longhand_properties] return json @classmethod @@ -558,12 +616,13 @@ def from_json(cls, json: T_JSON_DICT) -> CSSProperty: return cls( name=str(json['name']), value=str(json['value']), - important=bool(json['important']) if 'important' in json else None, - implicit=bool(json['implicit']) if 'implicit' in json else None, - text=str(json['text']) if 'text' in json else None, - parsed_ok=bool(json['parsedOk']) if 'parsedOk' in json else None, - disabled=bool(json['disabled']) if 'disabled' in json else None, - range_=SourceRange.from_json(json['range']) if 'range' in json else None, + important=bool(json['important']) if json.get('important', None) is not None else None, + implicit=bool(json['implicit']) if json.get('implicit', None) is not None else None, + text=str(json['text']) if json.get('text', None) is not None else None, + parsed_ok=bool(json['parsedOk']) if json.get('parsedOk', None) is not None else None, + disabled=bool(json['disabled']) if json.get('disabled', None) is not None else None, + range_=SourceRange.from_json(json['range']) if json.get('range', None) is not None else None, + longhand_properties=[CSSProperty.from_json(i) for i in json['longhandProperties']] if json.get('longhandProperties', None) is not None else None, ) @@ -613,10 +672,10 @@ def from_json(cls, json: T_JSON_DICT) -> CSSMedia: return cls( text=str(json['text']), source=str(json['source']), - source_url=str(json['sourceURL']) if 'sourceURL' in json else None, - range_=SourceRange.from_json(json['range']) if 'range' in json else None, - 
style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if 'styleSheetId' in json else None, - media_list=[MediaQuery.from_json(i) for i in json['mediaList']] if 'mediaList' in json else None, + source_url=str(json['sourceURL']) if json.get('sourceURL', None) is not None else None, + range_=SourceRange.from_json(json['range']) if json.get('range', None) is not None else None, + style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if json.get('styleSheetId', None) is not None else None, + media_list=[MediaQuery.from_json(i) for i in json['mediaList']] if json.get('mediaList', None) is not None else None, ) @@ -682,8 +741,8 @@ def from_json(cls, json: T_JSON_DICT) -> MediaQueryExpression: value=float(json['value']), unit=str(json['unit']), feature=str(json['feature']), - value_range=SourceRange.from_json(json['valueRange']) if 'valueRange' in json else None, - computed_length=float(json['computedLength']) if 'computedLength' in json else None, + value_range=SourceRange.from_json(json['valueRange']) if json.get('valueRange', None) is not None else None, + computed_length=float(json['computedLength']) if json.get('computedLength', None) is not None else None, ) @@ -705,6 +764,12 @@ class CSSContainerQuery: #: Optional name for the container. name: typing.Optional[str] = None + #: Optional physical axes queried for the container. + physical_axes: typing.Optional[dom.PhysicalAxes] = None + + #: Optional logical axes queried for the container. 
+ logical_axes: typing.Optional[dom.LogicalAxes] = None + def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() json['text'] = self.text @@ -714,15 +779,21 @@ def to_json(self) -> T_JSON_DICT: json['styleSheetId'] = self.style_sheet_id.to_json() if self.name is not None: json['name'] = self.name + if self.physical_axes is not None: + json['physicalAxes'] = self.physical_axes.to_json() + if self.logical_axes is not None: + json['logicalAxes'] = self.logical_axes.to_json() return json @classmethod def from_json(cls, json: T_JSON_DICT) -> CSSContainerQuery: return cls( text=str(json['text']), - range_=SourceRange.from_json(json['range']) if 'range' in json else None, - style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if 'styleSheetId' in json else None, - name=str(json['name']) if 'name' in json else None, + range_=SourceRange.from_json(json['range']) if json.get('range', None) is not None else None, + style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if json.get('styleSheetId', None) is not None else None, + name=str(json['name']) if json.get('name', None) is not None else None, + physical_axes=dom.PhysicalAxes.from_json(json['physicalAxes']) if json.get('physicalAxes', None) is not None else None, + logical_axes=dom.LogicalAxes.from_json(json['logicalAxes']) if json.get('logicalAxes', None) is not None else None, ) @@ -734,6 +805,9 @@ class CSSSupports: #: Supports rule text. text: str + #: Whether the supports condition is satisfied. + active: bool + #: The associated rule header range in the enclosing stylesheet (if #: available). 
range_: typing.Optional[SourceRange] = None @@ -744,6 +818,7 @@ class CSSSupports: def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() json['text'] = self.text + json['active'] = self.active if self.range_ is not None: json['range'] = self.range_.to_json() if self.style_sheet_id is not None: @@ -754,8 +829,107 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> CSSSupports: return cls( text=str(json['text']), - range_=SourceRange.from_json(json['range']) if 'range' in json else None, - style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if 'styleSheetId' in json else None, + active=bool(json['active']), + range_=SourceRange.from_json(json['range']) if json.get('range', None) is not None else None, + style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if json.get('styleSheetId', None) is not None else None, + ) + + +@dataclass +class CSSScope: + ''' + CSS Scope at-rule descriptor. + ''' + #: Scope rule text. + text: str + + #: The associated rule header range in the enclosing stylesheet (if + #: available). + range_: typing.Optional[SourceRange] = None + + #: Identifier of the stylesheet containing this object (if exists). + style_sheet_id: typing.Optional[StyleSheetId] = None + + def to_json(self) -> T_JSON_DICT: + json: T_JSON_DICT = dict() + json['text'] = self.text + if self.range_ is not None: + json['range'] = self.range_.to_json() + if self.style_sheet_id is not None: + json['styleSheetId'] = self.style_sheet_id.to_json() + return json + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> CSSScope: + return cls( + text=str(json['text']), + range_=SourceRange.from_json(json['range']) if json.get('range', None) is not None else None, + style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if json.get('styleSheetId', None) is not None else None, + ) + + +@dataclass +class CSSLayer: + ''' + CSS Layer at-rule descriptor. + ''' + #: Layer name. 
+ text: str + + #: The associated rule header range in the enclosing stylesheet (if + #: available). + range_: typing.Optional[SourceRange] = None + + #: Identifier of the stylesheet containing this object (if exists). + style_sheet_id: typing.Optional[StyleSheetId] = None + + def to_json(self) -> T_JSON_DICT: + json: T_JSON_DICT = dict() + json['text'] = self.text + if self.range_ is not None: + json['range'] = self.range_.to_json() + if self.style_sheet_id is not None: + json['styleSheetId'] = self.style_sheet_id.to_json() + return json + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> CSSLayer: + return cls( + text=str(json['text']), + range_=SourceRange.from_json(json['range']) if json.get('range', None) is not None else None, + style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if json.get('styleSheetId', None) is not None else None, + ) + + +@dataclass +class CSSLayerData: + ''' + CSS Layer data. + ''' + #: Layer name. + name: str + + #: Layer order. The order determines the order of the layer in the cascade order. + #: A higher number has higher priority in the cascade order. + order: float + + #: Direct sub-layers + sub_layers: typing.Optional[typing.List[CSSLayerData]] = None + + def to_json(self) -> T_JSON_DICT: + json: T_JSON_DICT = dict() + json['name'] = self.name + json['order'] = self.order + if self.sub_layers is not None: + json['subLayers'] = [i.to_json() for i in self.sub_layers] + return json + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> CSSLayerData: + return cls( + name=str(json['name']), + order=float(json['order']), + sub_layers=[CSSLayerData.from_json(i) for i in json['subLayers']] if json.get('subLayers', None) is not None else None, ) @@ -850,6 +1024,9 @@ class FontFace: #: The font-stretch. font_stretch: str + #: The font-display. + font_display: str + #: The unicode-range. 
unicode_range: str @@ -869,6 +1046,7 @@ def to_json(self) -> T_JSON_DICT: json['fontVariant'] = self.font_variant json['fontWeight'] = self.font_weight json['fontStretch'] = self.font_stretch + json['fontDisplay'] = self.font_display json['unicodeRange'] = self.unicode_range json['src'] = self.src json['platformFontFamily'] = self.platform_font_family @@ -884,10 +1062,67 @@ def from_json(cls, json: T_JSON_DICT) -> FontFace: font_variant=str(json['fontVariant']), font_weight=str(json['fontWeight']), font_stretch=str(json['fontStretch']), + font_display=str(json['fontDisplay']), unicode_range=str(json['unicodeRange']), src=str(json['src']), platform_font_family=str(json['platformFontFamily']), - font_variation_axes=[FontVariationAxis.from_json(i) for i in json['fontVariationAxes']] if 'fontVariationAxes' in json else None, + font_variation_axes=[FontVariationAxis.from_json(i) for i in json['fontVariationAxes']] if json.get('fontVariationAxes', None) is not None else None, + ) + + +@dataclass +class CSSTryRule: + ''' + CSS try rule representation. + ''' + #: Parent stylesheet's origin. + origin: StyleSheetOrigin + + #: Associated style declaration. + style: CSSStyle + + #: The css style sheet identifier (absent for user agent stylesheet and user-specified + #: stylesheet rules) this rule came from. 
+ style_sheet_id: typing.Optional[StyleSheetId] = None + + def to_json(self) -> T_JSON_DICT: + json: T_JSON_DICT = dict() + json['origin'] = self.origin.to_json() + json['style'] = self.style.to_json() + if self.style_sheet_id is not None: + json['styleSheetId'] = self.style_sheet_id.to_json() + return json + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> CSSTryRule: + return cls( + origin=StyleSheetOrigin.from_json(json['origin']), + style=CSSStyle.from_json(json['style']), + style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if json.get('styleSheetId', None) is not None else None, + ) + + +@dataclass +class CSSPositionFallbackRule: + ''' + CSS position-fallback rule representation. + ''' + name: Value + + #: List of keyframes. + try_rules: typing.List[CSSTryRule] + + def to_json(self) -> T_JSON_DICT: + json: T_JSON_DICT = dict() + json['name'] = self.name.to_json() + json['tryRules'] = [i.to_json() for i in self.try_rules] + return json + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> CSSPositionFallbackRule: + return cls( + name=Value.from_json(json['name']), + try_rules=[CSSTryRule.from_json(i) for i in json['tryRules']], ) @@ -949,7 +1184,7 @@ def from_json(cls, json: T_JSON_DICT) -> CSSKeyframeRule: origin=StyleSheetOrigin.from_json(json['origin']), key_text=Value.from_json(json['keyText']), style=CSSStyle.from_json(json['style']), - style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if 'styleSheetId' in json else None, + style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if json.get('styleSheetId', None) is not None else None, ) @@ -1108,9 +1343,9 @@ def get_background_colors( } json = yield cmd_dict return ( - [str(i) for i in json['backgroundColors']] if 'backgroundColors' in json else None, - str(json['computedFontSize']) if 'computedFontSize' in json else None, - str(json['computedFontWeight']) if 'computedFontWeight' in json else None + [str(i) for i in json['backgroundColors']] if 
json.get('backgroundColors', None) is not None else None, + str(json['computedFontSize']) if json.get('computedFontSize', None) is not None else None, + str(json['computedFontWeight']) if json.get('computedFontWeight', None) is not None else None ) @@ -1154,14 +1389,14 @@ def get_inline_styles_for_node( } json = yield cmd_dict return ( - CSSStyle.from_json(json['inlineStyle']) if 'inlineStyle' in json else None, - CSSStyle.from_json(json['attributesStyle']) if 'attributesStyle' in json else None + CSSStyle.from_json(json['inlineStyle']) if json.get('inlineStyle', None) is not None else None, + CSSStyle.from_json(json['attributesStyle']) if json.get('attributesStyle', None) is not None else None ) def get_matched_styles_for_node( node_id: dom.NodeId - ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.Optional[CSSStyle], typing.Optional[CSSStyle], typing.Optional[typing.List[RuleMatch]], typing.Optional[typing.List[PseudoElementMatches]], typing.Optional[typing.List[InheritedStyleEntry]], typing.Optional[typing.List[CSSKeyframesRule]]]]: + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.Optional[CSSStyle], typing.Optional[CSSStyle], typing.Optional[typing.List[RuleMatch]], typing.Optional[typing.List[PseudoElementMatches]], typing.Optional[typing.List[InheritedStyleEntry]], typing.Optional[typing.List[InheritedPseudoElementMatches]], typing.Optional[typing.List[CSSKeyframesRule]], typing.Optional[typing.List[CSSPositionFallbackRule]], typing.Optional[dom.NodeId]]]: ''' Returns requested styles for a DOM node identified by ``nodeId``. @@ -1173,7 +1408,10 @@ def get_matched_styles_for_node( 2. **matchedCSSRules** - *(Optional)* CSS rules matching this node, from all applicable stylesheets. 3. **pseudoElements** - *(Optional)* Pseudo style matches for this node. 4. **inherited** - *(Optional)* A chain of inherited styles (from the immediate node parent up to the DOM tree root). - 5. 
**cssKeyframesRules** - *(Optional)* A list of CSS keyframed animations matching this node. + 5. **inheritedPseudoElements** - *(Optional)* A chain of inherited pseudo element styles (from the immediate node parent up to the DOM tree root). + 6. **cssKeyframesRules** - *(Optional)* A list of CSS keyframed animations matching this node. + 7. **cssPositionFallbackRules** - *(Optional)* A list of CSS position fallbacks matching this node. + 8. **parentLayoutNodeId** - *(Optional)* Id of the first parent element that does not have display: contents. ''' params: T_JSON_DICT = dict() params['nodeId'] = node_id.to_json() @@ -1183,12 +1421,15 @@ def get_matched_styles_for_node( } json = yield cmd_dict return ( - CSSStyle.from_json(json['inlineStyle']) if 'inlineStyle' in json else None, - CSSStyle.from_json(json['attributesStyle']) if 'attributesStyle' in json else None, - [RuleMatch.from_json(i) for i in json['matchedCSSRules']] if 'matchedCSSRules' in json else None, - [PseudoElementMatches.from_json(i) for i in json['pseudoElements']] if 'pseudoElements' in json else None, - [InheritedStyleEntry.from_json(i) for i in json['inherited']] if 'inherited' in json else None, - [CSSKeyframesRule.from_json(i) for i in json['cssKeyframesRules']] if 'cssKeyframesRules' in json else None + CSSStyle.from_json(json['inlineStyle']) if json.get('inlineStyle', None) is not None else None, + CSSStyle.from_json(json['attributesStyle']) if json.get('attributesStyle', None) is not None else None, + [RuleMatch.from_json(i) for i in json['matchedCSSRules']] if json.get('matchedCSSRules', None) is not None else None, + [PseudoElementMatches.from_json(i) for i in json['pseudoElements']] if json.get('pseudoElements', None) is not None else None, + [InheritedStyleEntry.from_json(i) for i in json['inherited']] if json.get('inherited', None) is not None else None, + [InheritedPseudoElementMatches.from_json(i) for i in json['inheritedPseudoElements']] if json.get('inheritedPseudoElements', None) is 
not None else None, + [CSSKeyframesRule.from_json(i) for i in json['cssKeyframesRules']] if json.get('cssKeyframesRules', None) is not None else None, + [CSSPositionFallbackRule.from_json(i) for i in json['cssPositionFallbackRules']] if json.get('cssPositionFallbackRules', None) is not None else None, + dom.NodeId.from_json(json['parentLayoutNodeId']) if json.get('parentLayoutNodeId', None) is not None else None ) @@ -1244,6 +1485,30 @@ def get_style_sheet_text( return str(json['text']) +def get_layers_for_node( + node_id: dom.NodeId + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,CSSLayerData]: + ''' + Returns all layers parsed by the rendering engine for the tree scope of a node. + Given a DOM element identified by nodeId, getLayersForNode returns the root + layer for the nearest ancestor document or shadow root. The layer root contains + the full layer tree for the tree scope and their ordering. + + **EXPERIMENTAL** + + :param node_id: + :returns: + ''' + params: T_JSON_DICT = dict() + params['nodeId'] = node_id.to_json() + cmd_dict: T_JSON_DICT = { + 'method': 'CSS.getLayersForNode', + 'params': params, + } + json = yield cmd_dict + return CSSLayerData.from_json(json['rootLayer']) + + def track_computed_style_updates( properties_to_track: typing.List[CSSComputedStyleProperty] ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: @@ -1274,7 +1539,7 @@ def take_computed_style_updates() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,ty **EXPERIMENTAL** - :returns: The list of node Ids that have their tracked computed styles updated + :returns: The list of node Ids that have their tracked computed styles updated. 
''' cmd_dict: T_JSON_DICT = { 'method': 'CSS.takeComputedStyleUpdates', @@ -1411,6 +1676,33 @@ def set_supports_text( return CSSSupports.from_json(json['supports']) +def set_scope_text( + style_sheet_id: StyleSheetId, + range_: SourceRange, + text: str + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,CSSScope]: + ''' + Modifies the expression of a scope at-rule. + + **EXPERIMENTAL** + + :param style_sheet_id: + :param range_: + :param text: + :returns: The resulting CSS Scope rule after modification. + ''' + params: T_JSON_DICT = dict() + params['styleSheetId'] = style_sheet_id.to_json() + params['range'] = range_.to_json() + params['text'] = text + cmd_dict: T_JSON_DICT = { + 'method': 'CSS.setScopeText', + 'params': params, + } + json = yield cmd_dict + return CSSScope.from_json(json['scope']) + + def set_rule_selector( style_sheet_id: StyleSheetId, range_: SourceRange, @@ -1455,7 +1747,7 @@ def set_style_sheet_text( 'params': params, } json = yield cmd_dict - return str(json['sourceMapURL']) if 'sourceMapURL' in json else None + return str(json['sourceMapURL']) if json.get('sourceMapURL', None) is not None else None def set_style_texts( @@ -1490,7 +1782,7 @@ def start_rule_usage_tracking() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None def stop_rule_usage_tracking() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[RuleUsage]]: ''' Stop tracking rule usage and return the list of rules that were used since last call to - ``takeCoverageDelta`` (or since start of coverage instrumentation) + ``takeCoverageDelta`` (or since start of coverage instrumentation). :returns: ''' @@ -1504,7 +1796,7 @@ def stop_rule_usage_tracking() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typin def take_coverage_delta() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.List[RuleUsage], float]]: ''' Obtain list of rules that became used since last call to this method (or since start of coverage - instrumentation) + instrumentation). 
:returns: A tuple with the following items: @@ -1545,7 +1837,7 @@ def set_local_fonts_enabled( class FontsUpdated: ''' Fires whenever a web font is updated. A non-empty font parameter indicates a successfully loaded - web font + web font. ''' #: The web font that has loaded. font: typing.Optional[FontFace] @@ -1553,7 +1845,7 @@ class FontsUpdated: @classmethod def from_json(cls, json: T_JSON_DICT) -> FontsUpdated: return cls( - font=FontFace.from_json(json['font']) if 'font' in json else None + font=FontFace.from_json(json['font']) if json.get('font', None) is not None else None ) diff --git a/pycdp/cdp/database.py b/pycdp/cdp/database.py index b8052e4..d61d4c4 100644 --- a/pycdp/cdp/database.py +++ b/pycdp/cdp/database.py @@ -129,9 +129,9 @@ def execute_sql( } json = yield cmd_dict return ( - [str(i) for i in json['columnNames']] if 'columnNames' in json else None, - [i for i in json['values']] if 'values' in json else None, - Error.from_json(json['sqlError']) if 'sqlError' in json else None + [str(i) for i in json['columnNames']] if json.get('columnNames', None) is not None else None, + [i for i in json['values']] if json.get('values', None) is not None else None, + Error.from_json(json['sqlError']) if json.get('sqlError', None) is not None else None ) diff --git a/pycdp/cdp/debugger.py b/pycdp/cdp/debugger.py index ad67882..148b57f 100644 --- a/pycdp/cdp/debugger.py +++ b/pycdp/cdp/debugger.py @@ -72,7 +72,7 @@ def from_json(cls, json: T_JSON_DICT) -> Location: return cls( script_id=runtime.ScriptId.from_json(json['scriptId']), line_number=int(json['lineNumber']), - column_number=int(json['columnNumber']) if 'columnNumber' in json else None, + column_number=int(json['columnNumber']) if json.get('columnNumber', None) is not None else None, ) @@ -141,6 +141,8 @@ class CallFrame: location: Location #: JavaScript script name or url. 
+ #: Deprecated in favor of using the ``location.scriptId`` to resolve the URL via a previously + #: sent ``Debugger.scriptParsed`` event. url: str #: Scope chain for this call frame. @@ -155,6 +157,12 @@ class CallFrame: #: The value being returned, if the function is at return point. return_value: typing.Optional[runtime.RemoteObject] = None + #: Valid only while the VM is paused and indicates whether this frame + #: can be restarted or not. Note that a ``true`` value here does not + #: guarantee that Debugger#restartFrame with this CallFrameId will be + #: successful, but it is very likely. + can_be_restarted: typing.Optional[bool] = None + def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() json['callFrameId'] = self.call_frame_id.to_json() @@ -167,6 +175,8 @@ def to_json(self) -> T_JSON_DICT: json['functionLocation'] = self.function_location.to_json() if self.return_value is not None: json['returnValue'] = self.return_value.to_json() + if self.can_be_restarted is not None: + json['canBeRestarted'] = self.can_be_restarted return json @classmethod @@ -178,8 +188,9 @@ def from_json(cls, json: T_JSON_DICT) -> CallFrame: url=str(json['url']), scope_chain=[Scope.from_json(i) for i in json['scopeChain']], this=runtime.RemoteObject.from_json(json['this']), - function_location=Location.from_json(json['functionLocation']) if 'functionLocation' in json else None, - return_value=runtime.RemoteObject.from_json(json['returnValue']) if 'returnValue' in json else None, + function_location=Location.from_json(json['functionLocation']) if json.get('functionLocation', None) is not None else None, + return_value=runtime.RemoteObject.from_json(json['returnValue']) if json.get('returnValue', None) is not None else None, + can_be_restarted=bool(json['canBeRestarted']) if json.get('canBeRestarted', None) is not None else None, ) @@ -221,9 +232,9 @@ def from_json(cls, json: T_JSON_DICT) -> Scope: return cls( type_=str(json['type']), 
object_=runtime.RemoteObject.from_json(json['object']), - name=str(json['name']) if 'name' in json else None, - start_location=Location.from_json(json['startLocation']) if 'startLocation' in json else None, - end_location=Location.from_json(json['endLocation']) if 'endLocation' in json else None, + name=str(json['name']) if json.get('name', None) is not None else None, + start_location=Location.from_json(json['startLocation']) if json.get('startLocation', None) is not None else None, + end_location=Location.from_json(json['endLocation']) if json.get('endLocation', None) is not None else None, ) @@ -280,8 +291,30 @@ def from_json(cls, json: T_JSON_DICT) -> BreakLocation: return cls( script_id=runtime.ScriptId.from_json(json['scriptId']), line_number=int(json['lineNumber']), - column_number=int(json['columnNumber']) if 'columnNumber' in json else None, - type_=str(json['type']) if 'type' in json else None, + column_number=int(json['columnNumber']) if json.get('columnNumber', None) is not None else None, + type_=str(json['type']) if json.get('type', None) is not None else None, + ) + + +@dataclass +class WasmDisassemblyChunk: + #: The next chunk of disassembled lines. + lines: typing.List[str] + + #: The bytecode offsets describing the start of each line. 
+ bytecode_offsets: typing.List[int] + + def to_json(self) -> T_JSON_DICT: + json: T_JSON_DICT = dict() + json['lines'] = [i for i in self.lines] + json['bytecodeOffsets'] = [i for i in self.bytecode_offsets] + return json + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> WasmDisassemblyChunk: + return cls( + lines=[str(i) for i in json['lines']], + bytecode_offsets=[int(i) for i in json['bytecodeOffsets']], ) @@ -322,7 +355,7 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> DebugSymbols: return cls( type_=str(json['type']), - external_url=str(json['externalURL']) if 'externalURL' in json else None, + external_url=str(json['externalURL']) if json.get('externalURL', None) is not None else None, ) @@ -430,7 +463,7 @@ def evaluate_on_call_frame( json = yield cmd_dict return ( runtime.RemoteObject.from_json(json['result']), - runtime.ExceptionDetails.from_json(json['exceptionDetails']) if 'exceptionDetails' in json else None + runtime.ExceptionDetails.from_json(json['exceptionDetails']) if json.get('exceptionDetails', None) is not None else None ) @@ -483,10 +516,65 @@ def get_script_source( json = yield cmd_dict return ( str(json['scriptSource']), - str(json['bytecode']) if 'bytecode' in json else None + str(json['bytecode']) if json.get('bytecode', None) is not None else None + ) + + +def disassemble_wasm_module( + script_id: runtime.ScriptId + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.Optional[str], int, typing.List[int], WasmDisassemblyChunk]]: + ''' + + + **EXPERIMENTAL** + + :param script_id: Id of the script to disassemble + :returns: A tuple with the following items: + + 0. **streamId** - *(Optional)* For large modules, return a stream from which additional chunks of disassembly can be read successively. + 1. **totalNumberOfLines** - The total number of lines in the disassembly text. + 2. **functionBodyOffsets** - The offsets of all function bodies, in the format [start1, end1, start2, end2, ...] 
where all ends are exclusive. + 3. **chunk** - The first chunk of disassembly. + ''' + params: T_JSON_DICT = dict() + params['scriptId'] = script_id.to_json() + cmd_dict: T_JSON_DICT = { + 'method': 'Debugger.disassembleWasmModule', + 'params': params, + } + json = yield cmd_dict + return ( + str(json['streamId']) if json.get('streamId', None) is not None else None, + int(json['totalNumberOfLines']), + [int(i) for i in json['functionBodyOffsets']], + WasmDisassemblyChunk.from_json(json['chunk']) ) +def next_wasm_disassembly_chunk( + stream_id: str + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,WasmDisassemblyChunk]: + ''' + Disassemble the next chunk of lines for the module corresponding to the + stream. If disassembly is complete, this API will invalidate the streamId + and return an empty chunk. Any subsequent calls for the now invalid stream + will return errors. + + **EXPERIMENTAL** + + :param stream_id: + :returns: The next chunk of disassembly. + ''' + params: T_JSON_DICT = dict() + params['streamId'] = stream_id + cmd_dict: T_JSON_DICT = { + 'method': 'Debugger.nextWasmDisassemblyChunk', + 'params': params, + } + json = yield cmd_dict + return WasmDisassemblyChunk.from_json(json['chunk']) + + @deprecated(version="1.3") def get_wasm_bytecode( script_id: runtime.ScriptId @@ -579,16 +667,27 @@ def remove_breakpoint( json = yield cmd_dict -@deprecated(version="1.3") def restart_frame( - call_frame_id: CallFrameId + call_frame_id: CallFrameId, + mode: typing.Optional[str] = None ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.List[CallFrame], typing.Optional[runtime.StackTrace], typing.Optional[runtime.StackTraceId]]]: ''' - Restarts particular call frame from the beginning. + Restarts particular call frame from the beginning. The old, deprecated + behavior of ``restartFrame`` is to stay paused and allow further CDP commands + after a restart was scheduled. 
This can cause problems with restarting, so + we now continue execution immediatly after it has been scheduled until we + reach the beginning of the restarted frame. - .. deprecated:: 1.3 + To stay back-wards compatible, ``restartFrame`` now expects a ``mode`` + parameter to be present. If the ``mode`` parameter is missing, ``restartFrame`` + errors out. + + The various return values are deprecated and ``callFrames`` is always empty. + Use the call frames from the ``Debugger#paused`` events instead, that fires + once V8 pauses at the beginning of the restarted function. :param call_frame_id: Call frame identifier to evaluate on. + :param mode: **(EXPERIMENTAL)** *(Optional)* The ```mode```` parameter must be present and set to 'StepInto', otherwise ````restartFrame``` will error out. :returns: A tuple with the following items: 0. **callFrames** - New stack trace. @@ -597,6 +696,8 @@ def restart_frame( ''' params: T_JSON_DICT = dict() params['callFrameId'] = call_frame_id.to_json() + if mode is not None: + params['mode'] = mode cmd_dict: T_JSON_DICT = { 'method': 'Debugger.restartFrame', 'params': params, @@ -604,8 +705,8 @@ def restart_frame( json = yield cmd_dict return ( [CallFrame.from_json(i) for i in json['callFrames']], - runtime.StackTrace.from_json(json['asyncStackTrace']) if 'asyncStackTrace' in json else None, - runtime.StackTraceId.from_json(json['asyncStackTraceId']) if 'asyncStackTraceId' in json else None + runtime.StackTrace.from_json(json['asyncStackTrace']) if json.get('asyncStackTrace', None) is not None else None, + runtime.StackTraceId.from_json(json['asyncStackTraceId']) if json.get('asyncStackTraceId', None) is not None else None ) @@ -864,8 +965,8 @@ def set_pause_on_exceptions( state: str ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: ''' - Defines pause on exceptions state. Can be set to stop on all exceptions, uncaught exceptions or - no exceptions. Initial pause on exceptions state is ``none``. + Defines pause on exceptions state. 
Can be set to stop on all exceptions, uncaught exceptions, + or caught exceptions, no exceptions. Initial pause on exceptions state is ``none``. :param state: Pause on exceptions mode. ''' @@ -900,38 +1001,50 @@ def set_return_value( def set_script_source( script_id: runtime.ScriptId, script_source: str, - dry_run: typing.Optional[bool] = None - ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.Optional[typing.List[CallFrame]], typing.Optional[bool], typing.Optional[runtime.StackTrace], typing.Optional[runtime.StackTraceId], typing.Optional[runtime.ExceptionDetails]]]: + dry_run: typing.Optional[bool] = None, + allow_top_frame_editing: typing.Optional[bool] = None + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.Optional[typing.List[CallFrame]], typing.Optional[bool], typing.Optional[runtime.StackTrace], typing.Optional[runtime.StackTraceId], str, typing.Optional[runtime.ExceptionDetails]]]: ''' Edits JavaScript source live. + In general, functions that are currently on the stack can not be edited with + a single exception: If the edited function is the top-most stack frame and + that is the only activation of that function on the stack. In this case + the live edit will be successful and a ``Debugger.restartFrame`` for the + top-most function is automatically triggered. + :param script_id: Id of the script to edit. :param script_source: New content of the script. :param dry_run: *(Optional)* If true the change will not actually be applied. Dry run may be used to get result description without actually modifying the code. + :param allow_top_frame_editing: **(EXPERIMENTAL)** *(Optional)* If true, then ```scriptSource```` is allowed to change the function on top of the stack as long as the top-most stack frame is the only activation of that function. :returns: A tuple with the following items: 0. **callFrames** - *(Optional)* New stack trace in case editing has happened while VM was stopped. 1. 
**stackChanged** - *(Optional)* Whether current call stack was modified after applying the changes. 2. **asyncStackTrace** - *(Optional)* Async stack trace, if any. 3. **asyncStackTraceId** - *(Optional)* Async stack trace, if any. - 4. **exceptionDetails** - *(Optional)* Exception details if any. + 4. **status** - Whether the operation was successful or not. Only `` Ok`` denotes a successful live edit while the other enum variants denote why the live edit failed. + 5. **exceptionDetails** - *(Optional)* Exception details if any. Only present when `` status`` is `` CompileError`. ''' params: T_JSON_DICT = dict() params['scriptId'] = script_id.to_json() params['scriptSource'] = script_source if dry_run is not None: params['dryRun'] = dry_run + if allow_top_frame_editing is not None: + params['allowTopFrameEditing'] = allow_top_frame_editing cmd_dict: T_JSON_DICT = { 'method': 'Debugger.setScriptSource', 'params': params, } json = yield cmd_dict return ( - [CallFrame.from_json(i) for i in json['callFrames']] if 'callFrames' in json else None, - bool(json['stackChanged']) if 'stackChanged' in json else None, - runtime.StackTrace.from_json(json['asyncStackTrace']) if 'asyncStackTrace' in json else None, - runtime.StackTraceId.from_json(json['asyncStackTraceId']) if 'asyncStackTraceId' in json else None, - runtime.ExceptionDetails.from_json(json['exceptionDetails']) if 'exceptionDetails' in json else None + [CallFrame.from_json(i) for i in json['callFrames']] if json.get('callFrames', None) is not None else None, + bool(json['stackChanged']) if json.get('stackChanged', None) is not None else None, + runtime.StackTrace.from_json(json['asyncStackTrace']) if json.get('asyncStackTrace', None) is not None else None, + runtime.StackTraceId.from_json(json['asyncStackTraceId']) if json.get('asyncStackTraceId', None) is not None else None, + str(json['status']), + runtime.ExceptionDetails.from_json(json['exceptionDetails']) if json.get('exceptionDetails', None) is not None else 
None ) @@ -1074,11 +1187,11 @@ def from_json(cls, json: T_JSON_DICT) -> Paused: return cls( call_frames=[CallFrame.from_json(i) for i in json['callFrames']], reason=str(json['reason']), - data=dict(json['data']) if 'data' in json else None, - hit_breakpoints=[str(i) for i in json['hitBreakpoints']] if 'hitBreakpoints' in json else None, - async_stack_trace=runtime.StackTrace.from_json(json['asyncStackTrace']) if 'asyncStackTrace' in json else None, - async_stack_trace_id=runtime.StackTraceId.from_json(json['asyncStackTraceId']) if 'asyncStackTraceId' in json else None, - async_call_stack_trace_id=runtime.StackTraceId.from_json(json['asyncCallStackTraceId']) if 'asyncCallStackTraceId' in json else None + data=dict(json['data']) if json.get('data', None) is not None else None, + hit_breakpoints=[str(i) for i in json['hitBreakpoints']] if json.get('hitBreakpoints', None) is not None else None, + async_stack_trace=runtime.StackTrace.from_json(json['asyncStackTrace']) if json.get('asyncStackTrace', None) is not None else None, + async_stack_trace_id=runtime.StackTraceId.from_json(json['asyncStackTraceId']) if json.get('asyncStackTraceId', None) is not None else None, + async_call_stack_trace_id=runtime.StackTraceId.from_json(json['asyncCallStackTraceId']) if json.get('asyncCallStackTraceId', None) is not None else None ) @@ -1117,7 +1230,7 @@ class ScriptFailedToParse: end_column: int #: Specifies script creation context. execution_context_id: runtime.ExecutionContextId - #: Content hash of the script. + #: Content hash of the script, SHA-256. hash_: str #: Embedder-specific auxiliary data. 
execution_context_aux_data: typing.Optional[dict] @@ -1149,15 +1262,15 @@ def from_json(cls, json: T_JSON_DICT) -> ScriptFailedToParse: end_column=int(json['endColumn']), execution_context_id=runtime.ExecutionContextId.from_json(json['executionContextId']), hash_=str(json['hash']), - execution_context_aux_data=dict(json['executionContextAuxData']) if 'executionContextAuxData' in json else None, - source_map_url=str(json['sourceMapURL']) if 'sourceMapURL' in json else None, - has_source_url=bool(json['hasSourceURL']) if 'hasSourceURL' in json else None, - is_module=bool(json['isModule']) if 'isModule' in json else None, - length=int(json['length']) if 'length' in json else None, - stack_trace=runtime.StackTrace.from_json(json['stackTrace']) if 'stackTrace' in json else None, - code_offset=int(json['codeOffset']) if 'codeOffset' in json else None, - script_language=ScriptLanguage.from_json(json['scriptLanguage']) if 'scriptLanguage' in json else None, - embedder_name=str(json['embedderName']) if 'embedderName' in json else None + execution_context_aux_data=dict(json['executionContextAuxData']) if json.get('executionContextAuxData', None) is not None else None, + source_map_url=str(json['sourceMapURL']) if json.get('sourceMapURL', None) is not None else None, + has_source_url=bool(json['hasSourceURL']) if json.get('hasSourceURL', None) is not None else None, + is_module=bool(json['isModule']) if json.get('isModule', None) is not None else None, + length=int(json['length']) if json.get('length', None) is not None else None, + stack_trace=runtime.StackTrace.from_json(json['stackTrace']) if json.get('stackTrace', None) is not None else None, + code_offset=int(json['codeOffset']) if json.get('codeOffset', None) is not None else None, + script_language=ScriptLanguage.from_json(json['scriptLanguage']) if json.get('scriptLanguage', None) is not None else None, + embedder_name=str(json['embedderName']) if json.get('embedderName', None) is not None else None ) @@ -1182,7 
+1295,7 @@ class ScriptParsed: end_column: int #: Specifies script creation context. execution_context_id: runtime.ExecutionContextId - #: Content hash of the script. + #: Content hash of the script, SHA-256. hash_: str #: Embedder-specific auxiliary data. execution_context_aux_data: typing.Optional[dict] @@ -1218,15 +1331,15 @@ def from_json(cls, json: T_JSON_DICT) -> ScriptParsed: end_column=int(json['endColumn']), execution_context_id=runtime.ExecutionContextId.from_json(json['executionContextId']), hash_=str(json['hash']), - execution_context_aux_data=dict(json['executionContextAuxData']) if 'executionContextAuxData' in json else None, - is_live_edit=bool(json['isLiveEdit']) if 'isLiveEdit' in json else None, - source_map_url=str(json['sourceMapURL']) if 'sourceMapURL' in json else None, - has_source_url=bool(json['hasSourceURL']) if 'hasSourceURL' in json else None, - is_module=bool(json['isModule']) if 'isModule' in json else None, - length=int(json['length']) if 'length' in json else None, - stack_trace=runtime.StackTrace.from_json(json['stackTrace']) if 'stackTrace' in json else None, - code_offset=int(json['codeOffset']) if 'codeOffset' in json else None, - script_language=ScriptLanguage.from_json(json['scriptLanguage']) if 'scriptLanguage' in json else None, - debug_symbols=DebugSymbols.from_json(json['debugSymbols']) if 'debugSymbols' in json else None, - embedder_name=str(json['embedderName']) if 'embedderName' in json else None + execution_context_aux_data=dict(json['executionContextAuxData']) if json.get('executionContextAuxData', None) is not None else None, + is_live_edit=bool(json['isLiveEdit']) if json.get('isLiveEdit', None) is not None else None, + source_map_url=str(json['sourceMapURL']) if json.get('sourceMapURL', None) is not None else None, + has_source_url=bool(json['hasSourceURL']) if json.get('hasSourceURL', None) is not None else None, + is_module=bool(json['isModule']) if json.get('isModule', None) is not None else None, + 
length=int(json['length']) if json.get('length', None) is not None else None, + stack_trace=runtime.StackTrace.from_json(json['stackTrace']) if json.get('stackTrace', None) is not None else None, + code_offset=int(json['codeOffset']) if json.get('codeOffset', None) is not None else None, + script_language=ScriptLanguage.from_json(json['scriptLanguage']) if json.get('scriptLanguage', None) is not None else None, + debug_symbols=DebugSymbols.from_json(json['debugSymbols']) if json.get('debugSymbols', None) is not None else None, + embedder_name=str(json['embedderName']) if json.get('embedderName', None) is not None else None ) diff --git a/pycdp/cdp/dom.py b/pycdp/cdp/dom.py index dd5a8ee..e8ebf69 100644 --- a/pycdp/cdp/dom.py +++ b/pycdp/cdp/dom.py @@ -100,10 +100,11 @@ class PseudoType(enum.Enum): SCROLLBAR_CORNER = "scrollbar-corner" RESIZER = "resizer" INPUT_LIST_BUTTON = "input-list-button" - TRANSITION = "transition" - TRANSITION_CONTAINER = "transition-container" - TRANSITION_OLD_CONTENT = "transition-old-content" - TRANSITION_NEW_CONTENT = "transition-new-content" + VIEW_TRANSITION = "view-transition" + VIEW_TRANSITION_GROUP = "view-transition-group" + VIEW_TRANSITION_IMAGE_PAIR = "view-transition-image-pair" + VIEW_TRANSITION_OLD = "view-transition-old" + VIEW_TRANSITION_NEW = "view-transition-new" def to_json(self) -> str: return self.value @@ -145,6 +146,38 @@ def from_json(cls, json: str) -> CompatibilityMode: return cls(json) +class PhysicalAxes(enum.Enum): + ''' + ContainerSelector physical axes + ''' + HORIZONTAL = "Horizontal" + VERTICAL = "Vertical" + BOTH = "Both" + + def to_json(self) -> str: + return self.value + + @classmethod + def from_json(cls, json: str) -> PhysicalAxes: + return cls(json) + + +class LogicalAxes(enum.Enum): + ''' + ContainerSelector logical axes + ''' + INLINE = "Inline" + BLOCK = "Block" + BOTH = "Both" + + def to_json(self) -> str: + return self.value + + @classmethod + def from_json(cls, json: str) -> LogicalAxes: + return 
cls(json) + + @dataclass class Node: ''' @@ -210,6 +243,10 @@ class Node: #: Pseudo element type for this node. pseudo_type: typing.Optional[PseudoType] = None + #: Pseudo element identifier for this node. Only present if there is a + #: valid pseudoType. + pseudo_identifier: typing.Optional[str] = None + #: Shadow root type. shadow_root_type: typing.Optional[ShadowRootType] = None @@ -241,6 +278,8 @@ class Node: compatibility_mode: typing.Optional[CompatibilityMode] = None + assigned_slot: typing.Optional[BackendNode] = None + def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() json['nodeId'] = self.node_id.to_json() @@ -275,6 +314,8 @@ def to_json(self) -> T_JSON_DICT: json['value'] = self.value if self.pseudo_type is not None: json['pseudoType'] = self.pseudo_type.to_json() + if self.pseudo_identifier is not None: + json['pseudoIdentifier'] = self.pseudo_identifier if self.shadow_root_type is not None: json['shadowRootType'] = self.shadow_root_type.to_json() if self.frame_id is not None: @@ -295,6 +336,8 @@ def to_json(self) -> T_JSON_DICT: json['isSVG'] = self.is_svg if self.compatibility_mode is not None: json['compatibilityMode'] = self.compatibility_mode.to_json() + if self.assigned_slot is not None: + json['assignedSlot'] = self.assigned_slot.to_json() return json @classmethod @@ -306,29 +349,31 @@ def from_json(cls, json: T_JSON_DICT) -> Node: node_name=str(json['nodeName']), local_name=str(json['localName']), node_value=str(json['nodeValue']), - parent_id=NodeId.from_json(json['parentId']) if 'parentId' in json else None, - child_node_count=int(json['childNodeCount']) if 'childNodeCount' in json else None, - children=[Node.from_json(i) for i in json['children']] if 'children' in json else None, - attributes=[str(i) for i in json['attributes']] if 'attributes' in json else None, - document_url=str(json['documentURL']) if 'documentURL' in json else None, - base_url=str(json['baseURL']) if 'baseURL' in json else None, - 
public_id=str(json['publicId']) if 'publicId' in json else None, - system_id=str(json['systemId']) if 'systemId' in json else None, - internal_subset=str(json['internalSubset']) if 'internalSubset' in json else None, - xml_version=str(json['xmlVersion']) if 'xmlVersion' in json else None, - name=str(json['name']) if 'name' in json else None, - value=str(json['value']) if 'value' in json else None, - pseudo_type=PseudoType.from_json(json['pseudoType']) if 'pseudoType' in json else None, - shadow_root_type=ShadowRootType.from_json(json['shadowRootType']) if 'shadowRootType' in json else None, - frame_id=page.FrameId.from_json(json['frameId']) if 'frameId' in json else None, - content_document=Node.from_json(json['contentDocument']) if 'contentDocument' in json else None, - shadow_roots=[Node.from_json(i) for i in json['shadowRoots']] if 'shadowRoots' in json else None, - template_content=Node.from_json(json['templateContent']) if 'templateContent' in json else None, - pseudo_elements=[Node.from_json(i) for i in json['pseudoElements']] if 'pseudoElements' in json else None, - imported_document=Node.from_json(json['importedDocument']) if 'importedDocument' in json else None, - distributed_nodes=[BackendNode.from_json(i) for i in json['distributedNodes']] if 'distributedNodes' in json else None, - is_svg=bool(json['isSVG']) if 'isSVG' in json else None, - compatibility_mode=CompatibilityMode.from_json(json['compatibilityMode']) if 'compatibilityMode' in json else None, + parent_id=NodeId.from_json(json['parentId']) if json.get('parentId', None) is not None else None, + child_node_count=int(json['childNodeCount']) if json.get('childNodeCount', None) is not None else None, + children=[Node.from_json(i) for i in json['children']] if json.get('children', None) is not None else None, + attributes=[str(i) for i in json['attributes']] if json.get('attributes', None) is not None else None, + document_url=str(json['documentURL']) if json.get('documentURL', None) is not None else 
None, + base_url=str(json['baseURL']) if json.get('baseURL', None) is not None else None, + public_id=str(json['publicId']) if json.get('publicId', None) is not None else None, + system_id=str(json['systemId']) if json.get('systemId', None) is not None else None, + internal_subset=str(json['internalSubset']) if json.get('internalSubset', None) is not None else None, + xml_version=str(json['xmlVersion']) if json.get('xmlVersion', None) is not None else None, + name=str(json['name']) if json.get('name', None) is not None else None, + value=str(json['value']) if json.get('value', None) is not None else None, + pseudo_type=PseudoType.from_json(json['pseudoType']) if json.get('pseudoType', None) is not None else None, + pseudo_identifier=str(json['pseudoIdentifier']) if json.get('pseudoIdentifier', None) is not None else None, + shadow_root_type=ShadowRootType.from_json(json['shadowRootType']) if json.get('shadowRootType', None) is not None else None, + frame_id=page.FrameId.from_json(json['frameId']) if json.get('frameId', None) is not None else None, + content_document=Node.from_json(json['contentDocument']) if json.get('contentDocument', None) is not None else None, + shadow_roots=[Node.from_json(i) for i in json['shadowRoots']] if json.get('shadowRoots', None) is not None else None, + template_content=Node.from_json(json['templateContent']) if json.get('templateContent', None) is not None else None, + pseudo_elements=[Node.from_json(i) for i in json['pseudoElements']] if json.get('pseudoElements', None) is not None else None, + imported_document=Node.from_json(json['importedDocument']) if json.get('importedDocument', None) is not None else None, + distributed_nodes=[BackendNode.from_json(i) for i in json['distributedNodes']] if json.get('distributedNodes', None) is not None else None, + is_svg=bool(json['isSVG']) if json.get('isSVG', None) is not None else None, + compatibility_mode=CompatibilityMode.from_json(json['compatibilityMode']) if 
json.get('compatibilityMode', None) is not None else None, + assigned_slot=BackendNode.from_json(json['assignedSlot']) if json.get('assignedSlot', None) is not None else None, ) @@ -364,7 +409,7 @@ def from_json(cls, json: T_JSON_DICT) -> RGBA: r=int(json['r']), g=int(json['g']), b=int(json['b']), - a=float(json['a']) if 'a' in json else None, + a=float(json['a']) if json.get('a', None) is not None else None, ) @@ -430,7 +475,7 @@ def from_json(cls, json: T_JSON_DICT) -> BoxModel: margin=Quad.from_json(json['margin']), width=int(json['width']), height=int(json['height']), - shape_outside=ShapeOutsideInfo.from_json(json['shapeOutside']) if 'shapeOutside' in json else None, + shape_outside=ShapeOutsideInfo.from_json(json['shapeOutside']) if json.get('shapeOutside', None) is not None else None, ) @@ -800,6 +845,7 @@ def get_document( ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,Node]: ''' Returns the root DOM node (and optionally the subtree) to the caller. + Implicitly enables the DOM domain events for the current target. :param depth: *(Optional)* The maximum depth at which children should be retrieved, defaults to 1. Use -1 for the entire subtree or provide an integer larger than 0. :param pierce: *(Optional)* Whether or not iframes and shadow roots should be traversed when returning the subtree (default is false). @@ -910,7 +956,7 @@ def get_node_for_location( return ( BackendNodeId.from_json(json['backendNodeId']), page.FrameId.from_json(json['frameId']), - NodeId.from_json(json['nodeId']) if 'nodeId' in json else None + NodeId.from_json(json['nodeId']) if json.get('nodeId', None) is not None else None ) @@ -1177,6 +1223,23 @@ def query_selector_all( return [NodeId.from_json(i) for i in json['nodeIds']] +def get_top_layer_elements() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[NodeId]]: + ''' + Returns NodeIds of current top layer elements. 
+ Top layer is rendered closest to the user within a viewport, therefore its elements always + appear on top of all other content. + + **EXPERIMENTAL** + + :returns: NodeIds of top layer elements + ''' + cmd_dict: T_JSON_DICT = { + 'method': 'DOM.getTopLayerElements', + } + json = yield cmd_dict + return [NodeId.from_json(i) for i in json['nodeIds']] + + def redo() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: ''' Re-does the last undone action. @@ -1420,7 +1483,7 @@ def get_node_stack_traces( 'params': params, } json = yield cmd_dict - return runtime.StackTrace.from_json(json['creation']) if 'creation' in json else None + return runtime.StackTrace.from_json(json['creation']) if json.get('creation', None) is not None else None def get_file_info( @@ -1562,35 +1625,44 @@ def get_frame_owner( json = yield cmd_dict return ( BackendNodeId.from_json(json['backendNodeId']), - NodeId.from_json(json['nodeId']) if 'nodeId' in json else None + NodeId.from_json(json['nodeId']) if json.get('nodeId', None) is not None else None ) def get_container_for_node( node_id: NodeId, - container_name: typing.Optional[str] = None + container_name: typing.Optional[str] = None, + physical_axes: typing.Optional[PhysicalAxes] = None, + logical_axes: typing.Optional[LogicalAxes] = None ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Optional[NodeId]]: ''' - Returns the container of the given node based on container query conditions. - If containerName is given, it will find the nearest container with a matching name; - otherwise it will find the nearest container regardless of its container name. + Returns the query container of the given node based on container query + conditions: containerName, physical, and logical axes. If no axes are + provided, the style container is returned, which is the direct parent or the + closest element with a matching container-name. 
**EXPERIMENTAL** :param node_id: :param container_name: *(Optional)* + :param physical_axes: *(Optional)* + :param logical_axes: *(Optional)* :returns: *(Optional)* The container node for the given node, or null if not found. ''' params: T_JSON_DICT = dict() params['nodeId'] = node_id.to_json() if container_name is not None: params['containerName'] = container_name + if physical_axes is not None: + params['physicalAxes'] = physical_axes.to_json() + if logical_axes is not None: + params['logicalAxes'] = logical_axes.to_json() cmd_dict: T_JSON_DICT = { 'method': 'DOM.getContainerForNode', 'params': params, } json = yield cmd_dict - return NodeId.from_json(json['nodeId']) if 'nodeId' in json else None + return NodeId.from_json(json['nodeId']) if json.get('nodeId', None) is not None else None def get_querying_descendants_for_container( @@ -1702,7 +1774,7 @@ class ChildNodeInserted: ''' #: Id of the node that has changed. parent_node_id: NodeId - #: If of the previous siblint. + #: Id of the previous sibling. previous_node_id: NodeId #: Inserted node data. node: Node @@ -1810,6 +1882,23 @@ def from_json(cls, json: T_JSON_DICT) -> PseudoElementAdded: ) +@event_class('DOM.topLayerElementsUpdated') +@dataclass +class TopLayerElementsUpdated: + ''' + **EXPERIMENTAL** + + Called when top layer elements are changed. 
+ ''' + + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> TopLayerElementsUpdated: + return cls( + + ) + + @event_class('DOM.pseudoElementRemoved') @dataclass class PseudoElementRemoved: diff --git a/pycdp/cdp/dom_debugger.py b/pycdp/cdp/dom_debugger.py index 19fd2bc..624776a 100644 --- a/pycdp/cdp/dom_debugger.py +++ b/pycdp/cdp/dom_debugger.py @@ -108,9 +108,9 @@ def from_json(cls, json: T_JSON_DICT) -> EventListener: script_id=runtime.ScriptId.from_json(json['scriptId']), line_number=int(json['lineNumber']), column_number=int(json['columnNumber']), - handler=runtime.RemoteObject.from_json(json['handler']) if 'handler' in json else None, - original_handler=runtime.RemoteObject.from_json(json['originalHandler']) if 'originalHandler' in json else None, - backend_node_id=dom.BackendNodeId.from_json(json['backendNodeId']) if 'backendNodeId' in json else None, + handler=runtime.RemoteObject.from_json(json['handler']) if json.get('handler', None) is not None else None, + original_handler=runtime.RemoteObject.from_json(json['originalHandler']) if json.get('originalHandler', None) is not None else None, + backend_node_id=dom.BackendNodeId.from_json(json['backendNodeId']) if json.get('backendNodeId', None) is not None else None, ) diff --git a/pycdp/cdp/dom_snapshot.py b/pycdp/cdp/dom_snapshot.py index c9e2513..017c219 100644 --- a/pycdp/cdp/dom_snapshot.py +++ b/pycdp/cdp/dom_snapshot.py @@ -174,30 +174,30 @@ def from_json(cls, json: T_JSON_DICT) -> DOMNode: node_name=str(json['nodeName']), node_value=str(json['nodeValue']), backend_node_id=dom.BackendNodeId.from_json(json['backendNodeId']), - text_value=str(json['textValue']) if 'textValue' in json else None, - input_value=str(json['inputValue']) if 'inputValue' in json else None, - input_checked=bool(json['inputChecked']) if 'inputChecked' in json else None, - option_selected=bool(json['optionSelected']) if 'optionSelected' in json else None, - child_node_indexes=[int(i) for i in json['childNodeIndexes']] if 
'childNodeIndexes' in json else None, - attributes=[NameValue.from_json(i) for i in json['attributes']] if 'attributes' in json else None, - pseudo_element_indexes=[int(i) for i in json['pseudoElementIndexes']] if 'pseudoElementIndexes' in json else None, - layout_node_index=int(json['layoutNodeIndex']) if 'layoutNodeIndex' in json else None, - document_url=str(json['documentURL']) if 'documentURL' in json else None, - base_url=str(json['baseURL']) if 'baseURL' in json else None, - content_language=str(json['contentLanguage']) if 'contentLanguage' in json else None, - document_encoding=str(json['documentEncoding']) if 'documentEncoding' in json else None, - public_id=str(json['publicId']) if 'publicId' in json else None, - system_id=str(json['systemId']) if 'systemId' in json else None, - frame_id=page.FrameId.from_json(json['frameId']) if 'frameId' in json else None, - content_document_index=int(json['contentDocumentIndex']) if 'contentDocumentIndex' in json else None, - pseudo_type=dom.PseudoType.from_json(json['pseudoType']) if 'pseudoType' in json else None, - shadow_root_type=dom.ShadowRootType.from_json(json['shadowRootType']) if 'shadowRootType' in json else None, - is_clickable=bool(json['isClickable']) if 'isClickable' in json else None, - event_listeners=[dom_debugger.EventListener.from_json(i) for i in json['eventListeners']] if 'eventListeners' in json else None, - current_source_url=str(json['currentSourceURL']) if 'currentSourceURL' in json else None, - origin_url=str(json['originURL']) if 'originURL' in json else None, - scroll_offset_x=float(json['scrollOffsetX']) if 'scrollOffsetX' in json else None, - scroll_offset_y=float(json['scrollOffsetY']) if 'scrollOffsetY' in json else None, + text_value=str(json['textValue']) if json.get('textValue', None) is not None else None, + input_value=str(json['inputValue']) if json.get('inputValue', None) is not None else None, + input_checked=bool(json['inputChecked']) if json.get('inputChecked', None) is not 
None else None, + option_selected=bool(json['optionSelected']) if json.get('optionSelected', None) is not None else None, + child_node_indexes=[int(i) for i in json['childNodeIndexes']] if json.get('childNodeIndexes', None) is not None else None, + attributes=[NameValue.from_json(i) for i in json['attributes']] if json.get('attributes', None) is not None else None, + pseudo_element_indexes=[int(i) for i in json['pseudoElementIndexes']] if json.get('pseudoElementIndexes', None) is not None else None, + layout_node_index=int(json['layoutNodeIndex']) if json.get('layoutNodeIndex', None) is not None else None, + document_url=str(json['documentURL']) if json.get('documentURL', None) is not None else None, + base_url=str(json['baseURL']) if json.get('baseURL', None) is not None else None, + content_language=str(json['contentLanguage']) if json.get('contentLanguage', None) is not None else None, + document_encoding=str(json['documentEncoding']) if json.get('documentEncoding', None) is not None else None, + public_id=str(json['publicId']) if json.get('publicId', None) is not None else None, + system_id=str(json['systemId']) if json.get('systemId', None) is not None else None, + frame_id=page.FrameId.from_json(json['frameId']) if json.get('frameId', None) is not None else None, + content_document_index=int(json['contentDocumentIndex']) if json.get('contentDocumentIndex', None) is not None else None, + pseudo_type=dom.PseudoType.from_json(json['pseudoType']) if json.get('pseudoType', None) is not None else None, + shadow_root_type=dom.ShadowRootType.from_json(json['shadowRootType']) if json.get('shadowRootType', None) is not None else None, + is_clickable=bool(json['isClickable']) if json.get('isClickable', None) is not None else None, + event_listeners=[dom_debugger.EventListener.from_json(i) for i in json['eventListeners']] if json.get('eventListeners', None) is not None else None, + current_source_url=str(json['currentSourceURL']) if json.get('currentSourceURL', None) is 
not None else None, + origin_url=str(json['originURL']) if json.get('originURL', None) is not None else None, + scroll_offset_x=float(json['scrollOffsetX']) if json.get('scrollOffsetX', None) is not None else None, + scroll_offset_y=float(json['scrollOffsetY']) if json.get('scrollOffsetY', None) is not None else None, ) @@ -283,11 +283,11 @@ def from_json(cls, json: T_JSON_DICT) -> LayoutTreeNode: return cls( dom_node_index=int(json['domNodeIndex']), bounding_box=dom.Rect.from_json(json['boundingBox']), - layout_text=str(json['layoutText']) if 'layoutText' in json else None, - inline_text_nodes=[InlineTextBox.from_json(i) for i in json['inlineTextNodes']] if 'inlineTextNodes' in json else None, - style_index=int(json['styleIndex']) if 'styleIndex' in json else None, - paint_order=int(json['paintOrder']) if 'paintOrder' in json else None, - is_stacking_context=bool(json['isStackingContext']) if 'isStackingContext' in json else None, + layout_text=str(json['layoutText']) if json.get('layoutText', None) is not None else None, + inline_text_nodes=[InlineTextBox.from_json(i) for i in json['inlineTextNodes']] if json.get('inlineTextNodes', None) is not None else None, + style_index=int(json['styleIndex']) if json.get('styleIndex', None) is not None else None, + paint_order=int(json['paintOrder']) if json.get('paintOrder', None) is not None else None, + is_stacking_context=bool(json['isStackingContext']) if json.get('isStackingContext', None) is not None else None, ) @@ -524,10 +524,10 @@ def from_json(cls, json: T_JSON_DICT) -> DocumentSnapshot: nodes=NodeTreeSnapshot.from_json(json['nodes']), layout=LayoutTreeSnapshot.from_json(json['layout']), text_boxes=TextBoxSnapshot.from_json(json['textBoxes']), - scroll_offset_x=float(json['scrollOffsetX']) if 'scrollOffsetX' in json else None, - scroll_offset_y=float(json['scrollOffsetY']) if 'scrollOffsetY' in json else None, - content_width=float(json['contentWidth']) if 'contentWidth' in json else None, - 
content_height=float(json['contentHeight']) if 'contentHeight' in json else None, + scroll_offset_x=float(json['scrollOffsetX']) if json.get('scrollOffsetX', None) is not None else None, + scroll_offset_y=float(json['scrollOffsetY']) if json.get('scrollOffsetY', None) is not None else None, + content_width=float(json['contentWidth']) if json.get('contentWidth', None) is not None else None, + content_height=float(json['contentHeight']) if json.get('contentHeight', None) is not None else None, ) @@ -575,6 +575,10 @@ class NodeTreeSnapshot: #: Type of a pseudo element node. pseudo_type: typing.Optional[RareStringData] = None + #: Pseudo element identifier for this node. Only present if there is a + #: valid pseudoType. + pseudo_identifier: typing.Optional[RareStringData] = None + #: Whether this DOM node responds to mouse clicks. This includes nodes that have had click #: event listeners attached via JavaScript as well as anchor tags that naturally navigate when #: clicked. @@ -614,6 +618,8 @@ def to_json(self) -> T_JSON_DICT: json['contentDocumentIndex'] = self.content_document_index.to_json() if self.pseudo_type is not None: json['pseudoType'] = self.pseudo_type.to_json() + if self.pseudo_identifier is not None: + json['pseudoIdentifier'] = self.pseudo_identifier.to_json() if self.is_clickable is not None: json['isClickable'] = self.is_clickable.to_json() if self.current_source_url is not None: @@ -625,22 +631,23 @@ def to_json(self) -> T_JSON_DICT: @classmethod def from_json(cls, json: T_JSON_DICT) -> NodeTreeSnapshot: return cls( - parent_index=[int(i) for i in json['parentIndex']] if 'parentIndex' in json else None, - node_type=[int(i) for i in json['nodeType']] if 'nodeType' in json else None, - shadow_root_type=RareStringData.from_json(json['shadowRootType']) if 'shadowRootType' in json else None, - node_name=[StringIndex.from_json(i) for i in json['nodeName']] if 'nodeName' in json else None, - node_value=[StringIndex.from_json(i) for i in json['nodeValue']] 
if 'nodeValue' in json else None, - backend_node_id=[dom.BackendNodeId.from_json(i) for i in json['backendNodeId']] if 'backendNodeId' in json else None, - attributes=[ArrayOfStrings.from_json(i) for i in json['attributes']] if 'attributes' in json else None, - text_value=RareStringData.from_json(json['textValue']) if 'textValue' in json else None, - input_value=RareStringData.from_json(json['inputValue']) if 'inputValue' in json else None, - input_checked=RareBooleanData.from_json(json['inputChecked']) if 'inputChecked' in json else None, - option_selected=RareBooleanData.from_json(json['optionSelected']) if 'optionSelected' in json else None, - content_document_index=RareIntegerData.from_json(json['contentDocumentIndex']) if 'contentDocumentIndex' in json else None, - pseudo_type=RareStringData.from_json(json['pseudoType']) if 'pseudoType' in json else None, - is_clickable=RareBooleanData.from_json(json['isClickable']) if 'isClickable' in json else None, - current_source_url=RareStringData.from_json(json['currentSourceURL']) if 'currentSourceURL' in json else None, - origin_url=RareStringData.from_json(json['originURL']) if 'originURL' in json else None, + parent_index=[int(i) for i in json['parentIndex']] if json.get('parentIndex', None) is not None else None, + node_type=[int(i) for i in json['nodeType']] if json.get('nodeType', None) is not None else None, + shadow_root_type=RareStringData.from_json(json['shadowRootType']) if json.get('shadowRootType', None) is not None else None, + node_name=[StringIndex.from_json(i) for i in json['nodeName']] if json.get('nodeName', None) is not None else None, + node_value=[StringIndex.from_json(i) for i in json['nodeValue']] if json.get('nodeValue', None) is not None else None, + backend_node_id=[dom.BackendNodeId.from_json(i) for i in json['backendNodeId']] if json.get('backendNodeId', None) is not None else None, + attributes=[ArrayOfStrings.from_json(i) for i in json['attributes']] if json.get('attributes', None) is not 
None else None, + text_value=RareStringData.from_json(json['textValue']) if json.get('textValue', None) is not None else None, + input_value=RareStringData.from_json(json['inputValue']) if json.get('inputValue', None) is not None else None, + input_checked=RareBooleanData.from_json(json['inputChecked']) if json.get('inputChecked', None) is not None else None, + option_selected=RareBooleanData.from_json(json['optionSelected']) if json.get('optionSelected', None) is not None else None, + content_document_index=RareIntegerData.from_json(json['contentDocumentIndex']) if json.get('contentDocumentIndex', None) is not None else None, + pseudo_type=RareStringData.from_json(json['pseudoType']) if json.get('pseudoType', None) is not None else None, + pseudo_identifier=RareStringData.from_json(json['pseudoIdentifier']) if json.get('pseudoIdentifier', None) is not None else None, + is_clickable=RareBooleanData.from_json(json['isClickable']) if json.get('isClickable', None) is not None else None, + current_source_url=RareStringData.from_json(json['currentSourceURL']) if json.get('currentSourceURL', None) is not None else None, + origin_url=RareStringData.from_json(json['originURL']) if json.get('originURL', None) is not None else None, ) @@ -713,12 +720,12 @@ def from_json(cls, json: T_JSON_DICT) -> LayoutTreeSnapshot: bounds=[Rectangle.from_json(i) for i in json['bounds']], text=[StringIndex.from_json(i) for i in json['text']], stacking_contexts=RareBooleanData.from_json(json['stackingContexts']), - paint_orders=[int(i) for i in json['paintOrders']] if 'paintOrders' in json else None, - offset_rects=[Rectangle.from_json(i) for i in json['offsetRects']] if 'offsetRects' in json else None, - scroll_rects=[Rectangle.from_json(i) for i in json['scrollRects']] if 'scrollRects' in json else None, - client_rects=[Rectangle.from_json(i) for i in json['clientRects']] if 'clientRects' in json else None, - blended_background_colors=[StringIndex.from_json(i) for i in 
json['blendedBackgroundColors']] if 'blendedBackgroundColors' in json else None, - text_color_opacities=[float(i) for i in json['textColorOpacities']] if 'textColorOpacities' in json else None, + paint_orders=[int(i) for i in json['paintOrders']] if json.get('paintOrders', None) is not None else None, + offset_rects=[Rectangle.from_json(i) for i in json['offsetRects']] if json.get('offsetRects', None) is not None else None, + scroll_rects=[Rectangle.from_json(i) for i in json['scrollRects']] if json.get('scrollRects', None) is not None else None, + client_rects=[Rectangle.from_json(i) for i in json['clientRects']] if json.get('clientRects', None) is not None else None, + blended_background_colors=[StringIndex.from_json(i) for i in json['blendedBackgroundColors']] if json.get('blendedBackgroundColors', None) is not None else None, + text_color_opacities=[float(i) for i in json['textColorOpacities']] if json.get('textColorOpacities', None) is not None else None, ) diff --git a/pycdp/cdp/dom_storage.py b/pycdp/cdp/dom_storage.py index ca23937..9df6516 100644 --- a/pycdp/cdp/dom_storage.py +++ b/pycdp/cdp/dom_storage.py @@ -12,28 +12,47 @@ from .util import event_class, T_JSON_DICT +class SerializedStorageKey(str): + def to_json(self) -> str: + return self + + @classmethod + def from_json(cls, json: str) -> SerializedStorageKey: + return cls(json) + + def __repr__(self): + return 'SerializedStorageKey({})'.format(super().__repr__()) + + @dataclass class StorageId: ''' DOM Storage identifier. ''' - #: Security origin for the storage. - security_origin: str - #: Whether the storage is local storage (not session storage). is_local_storage: bool + #: Security origin for the storage. 
+ security_origin: typing.Optional[str] = None + + #: Represents a key by which DOM Storage keys its CachedStorageAreas + storage_key: typing.Optional[SerializedStorageKey] = None + def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() - json['securityOrigin'] = self.security_origin json['isLocalStorage'] = self.is_local_storage + if self.security_origin is not None: + json['securityOrigin'] = self.security_origin + if self.storage_key is not None: + json['storageKey'] = self.storage_key.to_json() return json @classmethod def from_json(cls, json: T_JSON_DICT) -> StorageId: return cls( - security_origin=str(json['securityOrigin']), is_local_storage=bool(json['isLocalStorage']), + security_origin=str(json['securityOrigin']) if json.get('securityOrigin', None) is not None else None, + storage_key=SerializedStorageKey.from_json(json['storageKey']) if json.get('storageKey', None) is not None else None, ) diff --git a/pycdp/cdp/emulation.py b/pycdp/cdp/emulation.py index 0fd4269..b24dc28 100644 --- a/pycdp/cdp/emulation.py +++ b/pycdp/cdp/emulation.py @@ -150,12 +150,18 @@ class UserAgentMetadata: mobile: bool + #: Brands appearing in Sec-CH-UA. brands: typing.Optional[typing.List[UserAgentBrandVersion]] = None + #: Brands appearing in Sec-CH-UA-Full-Version-List. 
full_version_list: typing.Optional[typing.List[UserAgentBrandVersion]] = None full_version: typing.Optional[str] = None + bitness: typing.Optional[str] = None + + wow64: typing.Optional[bool] = None + def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() json['platform'] = self.platform @@ -169,6 +175,10 @@ def to_json(self) -> T_JSON_DICT: json['fullVersionList'] = [i.to_json() for i in self.full_version_list] if self.full_version is not None: json['fullVersion'] = self.full_version + if self.bitness is not None: + json['bitness'] = self.bitness + if self.wow64 is not None: + json['wow64'] = self.wow64 return json @classmethod @@ -179,9 +189,11 @@ def from_json(cls, json: T_JSON_DICT) -> UserAgentMetadata: architecture=str(json['architecture']), model=str(json['model']), mobile=bool(json['mobile']), - brands=[UserAgentBrandVersion.from_json(i) for i in json['brands']] if 'brands' in json else None, - full_version_list=[UserAgentBrandVersion.from_json(i) for i in json['fullVersionList']] if 'fullVersionList' in json else None, - full_version=str(json['fullVersion']) if 'fullVersion' in json else None, + brands=[UserAgentBrandVersion.from_json(i) for i in json['brands']] if json.get('brands', None) is not None else None, + full_version_list=[UserAgentBrandVersion.from_json(i) for i in json['fullVersionList']] if json.get('fullVersionList', None) is not None else None, + full_version=str(json['fullVersion']) if json.get('fullVersion', None) is not None else None, + bitness=str(json['bitness']) if json.get('bitness', None) is not None else None, + wow64=bool(json['wow64']) if json.get('wow64', None) is not None else None, ) @@ -190,7 +202,6 @@ class DisabledImageType(enum.Enum): Enum of image types that can be disabled. ''' AVIF = "avif" - JXL = "jxl" WEBP = "webp" def to_json(self) -> str: @@ -478,7 +489,7 @@ def set_emulated_vision_deficiency( **EXPERIMENTAL** - :param type_: Vision deficiency to emulate. + :param type_: Vision deficiency to emulate. 
Order: best-effort emulations come first, followed by any physiologically accurate emulations for medically recognized color vision deficiencies. ''' params: T_JSON_DICT = dict() params['type'] = type_ @@ -748,6 +759,25 @@ def set_disabled_image_types( json = yield cmd_dict +def set_hardware_concurrency_override( + hardware_concurrency: int + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + + + **EXPERIMENTAL** + + :param hardware_concurrency: Hardware concurrency to report + ''' + params: T_JSON_DICT = dict() + params['hardwareConcurrency'] = hardware_concurrency + cmd_dict: T_JSON_DICT = { + 'method': 'Emulation.setHardwareConcurrencyOverride', + 'params': params, + } + json = yield cmd_dict + + def set_user_agent_override( user_agent: str, accept_language: typing.Optional[str] = None, @@ -777,6 +807,25 @@ def set_user_agent_override( json = yield cmd_dict +def set_automation_override( + enabled: bool + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + Allows overriding the automation flag. + + **EXPERIMENTAL** + + :param enabled: Whether the override should be enabled. 
+ ''' + params: T_JSON_DICT = dict() + params['enabled'] = enabled + cmd_dict: T_JSON_DICT = { + 'method': 'Emulation.setAutomationOverride', + 'params': params, + } + json = yield cmd_dict + + @event_class('Emulation.virtualTimeBudgetExpired') @dataclass class VirtualTimeBudgetExpired: diff --git a/pycdp/cdp/fetch.py b/pycdp/cdp/fetch.py index 0ba5296..f37f4cc 100644 --- a/pycdp/cdp/fetch.py +++ b/pycdp/cdp/fetch.py @@ -73,9 +73,9 @@ def to_json(self) -> T_JSON_DICT: @classmethod def from_json(cls, json: T_JSON_DICT) -> RequestPattern: return cls( - url_pattern=str(json['urlPattern']) if 'urlPattern' in json else None, - resource_type=network.ResourceType.from_json(json['resourceType']) if 'resourceType' in json else None, - request_stage=RequestStage.from_json(json['requestStage']) if 'requestStage' in json else None, + url_pattern=str(json['urlPattern']) if json.get('urlPattern', None) is not None else None, + resource_type=network.ResourceType.from_json(json['resourceType']) if json.get('resourceType', None) is not None else None, + request_stage=RequestStage.from_json(json['requestStage']) if json.get('requestStage', None) is not None else None, ) @@ -134,7 +134,7 @@ def from_json(cls, json: T_JSON_DICT) -> AuthChallenge: origin=str(json['origin']), scheme=str(json['scheme']), realm=str(json['realm']), - source=str(json['source']) if 'source' in json else None, + source=str(json['source']) if json.get('source', None) is not None else None, ) @@ -169,8 +169,8 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> AuthChallengeResponse: return cls( response=str(json['response']), - username=str(json['username']) if 'username' in json else None, - password=str(json['password']) if 'password' in json else None, + username=str(json['username']) if json.get('username', None) is not None else None, + password=str(json['password']) if json.get('password', None) is not None else None, ) @@ -278,7 +278,7 @@ def continue_request( :param url: 
*(Optional)* If set, the request url will be modified in a way that's not observable by page. :param method: *(Optional)* If set, the request method is overridden. :param post_data: *(Optional)* If set, overrides the post data in the request. (Encoded as a base64 string when passed over JSON) - :param headers: *(Optional)* If set, overrides the request headers. + :param headers: *(Optional)* If set, overrides the request headers. Note that the overrides do not extend to subsequent redirect hops, if a redirect happens. Another override may be applied to a different request produced by a redirect. :param intercept_response: **(EXPERIMENTAL)** *(Optional)* If set, overrides response interception behavior for this request. ''' params: T_JSON_DICT = dict() @@ -444,7 +444,10 @@ class RequestPaused: response_headers: typing.Optional[typing.List[HeaderEntry]] #: If the intercepted request had a corresponding Network.requestWillBeSent event fired for it, #: then this networkId will be the same as the requestId present in the requestWillBeSent event. - network_id: typing.Optional[RequestId] + network_id: typing.Optional[network.RequestId] + #: If the request is due to a redirect response from the server, the id of the request that + #: has caused the redirect. 
+ redirected_request_id: typing.Optional[RequestId] @classmethod def from_json(cls, json: T_JSON_DICT) -> RequestPaused: @@ -453,11 +456,12 @@ def from_json(cls, json: T_JSON_DICT) -> RequestPaused: request=network.Request.from_json(json['request']), frame_id=page.FrameId.from_json(json['frameId']), resource_type=network.ResourceType.from_json(json['resourceType']), - response_error_reason=network.ErrorReason.from_json(json['responseErrorReason']) if 'responseErrorReason' in json else None, - response_status_code=int(json['responseStatusCode']) if 'responseStatusCode' in json else None, - response_status_text=str(json['responseStatusText']) if 'responseStatusText' in json else None, - response_headers=[HeaderEntry.from_json(i) for i in json['responseHeaders']] if 'responseHeaders' in json else None, - network_id=RequestId.from_json(json['networkId']) if 'networkId' in json else None + response_error_reason=network.ErrorReason.from_json(json['responseErrorReason']) if json.get('responseErrorReason', None) is not None else None, + response_status_code=int(json['responseStatusCode']) if json.get('responseStatusCode', None) is not None else None, + response_status_text=str(json['responseStatusText']) if json.get('responseStatusText', None) is not None else None, + response_headers=[HeaderEntry.from_json(i) for i in json['responseHeaders']] if json.get('responseHeaders', None) is not None else None, + network_id=network.RequestId.from_json(json['networkId']) if json.get('networkId', None) is not None else None, + redirected_request_id=RequestId.from_json(json['redirectedRequestId']) if json.get('redirectedRequestId', None) is not None else None ) diff --git a/pycdp/cdp/headless_experimental.py b/pycdp/cdp/headless_experimental.py index 37be695..7577334 100644 --- a/pycdp/cdp/headless_experimental.py +++ b/pycdp/cdp/headless_experimental.py @@ -26,19 +26,25 @@ class ScreenshotParams: #: Compression quality from range [0..100] (jpeg only). 
quality: typing.Optional[int] = None + #: Optimize image encoding for speed, not for resulting size (defaults to false) + optimize_for_speed: typing.Optional[bool] = None + def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() if self.format_ is not None: json['format'] = self.format_ if self.quality is not None: json['quality'] = self.quality + if self.optimize_for_speed is not None: + json['optimizeForSpeed'] = self.optimize_for_speed return json @classmethod def from_json(cls, json: T_JSON_DICT) -> ScreenshotParams: return cls( - format_=str(json['format']) if 'format' in json else None, - quality=int(json['quality']) if 'quality' in json else None, + format_=str(json['format']) if json.get('format', None) is not None else None, + quality=int(json['quality']) if json.get('quality', None) is not None else None, + optimize_for_speed=bool(json['optimizeForSpeed']) if json.get('optimizeForSpeed', None) is not None else None, ) @@ -52,7 +58,7 @@ def begin_frame( Sends a BeginFrame to the target and returns when the frame was completed. Optionally captures a screenshot from the resulting frame. Requires that the target was created with enabled BeginFrameControl. Designed for use with --run-all-compositor-stages-before-draw, see also - https://goo.gl/3zHXhB for more background. + https://goo.gle/chrome-headless-rendering for more background. :param frame_time_ticks: *(Optional)* Timestamp of this BeginFrame in Renderer TimeTicks (milliseconds of uptime). If not set, the current time will be used. :param interval: *(Optional)* The interval between BeginFrames that is reported to the compositor, in milliseconds. Defaults to a 60 frames/second interval, i.e. about 16.666 milliseconds. 
@@ -79,13 +85,16 @@ def begin_frame( json = yield cmd_dict return ( bool(json['hasDamage']), - str(json['screenshotData']) if 'screenshotData' in json else None + str(json['screenshotData']) if json.get('screenshotData', None) is not None else None ) +@deprecated(version="1.3") def disable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: ''' Disables headless events for the target. + + .. deprecated:: 1.3 ''' cmd_dict: T_JSON_DICT = { 'method': 'HeadlessExperimental.disable', @@ -93,30 +102,14 @@ def disable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: json = yield cmd_dict +@deprecated(version="1.3") def enable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: ''' Enables headless events for the target. + + .. deprecated:: 1.3 ''' cmd_dict: T_JSON_DICT = { 'method': 'HeadlessExperimental.enable', } json = yield cmd_dict - - -@deprecated(version="1.3") -@event_class('HeadlessExperimental.needsBeginFramesChanged') -@dataclass -class NeedsBeginFramesChanged: - ''' - Issued when the target starts or stops needing BeginFrames. - Deprecated. Issue beginFrame unconditionally instead and use result from - beginFrame to detect whether the frames were suppressed. - ''' - #: True if BeginFrames are needed, false otherwise. 
- needs_begin_frames: bool - - @classmethod - def from_json(cls, json: T_JSON_DICT) -> NeedsBeginFramesChanged: - return cls( - needs_begin_frames=bool(json['needsBeginFrames']) - ) diff --git a/pycdp/cdp/heap_profiler.py b/pycdp/cdp/heap_profiler.py index cf6717d..e28df4d 100644 --- a/pycdp/cdp/heap_profiler.py +++ b/pycdp/cdp/heap_profiler.py @@ -212,14 +212,22 @@ def get_sampling_profile() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,SamplingH def start_sampling( - sampling_interval: typing.Optional[float] = None + sampling_interval: typing.Optional[float] = None, + include_objects_collected_by_major_gc: typing.Optional[bool] = None, + include_objects_collected_by_minor_gc: typing.Optional[bool] = None ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: ''' :param sampling_interval: *(Optional)* Average sample interval in bytes. Poisson distribution is used for the intervals. The default value is 32768 bytes. + :param include_objects_collected_by_major_gc: *(Optional)* By default, the sampling heap profiler reports only objects which are still alive when the profile is returned via getSamplingProfile or stopSampling, which is useful for determining what functions contribute the most to steady-state memory usage. This flag instructs the sampling heap profiler to also include information about objects discarded by major GC, which will show which functions cause large temporary memory usage or long GC pauses. + :param include_objects_collected_by_minor_gc: *(Optional)* By default, the sampling heap profiler reports only objects which are still alive when the profile is returned via getSamplingProfile or stopSampling, which is useful for determining what functions contribute the most to steady-state memory usage. This flag instructs the sampling heap profiler to also include information about objects discarded by minor GC, which is useful when tuning a latency-sensitive application for minimal GC activity. 
''' params: T_JSON_DICT = dict() if sampling_interval is not None: params['samplingInterval'] = sampling_interval + if include_objects_collected_by_major_gc is not None: + params['includeObjectsCollectedByMajorGC'] = include_objects_collected_by_major_gc + if include_objects_collected_by_minor_gc is not None: + params['includeObjectsCollectedByMinorGC'] = include_objects_collected_by_minor_gc cmd_dict: T_JSON_DICT = { 'method': 'HeapProfiler.startSampling', 'params': params, @@ -259,12 +267,14 @@ def stop_sampling() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,SamplingHeapProf def stop_tracking_heap_objects( report_progress: typing.Optional[bool] = None, treat_global_objects_as_roots: typing.Optional[bool] = None, - capture_numeric_value: typing.Optional[bool] = None + capture_numeric_value: typing.Optional[bool] = None, + expose_internals: typing.Optional[bool] = None ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: ''' :param report_progress: *(Optional)* If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken when the tracking is stopped. - :param treat_global_objects_as_roots: *(Optional)* + :param treat_global_objects_as_roots: **(DEPRECATED)** *(Optional)* Deprecated in favor of ```exposeInternals```. :param capture_numeric_value: *(Optional)* If true, numerical values are included in the snapshot + :param expose_internals: **(EXPERIMENTAL)** *(Optional)* If true, exposes internals of the snapshot. 
''' params: T_JSON_DICT = dict() if report_progress is not None: @@ -273,6 +283,8 @@ def stop_tracking_heap_objects( params['treatGlobalObjectsAsRoots'] = treat_global_objects_as_roots if capture_numeric_value is not None: params['captureNumericValue'] = capture_numeric_value + if expose_internals is not None: + params['exposeInternals'] = expose_internals cmd_dict: T_JSON_DICT = { 'method': 'HeapProfiler.stopTrackingHeapObjects', 'params': params, @@ -283,12 +295,14 @@ def stop_tracking_heap_objects( def take_heap_snapshot( report_progress: typing.Optional[bool] = None, treat_global_objects_as_roots: typing.Optional[bool] = None, - capture_numeric_value: typing.Optional[bool] = None + capture_numeric_value: typing.Optional[bool] = None, + expose_internals: typing.Optional[bool] = None ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: ''' :param report_progress: *(Optional)* If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken. - :param treat_global_objects_as_roots: *(Optional)* If true, a raw snapshot without artificial roots will be generated + :param treat_global_objects_as_roots: **(DEPRECATED)** *(Optional)* If true, a raw snapshot without artificial roots will be generated. Deprecated in favor of ```exposeInternals```. :param capture_numeric_value: *(Optional)* If true, numerical values are included in the snapshot + :param expose_internals: **(EXPERIMENTAL)** *(Optional)* If true, exposes internals of the snapshot. 
''' params: T_JSON_DICT = dict() if report_progress is not None: @@ -297,6 +311,8 @@ def take_heap_snapshot( params['treatGlobalObjectsAsRoots'] = treat_global_objects_as_roots if capture_numeric_value is not None: params['captureNumericValue'] = capture_numeric_value + if expose_internals is not None: + params['exposeInternals'] = expose_internals cmd_dict: T_JSON_DICT = { 'method': 'HeapProfiler.takeHeapSnapshot', 'params': params, @@ -365,7 +381,7 @@ def from_json(cls, json: T_JSON_DICT) -> ReportHeapSnapshotProgress: return cls( done=int(json['done']), total=int(json['total']), - finished=bool(json['finished']) if 'finished' in json else None + finished=bool(json['finished']) if json.get('finished', None) is not None else None ) diff --git a/pycdp/cdp/indexed_db.py b/pycdp/cdp/indexed_db.py index 06fe96d..1685e07 100644 --- a/pycdp/cdp/indexed_db.py +++ b/pycdp/cdp/indexed_db.py @@ -152,10 +152,10 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> Key: return cls( type_=str(json['type']), - number=float(json['number']) if 'number' in json else None, - string=str(json['string']) if 'string' in json else None, - date=float(json['date']) if 'date' in json else None, - array=[Key.from_json(i) for i in json['array']] if 'array' in json else None, + number=float(json['number']) if json.get('number', None) is not None else None, + string=str(json['string']) if json.get('string', None) is not None else None, + date=float(json['date']) if json.get('date', None) is not None else None, + array=[Key.from_json(i) for i in json['array']] if json.get('array', None) is not None else None, ) @@ -191,8 +191,8 @@ def from_json(cls, json: T_JSON_DICT) -> KeyRange: return cls( lower_open=bool(json['lowerOpen']), upper_open=bool(json['upperOpen']), - lower=Key.from_json(json['lower']) if 'lower' in json else None, - upper=Key.from_json(json['upper']) if 'upper' in json else None, + lower=Key.from_json(json['lower']) if json.get('lower', None) is not None 
else None, + upper=Key.from_json(json['upper']) if json.get('upper', None) is not None else None, ) @@ -253,25 +253,30 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> KeyPath: return cls( type_=str(json['type']), - string=str(json['string']) if 'string' in json else None, - array=[str(i) for i in json['array']] if 'array' in json else None, + string=str(json['string']) if json.get('string', None) is not None else None, + array=[str(i) for i in json['array']] if json.get('array', None) is not None else None, ) def clear_object_store( - security_origin: str, database_name: str, - object_store_name: str + object_store_name: str, + security_origin: typing.Optional[str] = None, + storage_key: typing.Optional[str] = None ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: ''' Clears all entries from an object store. - :param security_origin: Security origin. + :param security_origin: *(Optional)* At least and at most one of securityOrigin, storageKey must be specified. Security origin. + :param storage_key: *(Optional)* Storage key. :param database_name: Database name. :param object_store_name: Object store name. ''' params: T_JSON_DICT = dict() - params['securityOrigin'] = security_origin + if security_origin is not None: + params['securityOrigin'] = security_origin + if storage_key is not None: + params['storageKey'] = storage_key params['databaseName'] = database_name params['objectStoreName'] = object_store_name cmd_dict: T_JSON_DICT = { @@ -282,17 +287,22 @@ def clear_object_store( def delete_database( - security_origin: str, - database_name: str + database_name: str, + security_origin: typing.Optional[str] = None, + storage_key: typing.Optional[str] = None ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: ''' Deletes a database. - :param security_origin: Security origin. + :param security_origin: *(Optional)* At least and at most one of securityOrigin, storageKey must be specified. Security origin. 
+ :param storage_key: *(Optional)* Storage key. :param database_name: Database name. ''' params: T_JSON_DICT = dict() - params['securityOrigin'] = security_origin + if security_origin is not None: + params['securityOrigin'] = security_origin + if storage_key is not None: + params['storageKey'] = storage_key params['databaseName'] = database_name cmd_dict: T_JSON_DICT = { 'method': 'IndexedDB.deleteDatabase', @@ -302,21 +312,26 @@ def delete_database( def delete_object_store_entries( - security_origin: str, database_name: str, object_store_name: str, - key_range: KeyRange + key_range: KeyRange, + security_origin: typing.Optional[str] = None, + storage_key: typing.Optional[str] = None ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: ''' Delete a range of entries from an object store - :param security_origin: + :param security_origin: *(Optional)* At least and at most one of securityOrigin, storageKey must be specified. Security origin. + :param storage_key: *(Optional)* Storage key. :param database_name: :param object_store_name: :param key_range: Range of entry keys to delete ''' params: T_JSON_DICT = dict() - params['securityOrigin'] = security_origin + if security_origin is not None: + params['securityOrigin'] = security_origin + if storage_key is not None: + params['storageKey'] = storage_key params['databaseName'] = database_name params['objectStoreName'] = object_store_name params['keyRange'] = key_range.to_json() @@ -348,18 +363,20 @@ def enable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: def request_data( - security_origin: str, database_name: str, object_store_name: str, index_name: str, skip_count: int, page_size: int, + security_origin: typing.Optional[str] = None, + storage_key: typing.Optional[str] = None, key_range: typing.Optional[KeyRange] = None ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.List[DataEntry], bool]]: ''' Requests data from object store or index. - :param security_origin: Security origin. 
+ :param security_origin: *(Optional)* At least and at most one of securityOrigin, storageKey must be specified. Security origin. + :param storage_key: *(Optional)* Storage key. :param database_name: Database name. :param object_store_name: Object store name. :param index_name: Index name, empty string for object store data requests. @@ -372,7 +389,10 @@ def request_data( 1. **hasMore** - If true, there are more entries to fetch in the given range. ''' params: T_JSON_DICT = dict() - params['securityOrigin'] = security_origin + if security_origin is not None: + params['securityOrigin'] = security_origin + if storage_key is not None: + params['storageKey'] = storage_key params['databaseName'] = database_name params['objectStoreName'] = object_store_name params['indexName'] = index_name @@ -392,14 +412,16 @@ def request_data( def get_metadata( - security_origin: str, database_name: str, - object_store_name: str + object_store_name: str, + security_origin: typing.Optional[str] = None, + storage_key: typing.Optional[str] = None ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[float, float]]: ''' - Gets metadata of an object store + Gets metadata of an object store. - :param security_origin: Security origin. + :param security_origin: *(Optional)* At least and at most one of securityOrigin, storageKey must be specified. Security origin. + :param storage_key: *(Optional)* Storage key. :param database_name: Database name. :param object_store_name: Object store name. :returns: A tuple with the following items: @@ -408,7 +430,10 @@ def get_metadata( 1. **keyGeneratorValue** - the current value of key generator, to become the next inserted key into the object store. Valid if objectStore.autoIncrement is true. 
''' params: T_JSON_DICT = dict() - params['securityOrigin'] = security_origin + if security_origin is not None: + params['securityOrigin'] = security_origin + if storage_key is not None: + params['storageKey'] = storage_key params['databaseName'] = database_name params['objectStoreName'] = object_store_name cmd_dict: T_JSON_DICT = { @@ -423,18 +448,23 @@ def get_metadata( def request_database( - security_origin: str, - database_name: str + database_name: str, + security_origin: typing.Optional[str] = None, + storage_key: typing.Optional[str] = None ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,DatabaseWithObjectStores]: ''' Requests database with given name in given frame. - :param security_origin: Security origin. + :param security_origin: *(Optional)* At least and at most one of securityOrigin, storageKey must be specified. Security origin. + :param storage_key: *(Optional)* Storage key. :param database_name: Database name. :returns: Database with an array of object stores. ''' params: T_JSON_DICT = dict() - params['securityOrigin'] = security_origin + if security_origin is not None: + params['securityOrigin'] = security_origin + if storage_key is not None: + params['storageKey'] = storage_key params['databaseName'] = database_name cmd_dict: T_JSON_DICT = { 'method': 'IndexedDB.requestDatabase', @@ -445,16 +475,21 @@ def request_database( def request_database_names( - security_origin: str + security_origin: typing.Optional[str] = None, + storage_key: typing.Optional[str] = None ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[str]]: ''' Requests database names for given security origin. - :param security_origin: Security origin. + :param security_origin: *(Optional)* At least and at most one of securityOrigin, storageKey must be specified. Security origin. + :param storage_key: *(Optional)* Storage key. :returns: Database names for origin. 
''' params: T_JSON_DICT = dict() - params['securityOrigin'] = security_origin + if security_origin is not None: + params['securityOrigin'] = security_origin + if storage_key is not None: + params['storageKey'] = storage_key cmd_dict: T_JSON_DICT = { 'method': 'IndexedDB.requestDatabaseNames', 'params': params, diff --git a/pycdp/cdp/input_.py b/pycdp/cdp/input_.py index 4705f37..0b3d175 100644 --- a/pycdp/cdp/input_.py +++ b/pycdp/cdp/input_.py @@ -77,15 +77,15 @@ def from_json(cls, json: T_JSON_DICT) -> TouchPoint: return cls( x=float(json['x']), y=float(json['y']), - radius_x=float(json['radiusX']) if 'radiusX' in json else None, - radius_y=float(json['radiusY']) if 'radiusY' in json else None, - rotation_angle=float(json['rotationAngle']) if 'rotationAngle' in json else None, - force=float(json['force']) if 'force' in json else None, - tangential_pressure=float(json['tangentialPressure']) if 'tangentialPressure' in json else None, - tilt_x=int(json['tiltX']) if 'tiltX' in json else None, - tilt_y=int(json['tiltY']) if 'tiltY' in json else None, - twist=int(json['twist']) if 'twist' in json else None, - id_=float(json['id']) if 'id' in json else None, + radius_x=float(json['radiusX']) if json.get('radiusX', None) is not None else None, + radius_y=float(json['radiusY']) if json.get('radiusY', None) is not None else None, + rotation_angle=float(json['rotationAngle']) if json.get('rotationAngle', None) is not None else None, + force=float(json['force']) if json.get('force', None) is not None else None, + tangential_pressure=float(json['tangentialPressure']) if json.get('tangentialPressure', None) is not None else None, + tilt_x=int(json['tiltX']) if json.get('tiltX', None) is not None else None, + tilt_y=int(json['tiltY']) if json.get('tiltY', None) is not None else None, + twist=int(json['twist']) if json.get('twist', None) is not None else None, + id_=float(json['id']) if json.get('id', None) is not None else None, ) @@ -164,8 +164,8 @@ def from_json(cls, json: 
T_JSON_DICT) -> DragDataItem: return cls( mime_type=str(json['mimeType']), data=str(json['data']), - title=str(json['title']) if 'title' in json else None, - base_url=str(json['baseURL']) if 'baseURL' in json else None, + title=str(json['title']) if json.get('title', None) is not None else None, + base_url=str(json['baseURL']) if json.get('baseURL', None) is not None else None, ) @@ -192,7 +192,7 @@ def from_json(cls, json: T_JSON_DICT) -> DragData: return cls( items=[DragDataItem.from_json(i) for i in json['items']], drag_operations_mask=int(json['dragOperationsMask']), - files=[str(i) for i in json['files']] if 'files' in json else None, + files=[str(i) for i in json['files']] if json.get('files', None) is not None else None, ) diff --git a/pycdp/cdp/io.py b/pycdp/cdp/io.py index 3590778..1424cd7 100644 --- a/pycdp/cdp/io.py +++ b/pycdp/cdp/io.py @@ -76,7 +76,7 @@ def read( } json = yield cmd_dict return ( - bool(json['base64Encoded']) if 'base64Encoded' in json else None, + bool(json['base64Encoded']) if json.get('base64Encoded', None) is not None else None, str(json['data']), bool(json['eof']) ) diff --git a/pycdp/cdp/layer_tree.py b/pycdp/cdp/layer_tree.py index c94d67c..08d8539 100644 --- a/pycdp/cdp/layer_tree.py +++ b/pycdp/cdp/layer_tree.py @@ -101,8 +101,8 @@ def from_json(cls, json: T_JSON_DICT) -> StickyPositionConstraint: return cls( sticky_box_rect=dom.Rect.from_json(json['stickyBoxRect']), containing_block_rect=dom.Rect.from_json(json['containingBlockRect']), - nearest_layer_shifting_sticky_box=LayerId.from_json(json['nearestLayerShiftingStickyBox']) if 'nearestLayerShiftingStickyBox' in json else None, - nearest_layer_shifting_containing_block=LayerId.from_json(json['nearestLayerShiftingContainingBlock']) if 'nearestLayerShiftingContainingBlock' in json else None, + nearest_layer_shifting_sticky_box=LayerId.from_json(json['nearestLayerShiftingStickyBox']) if json.get('nearestLayerShiftingStickyBox', None) is not None else None, + 
nearest_layer_shifting_containing_block=LayerId.from_json(json['nearestLayerShiftingContainingBlock']) if json.get('nearestLayerShiftingContainingBlock', None) is not None else None, ) @@ -229,15 +229,15 @@ def from_json(cls, json: T_JSON_DICT) -> Layer: height=float(json['height']), paint_count=int(json['paintCount']), draws_content=bool(json['drawsContent']), - parent_layer_id=LayerId.from_json(json['parentLayerId']) if 'parentLayerId' in json else None, - backend_node_id=dom.BackendNodeId.from_json(json['backendNodeId']) if 'backendNodeId' in json else None, - transform=[float(i) for i in json['transform']] if 'transform' in json else None, - anchor_x=float(json['anchorX']) if 'anchorX' in json else None, - anchor_y=float(json['anchorY']) if 'anchorY' in json else None, - anchor_z=float(json['anchorZ']) if 'anchorZ' in json else None, - invisible=bool(json['invisible']) if 'invisible' in json else None, - scroll_rects=[ScrollRect.from_json(i) for i in json['scrollRects']] if 'scrollRects' in json else None, - sticky_position_constraint=StickyPositionConstraint.from_json(json['stickyPositionConstraint']) if 'stickyPositionConstraint' in json else None, + parent_layer_id=LayerId.from_json(json['parentLayerId']) if json.get('parentLayerId', None) is not None else None, + backend_node_id=dom.BackendNodeId.from_json(json['backendNodeId']) if json.get('backendNodeId', None) is not None else None, + transform=[float(i) for i in json['transform']] if json.get('transform', None) is not None else None, + anchor_x=float(json['anchorX']) if json.get('anchorX', None) is not None else None, + anchor_y=float(json['anchorY']) if json.get('anchorY', None) is not None else None, + anchor_z=float(json['anchorZ']) if json.get('anchorZ', None) is not None else None, + invisible=bool(json['invisible']) if json.get('invisible', None) is not None else None, + scroll_rects=[ScrollRect.from_json(i) for i in json['scrollRects']] if json.get('scrollRects', None) is not None else None, + 
sticky_position_constraint=StickyPositionConstraint.from_json(json['stickyPositionConstraint']) if json.get('stickyPositionConstraint', None) is not None else None, ) @@ -460,5 +460,5 @@ class LayerTreeDidChange: @classmethod def from_json(cls, json: T_JSON_DICT) -> LayerTreeDidChange: return cls( - layers=[Layer.from_json(i) for i in json['layers']] if 'layers' in json else None + layers=[Layer.from_json(i) for i in json['layers']] if json.get('layers', None) is not None else None ) diff --git a/pycdp/cdp/log.py b/pycdp/cdp/log.py index 44db254..4acafb3 100644 --- a/pycdp/cdp/log.py +++ b/pycdp/cdp/log.py @@ -81,13 +81,13 @@ def from_json(cls, json: T_JSON_DICT) -> LogEntry: level=str(json['level']), text=str(json['text']), timestamp=runtime.Timestamp.from_json(json['timestamp']), - category=str(json['category']) if 'category' in json else None, - url=str(json['url']) if 'url' in json else None, - line_number=int(json['lineNumber']) if 'lineNumber' in json else None, - stack_trace=runtime.StackTrace.from_json(json['stackTrace']) if 'stackTrace' in json else None, - network_request_id=network.RequestId.from_json(json['networkRequestId']) if 'networkRequestId' in json else None, - worker_id=str(json['workerId']) if 'workerId' in json else None, - args=[runtime.RemoteObject.from_json(i) for i in json['args']] if 'args' in json else None, + category=str(json['category']) if json.get('category', None) is not None else None, + url=str(json['url']) if json.get('url', None) is not None else None, + line_number=int(json['lineNumber']) if json.get('lineNumber', None) is not None else None, + stack_trace=runtime.StackTrace.from_json(json['stackTrace']) if json.get('stackTrace', None) is not None else None, + network_request_id=network.RequestId.from_json(json['networkRequestId']) if json.get('networkRequestId', None) is not None else None, + worker_id=str(json['workerId']) if json.get('workerId', None) is not None else None, + args=[runtime.RemoteObject.from_json(i) for i in 
json['args']] if json.get('args', None) is not None else None, ) diff --git a/pycdp/cdp/media.py b/pycdp/cdp/media.py index fd15922..7f6f430 100644 --- a/pycdp/cdp/media.py +++ b/pycdp/cdp/media.py @@ -118,31 +118,68 @@ def from_json(cls, json: T_JSON_DICT) -> PlayerEvent: ) +@dataclass +class PlayerErrorSourceLocation: + ''' + Represents logged source line numbers reported in an error. + NOTE: file and line are from chromium c++ implementation code, not js. + ''' + file: str + + line: int + + def to_json(self) -> T_JSON_DICT: + json: T_JSON_DICT = dict() + json['file'] = self.file + json['line'] = self.line + return json + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> PlayerErrorSourceLocation: + return cls( + file=str(json['file']), + line=int(json['line']), + ) + + @dataclass class PlayerError: ''' Corresponds to kMediaError ''' - type_: str + error_type: str + + #: Code is the numeric enum entry for a specific set of error codes, such + #: as PipelineStatusCodes in media/base/pipeline_status.h + code: int + + #: A trace of where this error was caused / where it passed through. + stack: typing.List[PlayerErrorSourceLocation] + + #: Errors potentially have a root cause error, ie, a DecoderError might be + #: caused by an WindowsError + cause: typing.List[PlayerError] - #: When this switches to using media::Status instead of PipelineStatus - #: we can remove "errorCode" and replace it with the fields from - #: a Status instance. This also seems like a duplicate of the error - #: level enum - there is a todo bug to have that level removed and - #: use this instead. (crbug.com/1068454) - error_code: str + #: Extra data attached to an error, such as an HRESULT, Video Codec, etc. 
+ data: dict def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() - json['type'] = self.type_ - json['errorCode'] = self.error_code + json['errorType'] = self.error_type + json['code'] = self.code + json['stack'] = [i.to_json() for i in self.stack] + json['cause'] = [i.to_json() for i in self.cause] + json['data'] = self.data return json @classmethod def from_json(cls, json: T_JSON_DICT) -> PlayerError: return cls( - type_=str(json['type']), - error_code=str(json['errorCode']), + error_type=str(json['errorType']), + code=int(json['code']), + stack=[PlayerErrorSourceLocation.from_json(i) for i in json['stack']], + cause=[PlayerError.from_json(i) for i in json['cause']], + data=dict(json['data']), ) diff --git a/pycdp/cdp/network.py b/pycdp/cdp/network.py index 0d10271..e70981f 100644 --- a/pycdp/cdp/network.py +++ b/pycdp/cdp/network.py @@ -33,6 +33,7 @@ class ResourceType(enum.Enum): TEXT_TRACK = "TextTrack" XHR = "XHR" FETCH = "Fetch" + PREFETCH = "Prefetch" EVENT_SOURCE = "EventSource" WEB_SOCKET = "WebSocket" MANIFEST = "Manifest" @@ -381,7 +382,7 @@ def to_json(self) -> T_JSON_DICT: @classmethod def from_json(cls, json: T_JSON_DICT) -> PostDataEntry: return cls( - bytes_=str(json['bytes']) if 'bytes' in json else None, + bytes_=str(json['bytes']) if json.get('bytes', None) is not None else None, ) @@ -464,14 +465,14 @@ def from_json(cls, json: T_JSON_DICT) -> Request: headers=Headers.from_json(json['headers']), initial_priority=ResourcePriority.from_json(json['initialPriority']), referrer_policy=str(json['referrerPolicy']), - url_fragment=str(json['urlFragment']) if 'urlFragment' in json else None, - post_data=str(json['postData']) if 'postData' in json else None, - has_post_data=bool(json['hasPostData']) if 'hasPostData' in json else None, - post_data_entries=[PostDataEntry.from_json(i) for i in json['postDataEntries']] if 'postDataEntries' in json else None, - mixed_content_type=security.MixedContentType.from_json(json['mixedContentType']) if 
'mixedContentType' in json else None, - is_link_preload=bool(json['isLinkPreload']) if 'isLinkPreload' in json else None, - trust_token_params=TrustTokenParams.from_json(json['trustTokenParams']) if 'trustTokenParams' in json else None, - is_same_site=bool(json['isSameSite']) if 'isSameSite' in json else None, + url_fragment=str(json['urlFragment']) if json.get('urlFragment', None) is not None else None, + post_data=str(json['postData']) if json.get('postData', None) is not None else None, + has_post_data=bool(json['hasPostData']) if json.get('hasPostData', None) is not None else None, + post_data_entries=[PostDataEntry.from_json(i) for i in json['postDataEntries']] if json.get('postDataEntries', None) is not None else None, + mixed_content_type=security.MixedContentType.from_json(json['mixedContentType']) if json.get('mixedContentType', None) is not None else None, + is_link_preload=bool(json['isLinkPreload']) if json.get('isLinkPreload', None) is not None else None, + trust_token_params=TrustTokenParams.from_json(json['trustTokenParams']) if json.get('trustTokenParams', None) is not None else None, + is_same_site=bool(json['isSameSite']) if json.get('isSameSite', None) is not None else None, ) @@ -569,12 +570,20 @@ class SecurityDetails: #: Whether the request complied with Certificate Transparency policy certificate_transparency_compliance: CertificateTransparencyCompliance + #: Whether the connection used Encrypted ClientHello + encrypted_client_hello: bool + #: (EC)DH group used by the connection, if applicable. key_exchange_group: typing.Optional[str] = None #: TLS MAC. Note that AEAD ciphers do not have separate MACs. mac: typing.Optional[str] = None + #: The signature algorithm used by the server in the TLS server signature, + #: represented as a TLS SignatureScheme code point. Omitted if not + #: applicable or not known. 
+ server_signature_algorithm: typing.Optional[int] = None + def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() json['protocol'] = self.protocol @@ -588,10 +597,13 @@ def to_json(self) -> T_JSON_DICT: json['validTo'] = self.valid_to.to_json() json['signedCertificateTimestampList'] = [i.to_json() for i in self.signed_certificate_timestamp_list] json['certificateTransparencyCompliance'] = self.certificate_transparency_compliance.to_json() + json['encryptedClientHello'] = self.encrypted_client_hello if self.key_exchange_group is not None: json['keyExchangeGroup'] = self.key_exchange_group if self.mac is not None: json['mac'] = self.mac + if self.server_signature_algorithm is not None: + json['serverSignatureAlgorithm'] = self.server_signature_algorithm return json @classmethod @@ -608,8 +620,10 @@ def from_json(cls, json: T_JSON_DICT) -> SecurityDetails: valid_to=TimeSinceEpoch.from_json(json['validTo']), signed_certificate_timestamp_list=[SignedCertificateTimestamp.from_json(i) for i in json['signedCertificateTimestampList']], certificate_transparency_compliance=CertificateTransparencyCompliance.from_json(json['certificateTransparencyCompliance']), - key_exchange_group=str(json['keyExchangeGroup']) if 'keyExchangeGroup' in json else None, - mac=str(json['mac']) if 'mac' in json else None, + encrypted_client_hello=bool(json['encryptedClientHello']), + key_exchange_group=str(json['keyExchangeGroup']) if json.get('keyExchangeGroup', None) is not None else None, + mac=str(json['mac']) if json.get('mac', None) is not None else None, + server_signature_algorithm=int(json['serverSignatureAlgorithm']) if json.get('serverSignatureAlgorithm', None) is not None else None, ) @@ -741,9 +755,9 @@ class TrustTokenParams: depending on the type, some additional parameters. The values are specified in third_party/blink/renderer/core/fetch/trust_token.idl. 
''' - type_: TrustTokenOperationType + operation: TrustTokenOperationType - #: Only set for "token-redemption" type and determine whether + #: Only set for "token-redemption" operation and determine whether #: to request a fresh SRR or use a still valid cached SRR. refresh_policy: str @@ -753,7 +767,7 @@ class TrustTokenParams: def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() - json['type'] = self.type_.to_json() + json['operation'] = self.operation.to_json() json['refreshPolicy'] = self.refresh_policy if self.issuers is not None: json['issuers'] = [i for i in self.issuers] @@ -762,9 +776,9 @@ def to_json(self) -> T_JSON_DICT: @classmethod def from_json(cls, json: T_JSON_DICT) -> TrustTokenParams: return cls( - type_=TrustTokenOperationType.from_json(json['type']), + operation=TrustTokenOperationType.from_json(json['operation']), refresh_policy=str(json['refreshPolicy']), - issuers=[str(i) for i in json['issuers']] if 'issuers' in json else None, + issuers=[str(i) for i in json['issuers']] if json.get('issuers', None) is not None else None, ) @@ -781,6 +795,27 @@ def from_json(cls, json: str) -> TrustTokenOperationType: return cls(json) +class AlternateProtocolUsage(enum.Enum): + ''' + The reason why Chrome uses a specific transport protocol for HTTP semantics. + ''' + ALTERNATIVE_JOB_WON_WITHOUT_RACE = "alternativeJobWonWithoutRace" + ALTERNATIVE_JOB_WON_RACE = "alternativeJobWonRace" + MAIN_JOB_WON_RACE = "mainJobWonRace" + MAPPING_MISSING = "mappingMissing" + BROKEN = "broken" + DNS_ALPN_H3_JOB_WON_WITHOUT_RACE = "dnsAlpnH3JobWonWithoutRace" + DNS_ALPN_H3_JOB_WON_RACE = "dnsAlpnH3JobWonRace" + UNSPECIFIED_REASON = "unspecifiedReason" + + def to_json(self) -> str: + return self.value + + @classmethod + def from_json(cls, json: str) -> AlternateProtocolUsage: + return cls(json) + + @dataclass class Response: ''' @@ -852,6 +887,9 @@ class Response: #: Protocol used to fetch this request. 
protocol: typing.Optional[str] = None + #: The reason why Chrome uses a specific transport protocol for HTTP semantics. + alternate_protocol_usage: typing.Optional[AlternateProtocolUsage] = None + #: Security details for the request. security_details: typing.Optional[SecurityDetails] = None @@ -892,6 +930,8 @@ def to_json(self) -> T_JSON_DICT: json['cacheStorageCacheName'] = self.cache_storage_cache_name if self.protocol is not None: json['protocol'] = self.protocol + if self.alternate_protocol_usage is not None: + json['alternateProtocolUsage'] = self.alternate_protocol_usage.to_json() if self.security_details is not None: json['securityDetails'] = self.security_details.to_json() return json @@ -908,20 +948,21 @@ def from_json(cls, json: T_JSON_DICT) -> Response: connection_id=float(json['connectionId']), encoded_data_length=float(json['encodedDataLength']), security_state=security.SecurityState.from_json(json['securityState']), - headers_text=str(json['headersText']) if 'headersText' in json else None, - request_headers=Headers.from_json(json['requestHeaders']) if 'requestHeaders' in json else None, - request_headers_text=str(json['requestHeadersText']) if 'requestHeadersText' in json else None, - remote_ip_address=str(json['remoteIPAddress']) if 'remoteIPAddress' in json else None, - remote_port=int(json['remotePort']) if 'remotePort' in json else None, - from_disk_cache=bool(json['fromDiskCache']) if 'fromDiskCache' in json else None, - from_service_worker=bool(json['fromServiceWorker']) if 'fromServiceWorker' in json else None, - from_prefetch_cache=bool(json['fromPrefetchCache']) if 'fromPrefetchCache' in json else None, - timing=ResourceTiming.from_json(json['timing']) if 'timing' in json else None, - service_worker_response_source=ServiceWorkerResponseSource.from_json(json['serviceWorkerResponseSource']) if 'serviceWorkerResponseSource' in json else None, - response_time=TimeSinceEpoch.from_json(json['responseTime']) if 'responseTime' in json else None, - 
cache_storage_cache_name=str(json['cacheStorageCacheName']) if 'cacheStorageCacheName' in json else None, - protocol=str(json['protocol']) if 'protocol' in json else None, - security_details=SecurityDetails.from_json(json['securityDetails']) if 'securityDetails' in json else None, + headers_text=str(json['headersText']) if json.get('headersText', None) is not None else None, + request_headers=Headers.from_json(json['requestHeaders']) if json.get('requestHeaders', None) is not None else None, + request_headers_text=str(json['requestHeadersText']) if json.get('requestHeadersText', None) is not None else None, + remote_ip_address=str(json['remoteIPAddress']) if json.get('remoteIPAddress', None) is not None else None, + remote_port=int(json['remotePort']) if json.get('remotePort', None) is not None else None, + from_disk_cache=bool(json['fromDiskCache']) if json.get('fromDiskCache', None) is not None else None, + from_service_worker=bool(json['fromServiceWorker']) if json.get('fromServiceWorker', None) is not None else None, + from_prefetch_cache=bool(json['fromPrefetchCache']) if json.get('fromPrefetchCache', None) is not None else None, + timing=ResourceTiming.from_json(json['timing']) if json.get('timing', None) is not None else None, + service_worker_response_source=ServiceWorkerResponseSource.from_json(json['serviceWorkerResponseSource']) if json.get('serviceWorkerResponseSource', None) is not None else None, + response_time=TimeSinceEpoch.from_json(json['responseTime']) if json.get('responseTime', None) is not None else None, + cache_storage_cache_name=str(json['cacheStorageCacheName']) if json.get('cacheStorageCacheName', None) is not None else None, + protocol=str(json['protocol']) if json.get('protocol', None) is not None else None, + alternate_protocol_usage=AlternateProtocolUsage.from_json(json['alternateProtocolUsage']) if json.get('alternateProtocolUsage', None) is not None else None, + security_details=SecurityDetails.from_json(json['securityDetails']) if 
json.get('securityDetails', None) is not None else None, ) @@ -987,9 +1028,9 @@ def from_json(cls, json: T_JSON_DICT) -> WebSocketResponse: status=int(json['status']), status_text=str(json['statusText']), headers=Headers.from_json(json['headers']), - headers_text=str(json['headersText']) if 'headersText' in json else None, - request_headers=Headers.from_json(json['requestHeaders']) if 'requestHeaders' in json else None, - request_headers_text=str(json['requestHeadersText']) if 'requestHeadersText' in json else None, + headers_text=str(json['headersText']) if json.get('headersText', None) is not None else None, + request_headers=Headers.from_json(json['requestHeaders']) if json.get('requestHeaders', None) is not None else None, + request_headers_text=str(json['requestHeadersText']) if json.get('requestHeadersText', None) is not None else None, ) @@ -1057,7 +1098,7 @@ def from_json(cls, json: T_JSON_DICT) -> CachedResource: url=str(json['url']), type_=ResourceType.from_json(json['type']), body_size=float(json['bodySize']), - response=Response.from_json(json['response']) if 'response' in json else None, + response=Response.from_json(json['response']) if json.get('response', None) is not None else None, ) @@ -1105,11 +1146,11 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> Initiator: return cls( type_=str(json['type']), - stack=runtime.StackTrace.from_json(json['stack']) if 'stack' in json else None, - url=str(json['url']) if 'url' in json else None, - line_number=float(json['lineNumber']) if 'lineNumber' in json else None, - column_number=float(json['columnNumber']) if 'columnNumber' in json else None, - request_id=RequestId.from_json(json['requestId']) if 'requestId' in json else None, + stack=runtime.StackTrace.from_json(json['stack']) if json.get('stack', None) is not None else None, + url=str(json['url']) if json.get('url', None) is not None else None, + line_number=float(json['lineNumber']) if json.get('lineNumber', None) is not None 
else None, + column_number=float(json['columnNumber']) if json.get('columnNumber', None) is not None else None, + request_id=RequestId.from_json(json['requestId']) if json.get('requestId', None) is not None else None, ) @@ -1130,9 +1171,6 @@ class Cookie: #: Cookie path. path: str - #: Cookie expiration date as the number of seconds since the UNIX epoch. - expires: float - #: Cookie size. size: int @@ -1159,6 +1197,9 @@ class Cookie: #: This is a temporary ability and it will be removed in the future. source_port: int + #: Cookie expiration date as the number of seconds since the UNIX epoch. + expires: typing.Optional[float] = None + #: Cookie SameSite type. same_site: typing.Optional[CookieSameSite] = None @@ -1175,7 +1216,6 @@ def to_json(self) -> T_JSON_DICT: json['value'] = self.value json['domain'] = self.domain json['path'] = self.path - json['expires'] = self.expires json['size'] = self.size json['httpOnly'] = self.http_only json['secure'] = self.secure @@ -1184,6 +1224,8 @@ def to_json(self) -> T_JSON_DICT: json['sameParty'] = self.same_party json['sourceScheme'] = self.source_scheme.to_json() json['sourcePort'] = self.source_port + if self.expires is not None: + json['expires'] = self.expires if self.same_site is not None: json['sameSite'] = self.same_site.to_json() if self.partition_key is not None: @@ -1199,7 +1241,6 @@ def from_json(cls, json: T_JSON_DICT) -> Cookie: value=str(json['value']), domain=str(json['domain']), path=str(json['path']), - expires=float(json['expires']), size=int(json['size']), http_only=bool(json['httpOnly']), secure=bool(json['secure']), @@ -1208,9 +1249,10 @@ def from_json(cls, json: T_JSON_DICT) -> Cookie: same_party=bool(json['sameParty']), source_scheme=CookieSourceScheme.from_json(json['sourceScheme']), source_port=int(json['sourcePort']), - same_site=CookieSameSite.from_json(json['sameSite']) if 'sameSite' in json else None, - partition_key=str(json['partitionKey']) if 'partitionKey' in json else None, - 
partition_key_opaque=bool(json['partitionKeyOpaque']) if 'partitionKeyOpaque' in json else None, + expires=float(json['expires']) if json.get('expires', None) is not None else None, + same_site=CookieSameSite.from_json(json['sameSite']) if json.get('sameSite', None) is not None else None, + partition_key=str(json['partitionKey']) if json.get('partitionKey', None) is not None else None, + partition_key_opaque=bool(json['partitionKeyOpaque']) if json.get('partitionKeyOpaque', None) is not None else None, ) @@ -1224,6 +1266,7 @@ class SetCookieBlockedReason(enum.Enum): SAME_SITE_UNSPECIFIED_TREATED_AS_LAX = "SameSiteUnspecifiedTreatedAsLax" SAME_SITE_NONE_INSECURE = "SameSiteNoneInsecure" USER_PREFERENCES = "UserPreferences" + THIRD_PARTY_BLOCKED_IN_FIRST_PARTY_SET = "ThirdPartyBlockedInFirstPartySet" SYNTAX_ERROR = "SyntaxError" SCHEME_NOT_SUPPORTED = "SchemeNotSupported" OVERWRITE_SECURE = "OverwriteSecure" @@ -1257,6 +1300,7 @@ class CookieBlockedReason(enum.Enum): SAME_SITE_UNSPECIFIED_TREATED_AS_LAX = "SameSiteUnspecifiedTreatedAsLax" SAME_SITE_NONE_INSECURE = "SameSiteNoneInsecure" USER_PREFERENCES = "UserPreferences" + THIRD_PARTY_BLOCKED_IN_FIRST_PARTY_SET = "ThirdPartyBlockedInFirstPartySet" UNKNOWN_ERROR = "UnknownError" SCHEMEFUL_SAME_SITE_STRICT = "SchemefulSameSiteStrict" SCHEMEFUL_SAME_SITE_LAX = "SchemefulSameSiteLax" @@ -1302,7 +1346,7 @@ def from_json(cls, json: T_JSON_DICT) -> BlockedSetCookieWithReason: return cls( blocked_reasons=[SetCookieBlockedReason.from_json(i) for i in json['blockedReasons']], cookie_line=str(json['cookieLine']), - cookie=Cookie.from_json(json['cookie']) if 'cookie' in json else None, + cookie=Cookie.from_json(json['cookie']) if json.get('cookie', None) is not None else None, ) @@ -1418,18 +1462,18 @@ def from_json(cls, json: T_JSON_DICT) -> CookieParam: return cls( name=str(json['name']), value=str(json['value']), - url=str(json['url']) if 'url' in json else None, - domain=str(json['domain']) if 'domain' in json else None, - 
path=str(json['path']) if 'path' in json else None, - secure=bool(json['secure']) if 'secure' in json else None, - http_only=bool(json['httpOnly']) if 'httpOnly' in json else None, - same_site=CookieSameSite.from_json(json['sameSite']) if 'sameSite' in json else None, - expires=TimeSinceEpoch.from_json(json['expires']) if 'expires' in json else None, - priority=CookiePriority.from_json(json['priority']) if 'priority' in json else None, - same_party=bool(json['sameParty']) if 'sameParty' in json else None, - source_scheme=CookieSourceScheme.from_json(json['sourceScheme']) if 'sourceScheme' in json else None, - source_port=int(json['sourcePort']) if 'sourcePort' in json else None, - partition_key=str(json['partitionKey']) if 'partitionKey' in json else None, + url=str(json['url']) if json.get('url', None) is not None else None, + domain=str(json['domain']) if json.get('domain', None) is not None else None, + path=str(json['path']) if json.get('path', None) is not None else None, + secure=bool(json['secure']) if json.get('secure', None) is not None else None, + http_only=bool(json['httpOnly']) if json.get('httpOnly', None) is not None else None, + same_site=CookieSameSite.from_json(json['sameSite']) if json.get('sameSite', None) is not None else None, + expires=TimeSinceEpoch.from_json(json['expires']) if json.get('expires', None) is not None else None, + priority=CookiePriority.from_json(json['priority']) if json.get('priority', None) is not None else None, + same_party=bool(json['sameParty']) if json.get('sameParty', None) is not None else None, + source_scheme=CookieSourceScheme.from_json(json['sourceScheme']) if json.get('sourceScheme', None) is not None else None, + source_port=int(json['sourcePort']) if json.get('sourcePort', None) is not None else None, + partition_key=str(json['partitionKey']) if json.get('partitionKey', None) is not None else None, ) @@ -1465,7 +1509,7 @@ def from_json(cls, json: T_JSON_DICT) -> AuthChallenge: origin=str(json['origin']), 
scheme=str(json['scheme']), realm=str(json['realm']), - source=str(json['source']) if 'source' in json else None, + source=str(json['source']) if json.get('source', None) is not None else None, ) @@ -1500,8 +1544,8 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> AuthChallengeResponse: return cls( response=str(json['response']), - username=str(json['username']) if 'username' in json else None, - password=str(json['password']) if 'password' in json else None, + username=str(json['username']) if json.get('username', None) is not None else None, + password=str(json['password']) if json.get('password', None) is not None else None, ) @@ -1549,9 +1593,9 @@ def to_json(self) -> T_JSON_DICT: @classmethod def from_json(cls, json: T_JSON_DICT) -> RequestPattern: return cls( - url_pattern=str(json['urlPattern']) if 'urlPattern' in json else None, - resource_type=ResourceType.from_json(json['resourceType']) if 'resourceType' in json else None, - interception_stage=InterceptionStage.from_json(json['interceptionStage']) if 'interceptionStage' in json else None, + url_pattern=str(json['urlPattern']) if json.get('urlPattern', None) is not None else None, + resource_type=ResourceType.from_json(json['resourceType']) if json.get('resourceType', None) is not None else None, + interception_stage=InterceptionStage.from_json(json['interceptionStage']) if json.get('interceptionStage', None) is not None else None, ) @@ -1613,9 +1657,9 @@ def from_json(cls, json: T_JSON_DICT) -> SignedExchangeSignature: validity_url=str(json['validityUrl']), date=int(json['date']), expires=int(json['expires']), - cert_url=str(json['certUrl']) if 'certUrl' in json else None, - cert_sha256=str(json['certSha256']) if 'certSha256' in json else None, - certificates=[str(i) for i in json['certificates']] if 'certificates' in json else None, + cert_url=str(json['certUrl']) if json.get('certUrl', None) is not None else None, + cert_sha256=str(json['certSha256']) if 
json.get('certSha256', None) is not None else None, + certificates=[str(i) for i in json['certificates']] if json.get('certificates', None) is not None else None, ) @@ -1706,8 +1750,8 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> SignedExchangeError: return cls( message=str(json['message']), - signature_index=int(json['signatureIndex']) if 'signatureIndex' in json else None, - error_field=SignedExchangeErrorField.from_json(json['errorField']) if 'errorField' in json else None, + signature_index=int(json['signatureIndex']) if json.get('signatureIndex', None) is not None else None, + error_field=SignedExchangeErrorField.from_json(json['errorField']) if json.get('errorField', None) is not None else None, ) @@ -1743,9 +1787,9 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> SignedExchangeInfo: return cls( outer_response=Response.from_json(json['outerResponse']), - header=SignedExchangeHeader.from_json(json['header']) if 'header' in json else None, - security_details=SecurityDetails.from_json(json['securityDetails']) if 'securityDetails' in json else None, - errors=[SignedExchangeError.from_json(i) for i in json['errors']] if 'errors' in json else None, + header=SignedExchangeHeader.from_json(json['header']) if json.get('header', None) is not None else None, + security_details=SecurityDetails.from_json(json['securityDetails']) if json.get('securityDetails', None) is not None else None, + errors=[SignedExchangeError.from_json(i) for i in json['errors']] if json.get('errors', None) is not None else None, ) @@ -1840,9 +1884,10 @@ def from_json(cls, json: T_JSON_DICT) -> ClientSecurityState: class CrossOriginOpenerPolicyValue(enum.Enum): SAME_ORIGIN = "SameOrigin" SAME_ORIGIN_ALLOW_POPUPS = "SameOriginAllowPopups" + RESTRICT_PROPERTIES = "RestrictProperties" UNSAFE_NONE = "UnsafeNone" SAME_ORIGIN_PLUS_COEP = "SameOriginPlusCoep" - SAME_ORIGIN_ALLOW_POPUPS_PLUS_COEP = "SameOriginAllowPopupsPlusCoep" + 
RESTRICT_PROPERTIES_PLUS_COEP = "RestrictPropertiesPlusCoep" def to_json(self) -> str: return self.value @@ -1877,8 +1922,8 @@ def from_json(cls, json: T_JSON_DICT) -> CrossOriginOpenerPolicyStatus: return cls( value=CrossOriginOpenerPolicyValue.from_json(json['value']), report_only_value=CrossOriginOpenerPolicyValue.from_json(json['reportOnlyValue']), - reporting_endpoint=str(json['reportingEndpoint']) if 'reportingEndpoint' in json else None, - report_only_reporting_endpoint=str(json['reportOnlyReportingEndpoint']) if 'reportOnlyReportingEndpoint' in json else None, + reporting_endpoint=str(json['reportingEndpoint']) if json.get('reportingEndpoint', None) is not None else None, + report_only_reporting_endpoint=str(json['reportOnlyReportingEndpoint']) if json.get('reportOnlyReportingEndpoint', None) is not None else None, ) @@ -1920,8 +1965,8 @@ def from_json(cls, json: T_JSON_DICT) -> CrossOriginEmbedderPolicyStatus: return cls( value=CrossOriginEmbedderPolicyValue.from_json(json['value']), report_only_value=CrossOriginEmbedderPolicyValue.from_json(json['reportOnlyValue']), - reporting_endpoint=str(json['reportingEndpoint']) if 'reportingEndpoint' in json else None, - report_only_reporting_endpoint=str(json['reportOnlyReportingEndpoint']) if 'reportOnlyReportingEndpoint' in json else None, + reporting_endpoint=str(json['reportingEndpoint']) if json.get('reportingEndpoint', None) is not None else None, + report_only_reporting_endpoint=str(json['reportOnlyReportingEndpoint']) if json.get('reportOnlyReportingEndpoint', None) is not None else None, ) @@ -1942,8 +1987,8 @@ def to_json(self) -> T_JSON_DICT: @classmethod def from_json(cls, json: T_JSON_DICT) -> SecurityIsolationStatus: return cls( - coop=CrossOriginOpenerPolicyStatus.from_json(json['coop']) if 'coop' in json else None, - coep=CrossOriginEmbedderPolicyStatus.from_json(json['coep']) if 'coep' in json else None, + coop=CrossOriginOpenerPolicyStatus.from_json(json['coop']) if json.get('coop', None) is not 
None else None, + coep=CrossOriginEmbedderPolicyStatus.from_json(json['coep']) if json.get('coep', None) is not None else None, ) @@ -2094,11 +2139,11 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> LoadNetworkResourcePageResult: return cls( success=bool(json['success']), - net_error=float(json['netError']) if 'netError' in json else None, - net_error_name=str(json['netErrorName']) if 'netErrorName' in json else None, - http_status_code=float(json['httpStatusCode']) if 'httpStatusCode' in json else None, - stream=io.StreamHandle.from_json(json['stream']) if 'stream' in json else None, - headers=Headers.from_json(json['headers']) if 'headers' in json else None, + net_error=float(json['netError']) if json.get('netError', None) is not None else None, + net_error_name=str(json['netErrorName']) if json.get('netErrorName', None) is not None else None, + http_status_code=float(json['httpStatusCode']) if json.get('httpStatusCode', None) is not None else None, + stream=io.StreamHandle.from_json(json['stream']) if json.get('stream', None) is not None else None, + headers=Headers.from_json(json['headers']) if json.get('headers', None) is not None else None, ) @@ -2374,10 +2419,14 @@ def enable( json = yield cmd_dict +@deprecated(version="1.3") def get_all_cookies() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[Cookie]]: ''' Returns all browser cookies. Depending on the backend support, will return detailed cookie information in the ``cookies`` field. + Deprecated. Use Storage.getCookies instead. + + .. deprecated:: 1.3 :returns: Array of cookie objects. 
''' @@ -2959,9 +3008,9 @@ def from_json(cls, json: T_JSON_DICT) -> LoadingFailed: timestamp=MonotonicTime.from_json(json['timestamp']), type_=ResourceType.from_json(json['type']), error_text=str(json['errorText']), - canceled=bool(json['canceled']) if 'canceled' in json else None, - blocked_reason=BlockedReason.from_json(json['blockedReason']) if 'blockedReason' in json else None, - cors_error_status=CorsErrorStatus.from_json(json['corsErrorStatus']) if 'corsErrorStatus' in json else None + canceled=bool(json['canceled']) if json.get('canceled', None) is not None else None, + blocked_reason=BlockedReason.from_json(json['blockedReason']) if json.get('blockedReason', None) is not None else None, + cors_error_status=CorsErrorStatus.from_json(json['corsErrorStatus']) if json.get('corsErrorStatus', None) is not None else None ) @@ -2987,7 +3036,7 @@ def from_json(cls, json: T_JSON_DICT) -> LoadingFinished: request_id=RequestId.from_json(json['requestId']), timestamp=MonotonicTime.from_json(json['timestamp']), encoded_data_length=float(json['encodedDataLength']), - should_report_corb_blocking=bool(json['shouldReportCorbBlocking']) if 'shouldReportCorbBlocking' in json else None + should_report_corb_blocking=bool(json['shouldReportCorbBlocking']) if json.get('shouldReportCorbBlocking', None) is not None else None ) @@ -3042,13 +3091,13 @@ def from_json(cls, json: T_JSON_DICT) -> RequestIntercepted: frame_id=page.FrameId.from_json(json['frameId']), resource_type=ResourceType.from_json(json['resourceType']), is_navigation_request=bool(json['isNavigationRequest']), - is_download=bool(json['isDownload']) if 'isDownload' in json else None, - redirect_url=str(json['redirectUrl']) if 'redirectUrl' in json else None, - auth_challenge=AuthChallenge.from_json(json['authChallenge']) if 'authChallenge' in json else None, - response_error_reason=ErrorReason.from_json(json['responseErrorReason']) if 'responseErrorReason' in json else None, - 
response_status_code=int(json['responseStatusCode']) if 'responseStatusCode' in json else None, - response_headers=Headers.from_json(json['responseHeaders']) if 'responseHeaders' in json else None, - request_id=RequestId.from_json(json['requestId']) if 'requestId' in json else None + is_download=bool(json['isDownload']) if json.get('isDownload', None) is not None else None, + redirect_url=str(json['redirectUrl']) if json.get('redirectUrl', None) is not None else None, + auth_challenge=AuthChallenge.from_json(json['authChallenge']) if json.get('authChallenge', None) is not None else None, + response_error_reason=ErrorReason.from_json(json['responseErrorReason']) if json.get('responseErrorReason', None) is not None else None, + response_status_code=int(json['responseStatusCode']) if json.get('responseStatusCode', None) is not None else None, + response_headers=Headers.from_json(json['responseHeaders']) if json.get('responseHeaders', None) is not None else None, + request_id=RequestId.from_json(json['requestId']) if json.get('requestId', None) is not None else None ) @@ -3112,10 +3161,10 @@ def from_json(cls, json: T_JSON_DICT) -> RequestWillBeSent: wall_time=TimeSinceEpoch.from_json(json['wallTime']), initiator=Initiator.from_json(json['initiator']), redirect_has_extra_info=bool(json['redirectHasExtraInfo']), - redirect_response=Response.from_json(json['redirectResponse']) if 'redirectResponse' in json else None, - type_=ResourceType.from_json(json['type']) if 'type' in json else None, - frame_id=page.FrameId.from_json(json['frameId']) if 'frameId' in json else None, - has_user_gesture=bool(json['hasUserGesture']) if 'hasUserGesture' in json else None + redirect_response=Response.from_json(json['redirectResponse']) if json.get('redirectResponse', None) is not None else None, + type_=ResourceType.from_json(json['type']) if json.get('type', None) is not None else None, + frame_id=page.FrameId.from_json(json['frameId']) if json.get('frameId', None) is not None else 
None, + has_user_gesture=bool(json['hasUserGesture']) if json.get('hasUserGesture', None) is not None else None ) @@ -3195,7 +3244,7 @@ def from_json(cls, json: T_JSON_DICT) -> ResponseReceived: type_=ResourceType.from_json(json['type']), response=Response.from_json(json['response']), has_extra_info=bool(json['hasExtraInfo']), - frame_id=page.FrameId.from_json(json['frameId']) if 'frameId' in json else None + frame_id=page.FrameId.from_json(json['frameId']) if json.get('frameId', None) is not None else None ) @@ -3236,7 +3285,7 @@ def from_json(cls, json: T_JSON_DICT) -> WebSocketCreated: return cls( request_id=RequestId.from_json(json['requestId']), url=str(json['url']), - initiator=Initiator.from_json(json['initiator']) if 'initiator' in json else None + initiator=Initiator.from_json(json['initiator']) if json.get('initiator', None) is not None else None ) @@ -3374,7 +3423,7 @@ def from_json(cls, json: T_JSON_DICT) -> WebTransportCreated: transport_id=RequestId.from_json(json['transportId']), url=str(json['url']), timestamp=MonotonicTime.from_json(json['timestamp']), - initiator=Initiator.from_json(json['initiator']) if 'initiator' in json else None + initiator=Initiator.from_json(json['initiator']) if json.get('initiator', None) is not None else None ) @@ -3438,6 +3487,8 @@ class RequestWillBeSentExtraInfo: connect_timing: ConnectTiming #: The client security state set for the request. client_security_state: typing.Optional[ClientSecurityState] + #: Whether the site has partitioned cookies stored in a partition different than the current one. 
+ site_has_cookie_in_other_partition: typing.Optional[bool] @classmethod def from_json(cls, json: T_JSON_DICT) -> RequestWillBeSentExtraInfo: @@ -3446,7 +3497,8 @@ def from_json(cls, json: T_JSON_DICT) -> RequestWillBeSentExtraInfo: associated_cookies=[BlockedCookieWithReason.from_json(i) for i in json['associatedCookies']], headers=Headers.from_json(json['headers']), connect_timing=ConnectTiming.from_json(json['connectTiming']), - client_security_state=ClientSecurityState.from_json(json['clientSecurityState']) if 'clientSecurityState' in json else None + client_security_state=ClientSecurityState.from_json(json['clientSecurityState']) if json.get('clientSecurityState', None) is not None else None, + site_has_cookie_in_other_partition=bool(json['siteHasCookieInOtherPartition']) if json.get('siteHasCookieInOtherPartition', None) is not None else None ) @@ -3478,6 +3530,11 @@ class ResponseReceivedExtraInfo: #: Raw response header text as it was received over the wire. The raw text may not always be #: available, such as in the case of HTTP/2 or QUIC. headers_text: typing.Optional[str] + #: The cookie partition key that will be used to store partitioned cookies set in this response. + #: Only sent when partitioned cookies are enabled. + cookie_partition_key: typing.Optional[str] + #: True if partitioned cookies are enabled, but the partition key is not serializeable to string. 
+ cookie_partition_key_opaque: typing.Optional[bool] @classmethod def from_json(cls, json: T_JSON_DICT) -> ResponseReceivedExtraInfo: @@ -3487,7 +3544,9 @@ def from_json(cls, json: T_JSON_DICT) -> ResponseReceivedExtraInfo: headers=Headers.from_json(json['headers']), resource_ip_address_space=IPAddressSpace.from_json(json['resourceIPAddressSpace']), status_code=int(json['statusCode']), - headers_text=str(json['headersText']) if 'headersText' in json else None + headers_text=str(json['headersText']) if json.get('headersText', None) is not None else None, + cookie_partition_key=str(json['cookiePartitionKey']) if json.get('cookiePartitionKey', None) is not None else None, + cookie_partition_key_opaque=bool(json['cookiePartitionKeyOpaque']) if json.get('cookiePartitionKeyOpaque', None) is not None else None ) @@ -3522,9 +3581,9 @@ def from_json(cls, json: T_JSON_DICT) -> TrustTokenOperationDone: status=str(json['status']), type_=TrustTokenOperationType.from_json(json['type']), request_id=RequestId.from_json(json['requestId']), - top_level_origin=str(json['topLevelOrigin']) if 'topLevelOrigin' in json else None, - issuer_origin=str(json['issuerOrigin']) if 'issuerOrigin' in json else None, - issued_token_count=int(json['issuedTokenCount']) if 'issuedTokenCount' in json else None + top_level_origin=str(json['topLevelOrigin']) if json.get('topLevelOrigin', None) is not None else None, + issuer_origin=str(json['issuerOrigin']) if json.get('issuerOrigin', None) is not None else None, + issued_token_count=int(json['issuedTokenCount']) if json.get('issuedTokenCount', None) is not None else None ) @@ -3594,7 +3653,7 @@ def from_json(cls, json: T_JSON_DICT) -> SubresourceWebBundleInnerResponseParsed return cls( inner_request_id=RequestId.from_json(json['innerRequestId']), inner_request_url=str(json['innerRequestURL']), - bundle_request_id=RequestId.from_json(json['bundleRequestId']) if 'bundleRequestId' in json else None + 
bundle_request_id=RequestId.from_json(json['bundleRequestId']) if json.get('bundleRequestId', None) is not None else None ) @@ -3623,7 +3682,7 @@ def from_json(cls, json: T_JSON_DICT) -> SubresourceWebBundleInnerResponseError: inner_request_id=RequestId.from_json(json['innerRequestId']), inner_request_url=str(json['innerRequestURL']), error_message=str(json['errorMessage']), - bundle_request_id=RequestId.from_json(json['bundleRequestId']) if 'bundleRequestId' in json else None + bundle_request_id=RequestId.from_json(json['bundleRequestId']) if json.get('bundleRequestId', None) is not None else None ) diff --git a/pycdp/cdp/overlay.py b/pycdp/cdp/overlay.py index 4d72822..dad0e6a 100644 --- a/pycdp/cdp/overlay.py +++ b/pycdp/cdp/overlay.py @@ -154,26 +154,26 @@ def to_json(self) -> T_JSON_DICT: @classmethod def from_json(cls, json: T_JSON_DICT) -> GridHighlightConfig: return cls( - show_grid_extension_lines=bool(json['showGridExtensionLines']) if 'showGridExtensionLines' in json else None, - show_positive_line_numbers=bool(json['showPositiveLineNumbers']) if 'showPositiveLineNumbers' in json else None, - show_negative_line_numbers=bool(json['showNegativeLineNumbers']) if 'showNegativeLineNumbers' in json else None, - show_area_names=bool(json['showAreaNames']) if 'showAreaNames' in json else None, - show_line_names=bool(json['showLineNames']) if 'showLineNames' in json else None, - show_track_sizes=bool(json['showTrackSizes']) if 'showTrackSizes' in json else None, - grid_border_color=dom.RGBA.from_json(json['gridBorderColor']) if 'gridBorderColor' in json else None, - cell_border_color=dom.RGBA.from_json(json['cellBorderColor']) if 'cellBorderColor' in json else None, - row_line_color=dom.RGBA.from_json(json['rowLineColor']) if 'rowLineColor' in json else None, - column_line_color=dom.RGBA.from_json(json['columnLineColor']) if 'columnLineColor' in json else None, - grid_border_dash=bool(json['gridBorderDash']) if 'gridBorderDash' in json else None, - 
cell_border_dash=bool(json['cellBorderDash']) if 'cellBorderDash' in json else None, - row_line_dash=bool(json['rowLineDash']) if 'rowLineDash' in json else None, - column_line_dash=bool(json['columnLineDash']) if 'columnLineDash' in json else None, - row_gap_color=dom.RGBA.from_json(json['rowGapColor']) if 'rowGapColor' in json else None, - row_hatch_color=dom.RGBA.from_json(json['rowHatchColor']) if 'rowHatchColor' in json else None, - column_gap_color=dom.RGBA.from_json(json['columnGapColor']) if 'columnGapColor' in json else None, - column_hatch_color=dom.RGBA.from_json(json['columnHatchColor']) if 'columnHatchColor' in json else None, - area_border_color=dom.RGBA.from_json(json['areaBorderColor']) if 'areaBorderColor' in json else None, - grid_background_color=dom.RGBA.from_json(json['gridBackgroundColor']) if 'gridBackgroundColor' in json else None, + show_grid_extension_lines=bool(json['showGridExtensionLines']) if json.get('showGridExtensionLines', None) is not None else None, + show_positive_line_numbers=bool(json['showPositiveLineNumbers']) if json.get('showPositiveLineNumbers', None) is not None else None, + show_negative_line_numbers=bool(json['showNegativeLineNumbers']) if json.get('showNegativeLineNumbers', None) is not None else None, + show_area_names=bool(json['showAreaNames']) if json.get('showAreaNames', None) is not None else None, + show_line_names=bool(json['showLineNames']) if json.get('showLineNames', None) is not None else None, + show_track_sizes=bool(json['showTrackSizes']) if json.get('showTrackSizes', None) is not None else None, + grid_border_color=dom.RGBA.from_json(json['gridBorderColor']) if json.get('gridBorderColor', None) is not None else None, + cell_border_color=dom.RGBA.from_json(json['cellBorderColor']) if json.get('cellBorderColor', None) is not None else None, + row_line_color=dom.RGBA.from_json(json['rowLineColor']) if json.get('rowLineColor', None) is not None else None, + 
column_line_color=dom.RGBA.from_json(json['columnLineColor']) if json.get('columnLineColor', None) is not None else None, + grid_border_dash=bool(json['gridBorderDash']) if json.get('gridBorderDash', None) is not None else None, + cell_border_dash=bool(json['cellBorderDash']) if json.get('cellBorderDash', None) is not None else None, + row_line_dash=bool(json['rowLineDash']) if json.get('rowLineDash', None) is not None else None, + column_line_dash=bool(json['columnLineDash']) if json.get('columnLineDash', None) is not None else None, + row_gap_color=dom.RGBA.from_json(json['rowGapColor']) if json.get('rowGapColor', None) is not None else None, + row_hatch_color=dom.RGBA.from_json(json['rowHatchColor']) if json.get('rowHatchColor', None) is not None else None, + column_gap_color=dom.RGBA.from_json(json['columnGapColor']) if json.get('columnGapColor', None) is not None else None, + column_hatch_color=dom.RGBA.from_json(json['columnHatchColor']) if json.get('columnHatchColor', None) is not None else None, + area_border_color=dom.RGBA.from_json(json['areaBorderColor']) if json.get('areaBorderColor', None) is not None else None, + grid_background_color=dom.RGBA.from_json(json['gridBackgroundColor']) if json.get('gridBackgroundColor', None) is not None else None, ) @@ -229,14 +229,14 @@ def to_json(self) -> T_JSON_DICT: @classmethod def from_json(cls, json: T_JSON_DICT) -> FlexContainerHighlightConfig: return cls( - container_border=LineStyle.from_json(json['containerBorder']) if 'containerBorder' in json else None, - line_separator=LineStyle.from_json(json['lineSeparator']) if 'lineSeparator' in json else None, - item_separator=LineStyle.from_json(json['itemSeparator']) if 'itemSeparator' in json else None, - main_distributed_space=BoxStyle.from_json(json['mainDistributedSpace']) if 'mainDistributedSpace' in json else None, - cross_distributed_space=BoxStyle.from_json(json['crossDistributedSpace']) if 'crossDistributedSpace' in json else None, - 
row_gap_space=BoxStyle.from_json(json['rowGapSpace']) if 'rowGapSpace' in json else None, - column_gap_space=BoxStyle.from_json(json['columnGapSpace']) if 'columnGapSpace' in json else None, - cross_alignment=LineStyle.from_json(json['crossAlignment']) if 'crossAlignment' in json else None, + container_border=LineStyle.from_json(json['containerBorder']) if json.get('containerBorder', None) is not None else None, + line_separator=LineStyle.from_json(json['lineSeparator']) if json.get('lineSeparator', None) is not None else None, + item_separator=LineStyle.from_json(json['itemSeparator']) if json.get('itemSeparator', None) is not None else None, + main_distributed_space=BoxStyle.from_json(json['mainDistributedSpace']) if json.get('mainDistributedSpace', None) is not None else None, + cross_distributed_space=BoxStyle.from_json(json['crossDistributedSpace']) if json.get('crossDistributedSpace', None) is not None else None, + row_gap_space=BoxStyle.from_json(json['rowGapSpace']) if json.get('rowGapSpace', None) is not None else None, + column_gap_space=BoxStyle.from_json(json['columnGapSpace']) if json.get('columnGapSpace', None) is not None else None, + cross_alignment=LineStyle.from_json(json['crossAlignment']) if json.get('crossAlignment', None) is not None else None, ) @@ -267,9 +267,9 @@ def to_json(self) -> T_JSON_DICT: @classmethod def from_json(cls, json: T_JSON_DICT) -> FlexItemHighlightConfig: return cls( - base_size_box=BoxStyle.from_json(json['baseSizeBox']) if 'baseSizeBox' in json else None, - base_size_border=LineStyle.from_json(json['baseSizeBorder']) if 'baseSizeBorder' in json else None, - flexibility_arrow=LineStyle.from_json(json['flexibilityArrow']) if 'flexibilityArrow' in json else None, + base_size_box=BoxStyle.from_json(json['baseSizeBox']) if json.get('baseSizeBox', None) is not None else None, + base_size_border=LineStyle.from_json(json['baseSizeBorder']) if json.get('baseSizeBorder', None) is not None else None, + 
flexibility_arrow=LineStyle.from_json(json['flexibilityArrow']) if json.get('flexibilityArrow', None) is not None else None, ) @@ -295,8 +295,8 @@ def to_json(self) -> T_JSON_DICT: @classmethod def from_json(cls, json: T_JSON_DICT) -> LineStyle: return cls( - color=dom.RGBA.from_json(json['color']) if 'color' in json else None, - pattern=str(json['pattern']) if 'pattern' in json else None, + color=dom.RGBA.from_json(json['color']) if json.get('color', None) is not None else None, + pattern=str(json['pattern']) if json.get('pattern', None) is not None else None, ) @@ -322,8 +322,8 @@ def to_json(self) -> T_JSON_DICT: @classmethod def from_json(cls, json: T_JSON_DICT) -> BoxStyle: return cls( - fill_color=dom.RGBA.from_json(json['fillColor']) if 'fillColor' in json else None, - hatch_color=dom.RGBA.from_json(json['hatchColor']) if 'hatchColor' in json else None, + fill_color=dom.RGBA.from_json(json['fillColor']) if json.get('fillColor', None) is not None else None, + hatch_color=dom.RGBA.from_json(json['hatchColor']) if json.get('hatchColor', None) is not None else None, ) @@ -447,31 +447,32 @@ def to_json(self) -> T_JSON_DICT: @classmethod def from_json(cls, json: T_JSON_DICT) -> HighlightConfig: return cls( - show_info=bool(json['showInfo']) if 'showInfo' in json else None, - show_styles=bool(json['showStyles']) if 'showStyles' in json else None, - show_rulers=bool(json['showRulers']) if 'showRulers' in json else None, - show_accessibility_info=bool(json['showAccessibilityInfo']) if 'showAccessibilityInfo' in json else None, - show_extension_lines=bool(json['showExtensionLines']) if 'showExtensionLines' in json else None, - content_color=dom.RGBA.from_json(json['contentColor']) if 'contentColor' in json else None, - padding_color=dom.RGBA.from_json(json['paddingColor']) if 'paddingColor' in json else None, - border_color=dom.RGBA.from_json(json['borderColor']) if 'borderColor' in json else None, - margin_color=dom.RGBA.from_json(json['marginColor']) if 
'marginColor' in json else None, - event_target_color=dom.RGBA.from_json(json['eventTargetColor']) if 'eventTargetColor' in json else None, - shape_color=dom.RGBA.from_json(json['shapeColor']) if 'shapeColor' in json else None, - shape_margin_color=dom.RGBA.from_json(json['shapeMarginColor']) if 'shapeMarginColor' in json else None, - css_grid_color=dom.RGBA.from_json(json['cssGridColor']) if 'cssGridColor' in json else None, - color_format=ColorFormat.from_json(json['colorFormat']) if 'colorFormat' in json else None, - grid_highlight_config=GridHighlightConfig.from_json(json['gridHighlightConfig']) if 'gridHighlightConfig' in json else None, - flex_container_highlight_config=FlexContainerHighlightConfig.from_json(json['flexContainerHighlightConfig']) if 'flexContainerHighlightConfig' in json else None, - flex_item_highlight_config=FlexItemHighlightConfig.from_json(json['flexItemHighlightConfig']) if 'flexItemHighlightConfig' in json else None, - contrast_algorithm=ContrastAlgorithm.from_json(json['contrastAlgorithm']) if 'contrastAlgorithm' in json else None, - container_query_container_highlight_config=ContainerQueryContainerHighlightConfig.from_json(json['containerQueryContainerHighlightConfig']) if 'containerQueryContainerHighlightConfig' in json else None, + show_info=bool(json['showInfo']) if json.get('showInfo', None) is not None else None, + show_styles=bool(json['showStyles']) if json.get('showStyles', None) is not None else None, + show_rulers=bool(json['showRulers']) if json.get('showRulers', None) is not None else None, + show_accessibility_info=bool(json['showAccessibilityInfo']) if json.get('showAccessibilityInfo', None) is not None else None, + show_extension_lines=bool(json['showExtensionLines']) if json.get('showExtensionLines', None) is not None else None, + content_color=dom.RGBA.from_json(json['contentColor']) if json.get('contentColor', None) is not None else None, + padding_color=dom.RGBA.from_json(json['paddingColor']) if 
json.get('paddingColor', None) is not None else None, + border_color=dom.RGBA.from_json(json['borderColor']) if json.get('borderColor', None) is not None else None, + margin_color=dom.RGBA.from_json(json['marginColor']) if json.get('marginColor', None) is not None else None, + event_target_color=dom.RGBA.from_json(json['eventTargetColor']) if json.get('eventTargetColor', None) is not None else None, + shape_color=dom.RGBA.from_json(json['shapeColor']) if json.get('shapeColor', None) is not None else None, + shape_margin_color=dom.RGBA.from_json(json['shapeMarginColor']) if json.get('shapeMarginColor', None) is not None else None, + css_grid_color=dom.RGBA.from_json(json['cssGridColor']) if json.get('cssGridColor', None) is not None else None, + color_format=ColorFormat.from_json(json['colorFormat']) if json.get('colorFormat', None) is not None else None, + grid_highlight_config=GridHighlightConfig.from_json(json['gridHighlightConfig']) if json.get('gridHighlightConfig', None) is not None else None, + flex_container_highlight_config=FlexContainerHighlightConfig.from_json(json['flexContainerHighlightConfig']) if json.get('flexContainerHighlightConfig', None) is not None else None, + flex_item_highlight_config=FlexItemHighlightConfig.from_json(json['flexItemHighlightConfig']) if json.get('flexItemHighlightConfig', None) is not None else None, + contrast_algorithm=ContrastAlgorithm.from_json(json['contrastAlgorithm']) if json.get('contrastAlgorithm', None) is not None else None, + container_query_container_highlight_config=ContainerQueryContainerHighlightConfig.from_json(json['containerQueryContainerHighlightConfig']) if json.get('containerQueryContainerHighlightConfig', None) is not None else None, ) class ColorFormat(enum.Enum): RGB = "rgb" HSL = "hsl" + HWB = "hwb" HEX_ = "hex" def to_json(self) -> str: @@ -558,10 +559,10 @@ def to_json(self) -> T_JSON_DICT: @classmethod def from_json(cls, json: T_JSON_DICT) -> ScrollSnapContainerHighlightConfig: return cls( - 
snapport_border=LineStyle.from_json(json['snapportBorder']) if 'snapportBorder' in json else None, - snap_area_border=LineStyle.from_json(json['snapAreaBorder']) if 'snapAreaBorder' in json else None, - scroll_margin_color=dom.RGBA.from_json(json['scrollMarginColor']) if 'scrollMarginColor' in json else None, - scroll_padding_color=dom.RGBA.from_json(json['scrollPaddingColor']) if 'scrollPaddingColor' in json else None, + snapport_border=LineStyle.from_json(json['snapportBorder']) if json.get('snapportBorder', None) is not None else None, + snap_area_border=LineStyle.from_json(json['snapAreaBorder']) if json.get('snapAreaBorder', None) is not None else None, + scroll_margin_color=dom.RGBA.from_json(json['scrollMarginColor']) if json.get('scrollMarginColor', None) is not None else None, + scroll_padding_color=dom.RGBA.from_json(json['scrollPaddingColor']) if json.get('scrollPaddingColor', None) is not None else None, ) @@ -614,8 +615,8 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> HingeConfig: return cls( rect=dom.Rect.from_json(json['rect']), - content_color=dom.RGBA.from_json(json['contentColor']) if 'contentColor' in json else None, - outline_color=dom.RGBA.from_json(json['outlineColor']) if 'outlineColor' in json else None, + content_color=dom.RGBA.from_json(json['contentColor']) if json.get('contentColor', None) is not None else None, + outline_color=dom.RGBA.from_json(json['outlineColor']) if json.get('outlineColor', None) is not None else None, ) @@ -660,8 +661,8 @@ def to_json(self) -> T_JSON_DICT: @classmethod def from_json(cls, json: T_JSON_DICT) -> ContainerQueryContainerHighlightConfig: return cls( - container_border=LineStyle.from_json(json['containerBorder']) if 'containerBorder' in json else None, - descendant_border=LineStyle.from_json(json['descendantBorder']) if 'descendantBorder' in json else None, + container_border=LineStyle.from_json(json['containerBorder']) if json.get('containerBorder', None) is not None else 
None, + descendant_border=LineStyle.from_json(json['descendantBorder']) if json.get('descendantBorder', None) is not None else None, ) @@ -711,9 +712,9 @@ def to_json(self) -> T_JSON_DICT: @classmethod def from_json(cls, json: T_JSON_DICT) -> IsolationModeHighlightConfig: return cls( - resizer_color=dom.RGBA.from_json(json['resizerColor']) if 'resizerColor' in json else None, - resizer_handle_color=dom.RGBA.from_json(json['resizerHandleColor']) if 'resizerHandleColor' in json else None, - mask_color=dom.RGBA.from_json(json['maskColor']) if 'maskColor' in json else None, + resizer_color=dom.RGBA.from_json(json['resizerColor']) if json.get('resizerColor', None) is not None else None, + resizer_handle_color=dom.RGBA.from_json(json['resizerHandleColor']) if json.get('resizerHandleColor', None) is not None else None, + mask_color=dom.RGBA.from_json(json['maskColor']) if json.get('maskColor', None) is not None else None, ) diff --git a/pycdp/cdp/page.py b/pycdp/cdp/page.py index 44e93e8..c2f3a21 100644 --- a/pycdp/cdp/page.py +++ b/pycdp/cdp/page.py @@ -84,7 +84,34 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> AdFrameStatus: return cls( ad_frame_type=AdFrameType.from_json(json['adFrameType']), - explanations=[AdFrameExplanation.from_json(i) for i in json['explanations']] if 'explanations' in json else None, + explanations=[AdFrameExplanation.from_json(i) for i in json['explanations']] if json.get('explanations', None) is not None else None, + ) + + +@dataclass +class AdScriptId: + ''' + Identifies the bottom-most script which caused the frame to be labelled + as an ad. + ''' + #: Script Id of the bottom-most script which caused the frame to be labelled + #: as an ad. + script_id: runtime.ScriptId + + #: Id of adScriptId's debugger. 
+ debugger_id: runtime.UniqueDebuggerId + + def to_json(self) -> T_JSON_DICT: + json: T_JSON_DICT = dict() + json['scriptId'] = self.script_id.to_json() + json['debuggerId'] = self.debugger_id.to_json() + return json + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> AdScriptId: + return cls( + script_id=runtime.ScriptId.from_json(json['scriptId']), + debugger_id=runtime.UniqueDebuggerId.from_json(json['debuggerId']), ) @@ -144,13 +171,17 @@ class PermissionsPolicyFeature(enum.Enum): AMBIENT_LIGHT_SENSOR = "ambient-light-sensor" ATTRIBUTION_REPORTING = "attribution-reporting" AUTOPLAY = "autoplay" + BLUETOOTH = "bluetooth" + BROWSING_TOPICS = "browsing-topics" CAMERA = "camera" CH_DPR = "ch-dpr" CH_DEVICE_MEMORY = "ch-device-memory" CH_DOWNLINK = "ch-downlink" CH_ECT = "ch-ect" CH_PREFERS_COLOR_SCHEME = "ch-prefers-color-scheme" + CH_PREFERS_REDUCED_MOTION = "ch-prefers-reduced-motion" CH_RTT = "ch-rtt" + CH_SAVE_DATA = "ch-save-data" CH_UA = "ch-ua" CH_UA_ARCH = "ch-ua-arch" CH_UA_BITNESS = "ch-ua-bitness" @@ -166,9 +197,9 @@ class PermissionsPolicyFeature(enum.Enum): CH_VIEWPORT_HEIGHT = "ch-viewport-height" CH_VIEWPORT_WIDTH = "ch-viewport-width" CH_WIDTH = "ch-width" - CH_PARTITIONED_COOKIES = "ch-partitioned-cookies" CLIPBOARD_READ = "clipboard-read" CLIPBOARD_WRITE = "clipboard-write" + COMPUTE_PRESSURE = "compute-pressure" CROSS_ORIGIN_ISOLATED = "cross-origin-isolated" DIRECT_SOCKETS = "direct-sockets" DISPLAY_CAPTURE = "display-capture" @@ -183,26 +214,35 @@ class PermissionsPolicyFeature(enum.Enum): GEOLOCATION = "geolocation" GYROSCOPE = "gyroscope" HID = "hid" + IDENTITY_CREDENTIALS_GET = "identity-credentials-get" IDLE_DETECTION = "idle-detection" + INTEREST_COHORT = "interest-cohort" JOIN_AD_INTEREST_GROUP = "join-ad-interest-group" KEYBOARD_MAP = "keyboard-map" + LOCAL_FONTS = "local-fonts" MAGNETOMETER = "magnetometer" MICROPHONE = "microphone" MIDI = "midi" OTP_CREDENTIALS = "otp-credentials" PAYMENT = "payment" PICTURE_IN_PICTURE = 
"picture-in-picture" + PRIVATE_AGGREGATION = "private-aggregation" PUBLICKEY_CREDENTIALS_GET = "publickey-credentials-get" RUN_AD_AUCTION = "run-ad-auction" SCREEN_WAKE_LOCK = "screen-wake-lock" SERIAL = "serial" SHARED_AUTOFILL = "shared-autofill" - STORAGE_ACCESS_API = "storage-access-api" + SHARED_STORAGE = "shared-storage" + SHARED_STORAGE_SELECT_URL = "shared-storage-select-url" + SMART_CARD = "smart-card" + STORAGE_ACCESS = "storage-access" SYNC_XHR = "sync-xhr" TRUST_TOKEN_REDEMPTION = "trust-token-redemption" + UNLOAD = "unload" USB = "usb" VERTICAL_SCROLL = "vertical-scroll" WEB_SHARE = "web-share" + WINDOW_MANAGEMENT = "window-management" WINDOW_PLACEMENT = "window-placement" XR_SPATIAL_TRACKING = "xr-spatial-tracking" @@ -221,6 +261,7 @@ class PermissionsPolicyBlockReason(enum.Enum): HEADER = "Header" IFRAME_ATTRIBUTE = "IframeAttribute" IN_FENCED_FRAME_TREE = "InFencedFrameTree" + IN_ISOLATED_APP = "InIsolatedApp" def to_json(self) -> str: return self.value @@ -271,7 +312,7 @@ def from_json(cls, json: T_JSON_DICT) -> PermissionsPolicyFeatureState: return cls( feature=PermissionsPolicyFeature.from_json(json['feature']), allowed=bool(json['allowed']), - locator=PermissionsPolicyBlockLocator.from_json(json['locator']) if 'locator' in json else None, + locator=PermissionsPolicyBlockLocator.from_json(json['locator']) if json.get('locator', None) is not None else None, ) @@ -389,7 +430,7 @@ def from_json(cls, json: T_JSON_DICT) -> OriginTrialTokenWithStatus: return cls( raw_token_text=str(json['rawTokenText']), status=OriginTrialTokenStatus.from_json(json['status']), - parsed_token=OriginTrialToken.from_json(json['parsedToken']) if 'parsedToken' in json else None, + parsed_token=OriginTrialToken.from_json(json['parsedToken']) if json.get('parsedToken', None) is not None else None, ) @@ -502,11 +543,11 @@ def from_json(cls, json: T_JSON_DICT) -> Frame: secure_context_type=SecureContextType.from_json(json['secureContextType']), 
cross_origin_isolated_context_type=CrossOriginIsolatedContextType.from_json(json['crossOriginIsolatedContextType']), gated_api_features=[GatedAPIFeatures.from_json(i) for i in json['gatedAPIFeatures']], - parent_id=FrameId.from_json(json['parentId']) if 'parentId' in json else None, - name=str(json['name']) if 'name' in json else None, - url_fragment=str(json['urlFragment']) if 'urlFragment' in json else None, - unreachable_url=str(json['unreachableUrl']) if 'unreachableUrl' in json else None, - ad_frame_status=AdFrameStatus.from_json(json['adFrameStatus']) if 'adFrameStatus' in json else None, + parent_id=FrameId.from_json(json['parentId']) if json.get('parentId', None) is not None else None, + name=str(json['name']) if json.get('name', None) is not None else None, + url_fragment=str(json['urlFragment']) if json.get('urlFragment', None) is not None else None, + unreachable_url=str(json['unreachableUrl']) if json.get('unreachableUrl', None) is not None else None, + ad_frame_status=AdFrameStatus.from_json(json['adFrameStatus']) if json.get('adFrameStatus', None) is not None else None, ) @@ -557,10 +598,10 @@ def from_json(cls, json: T_JSON_DICT) -> FrameResource: url=str(json['url']), type_=network.ResourceType.from_json(json['type']), mime_type=str(json['mimeType']), - last_modified=network.TimeSinceEpoch.from_json(json['lastModified']) if 'lastModified' in json else None, - content_size=float(json['contentSize']) if 'contentSize' in json else None, - failed=bool(json['failed']) if 'failed' in json else None, - canceled=bool(json['canceled']) if 'canceled' in json else None, + last_modified=network.TimeSinceEpoch.from_json(json['lastModified']) if json.get('lastModified', None) is not None else None, + content_size=float(json['contentSize']) if json.get('contentSize', None) is not None else None, + failed=bool(json['failed']) if json.get('failed', None) is not None else None, + canceled=bool(json['canceled']) if json.get('canceled', None) is not None else None, ) 
@@ -591,7 +632,7 @@ def from_json(cls, json: T_JSON_DICT) -> FrameResourceTree: return cls( frame=Frame.from_json(json['frame']), resources=[FrameResource.from_json(i) for i in json['resources']], - child_frames=[FrameResourceTree.from_json(i) for i in json['childFrames']] if 'childFrames' in json else None, + child_frames=[FrameResourceTree.from_json(i) for i in json['childFrames']] if json.get('childFrames', None) is not None else None, ) @@ -617,7 +658,7 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> FrameTree: return cls( frame=Frame.from_json(json['frame']), - child_frames=[FrameTree.from_json(i) for i in json['childFrames']] if 'childFrames' in json else None, + child_frames=[FrameTree.from_json(i) for i in json['childFrames']] if json.get('childFrames', None) is not None else None, ) @@ -749,7 +790,7 @@ def from_json(cls, json: T_JSON_DICT) -> ScreencastFrameMetadata: device_height=float(json['deviceHeight']), scroll_offset_x=float(json['scrollOffsetX']), scroll_offset_y=float(json['scrollOffsetY']), - timestamp=network.TimeSinceEpoch.from_json(json['timestamp']) if 'timestamp' in json else None, + timestamp=network.TimeSinceEpoch.from_json(json['timestamp']) if json.get('timestamp', None) is not None else None, ) @@ -912,7 +953,7 @@ def from_json(cls, json: T_JSON_DICT) -> VisualViewport: client_width=float(json['clientWidth']), client_height=float(json['clientHeight']), scale=float(json['scale']), - zoom=float(json['zoom']) if 'zoom' in json else None, + zoom=float(json['zoom']) if json.get('zoom', None) is not None else None, ) @@ -979,8 +1020,8 @@ class FontFamilies: #: The fantasy font-family. fantasy: typing.Optional[str] = None - #: The pictograph font-family. - pictograph: typing.Optional[str] = None + #: The math font-family. 
+ math: typing.Optional[str] = None def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() @@ -996,20 +1037,20 @@ def to_json(self) -> T_JSON_DICT: json['cursive'] = self.cursive if self.fantasy is not None: json['fantasy'] = self.fantasy - if self.pictograph is not None: - json['pictograph'] = self.pictograph + if self.math is not None: + json['math'] = self.math return json @classmethod def from_json(cls, json: T_JSON_DICT) -> FontFamilies: return cls( - standard=str(json['standard']) if 'standard' in json else None, - fixed=str(json['fixed']) if 'fixed' in json else None, - serif=str(json['serif']) if 'serif' in json else None, - sans_serif=str(json['sansSerif']) if 'sansSerif' in json else None, - cursive=str(json['cursive']) if 'cursive' in json else None, - fantasy=str(json['fantasy']) if 'fantasy' in json else None, - pictograph=str(json['pictograph']) if 'pictograph' in json else None, + standard=str(json['standard']) if json.get('standard', None) is not None else None, + fixed=str(json['fixed']) if json.get('fixed', None) is not None else None, + serif=str(json['serif']) if json.get('serif', None) is not None else None, + sans_serif=str(json['sansSerif']) if json.get('sansSerif', None) is not None else None, + cursive=str(json['cursive']) if json.get('cursive', None) is not None else None, + fantasy=str(json['fantasy']) if json.get('fantasy', None) is not None else None, + math=str(json['math']) if json.get('math', None) is not None else None, ) @@ -1060,8 +1101,8 @@ def to_json(self) -> T_JSON_DICT: @classmethod def from_json(cls, json: T_JSON_DICT) -> FontSizes: return cls( - standard=int(json['standard']) if 'standard' in json else None, - fixed=int(json['fixed']) if 'fixed' in json else None, + standard=int(json['standard']) if json.get('standard', None) is not None else None, + fixed=int(json['fixed']) if json.get('fixed', None) is not None else None, ) @@ -1188,10 +1229,27 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: 
T_JSON_DICT) -> CompilationCacheParams: return cls( url=str(json['url']), - eager=bool(json['eager']) if 'eager' in json else None, + eager=bool(json['eager']) if json.get('eager', None) is not None else None, ) +class AutoResponseMode(enum.Enum): + ''' + Enum of possible auto-reponse for permisison / prompt dialogs. + ''' + NONE = "none" + AUTO_ACCEPT = "autoAccept" + AUTO_REJECT = "autoReject" + AUTO_OPT_OUT = "autoOptOut" + + def to_json(self) -> str: + return self.value + + @classmethod + def from_json(cls, json: str) -> AutoResponseMode: + return cls(json) + + class NavigationType(enum.Enum): ''' The type of a frameNavigated event. @@ -1227,7 +1285,6 @@ class BackForwardCacheNotRestoredReason(enum.Enum): JAVA_SCRIPT_EXECUTION = "JavaScriptExecution" RENDERER_PROCESS_KILLED = "RendererProcessKilled" RENDERER_PROCESS_CRASHED = "RendererProcessCrashed" - GRANTED_MEDIA_STREAM_ACCESS = "GrantedMediaStreamAccess" SCHEDULER_TRACKED_FEATURE_USED = "SchedulerTrackedFeatureUsed" CONFLICTING_BROWSING_INSTANCE = "ConflictingBrowsingInstance" CACHE_FLUSHED = "CacheFlushed" @@ -1254,7 +1311,6 @@ class BackForwardCacheNotRestoredReason(enum.Enum): FOREGROUND_CACHE_LIMIT = "ForegroundCacheLimit" BROWSING_INSTANCE_NOT_SWAPPED = "BrowsingInstanceNotSwapped" BACK_FORWARD_CACHE_DISABLED_FOR_DELEGATE = "BackForwardCacheDisabledForDelegate" - OPT_IN_UNLOAD_HEADER_NOT_PRESENT = "OptInUnloadHeaderNotPresent" UNLOAD_HANDLER_EXISTS_IN_MAIN_FRAME = "UnloadHandlerExistsInMainFrame" UNLOAD_HANDLER_EXISTS_IN_SUB_FRAME = "UnloadHandlerExistsInSubFrame" SERVICE_WORKER_UNREGISTRATION = "ServiceWorkerUnregistration" @@ -1264,6 +1320,8 @@ class BackForwardCacheNotRestoredReason(enum.Enum): NO_RESPONSE_HEAD = "NoResponseHead" UNKNOWN = "Unknown" ACTIVATION_NAVIGATIONS_DISALLOWED_FOR_BUG1234857 = "ActivationNavigationsDisallowedForBug1234857" + ERROR_DOCUMENT = "ErrorDocument" + FENCED_FRAMES_EMBEDDER = "FencedFramesEmbedder" WEB_SOCKET = "WebSocket" WEB_TRANSPORT = "WebTransport" WEB_RTC = 
"WebRTC" @@ -1276,7 +1334,6 @@ class BackForwardCacheNotRestoredReason(enum.Enum): DEDICATED_WORKER_OR_WORKLET = "DedicatedWorkerOrWorklet" OUTSTANDING_NETWORK_REQUEST_OTHERS = "OutstandingNetworkRequestOthers" OUTSTANDING_INDEXED_DB_TRANSACTION = "OutstandingIndexedDBTransaction" - REQUESTED_NOTIFICATIONS_PERMISSION = "RequestedNotificationsPermission" REQUESTED_MIDI_PERMISSION = "RequestedMIDIPermission" REQUESTED_AUDIO_CAPTURE_PERMISSION = "RequestedAudioCapturePermission" REQUESTED_VIDEO_CAPTURE_PERMISSION = "RequestedVideoCapturePermission" @@ -1307,7 +1364,10 @@ class BackForwardCacheNotRestoredReason(enum.Enum): OUTSTANDING_NETWORK_REQUEST_DIRECT_SOCKET = "OutstandingNetworkRequestDirectSocket" INJECTED_JAVASCRIPT = "InjectedJavascript" INJECTED_STYLE_SHEET = "InjectedStyleSheet" + KEEPALIVE_REQUEST = "KeepaliveRequest" + INDEXED_DB_EVENT = "IndexedDBEvent" DUMMY = "Dummy" + AUTHORIZATION_HEADER = "AuthorizationHeader" CONTENT_SECURITY_HANDLER = "ContentSecurityHandler" CONTENT_WEB_AUTHENTICATION_API = "ContentWebAuthenticationAPI" CONTENT_FILE_CHOOSER = "ContentFileChooser" @@ -1316,7 +1376,6 @@ class BackForwardCacheNotRestoredReason(enum.Enum): CONTENT_MEDIA_DEVICES_DISPATCHER_HOST = "ContentMediaDevicesDispatcherHost" CONTENT_WEB_BLUETOOTH = "ContentWebBluetooth" CONTENT_WEB_USB = "ContentWebUSB" - CONTENT_MEDIA_SESSION = "ContentMediaSession" CONTENT_MEDIA_SESSION_SERVICE = "ContentMediaSessionService" CONTENT_SCREEN_READER = "ContentScreenReader" EMBEDDER_POPUP_BLOCKER_TAB_HELPER = "EmbedderPopupBlockerTabHelper" @@ -1367,10 +1426,17 @@ class BackForwardCacheNotRestoredExplanation: #: Not restored reason reason: BackForwardCacheNotRestoredReason + #: Context associated with the reason. The meaning of this context is + #: dependent on the reason: + #: - EmbedderExtensionSentMessageToCachedFrame: the extension ID. 
+ context: typing.Optional[str] = None + def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() json['type'] = self.type_.to_json() json['reason'] = self.reason.to_json() + if self.context is not None: + json['context'] = self.context return json @classmethod @@ -1378,6 +1444,7 @@ def from_json(cls, json: T_JSON_DICT) -> BackForwardCacheNotRestoredExplanation: return cls( type_=BackForwardCacheNotRestoredReasonType.from_json(json['type']), reason=BackForwardCacheNotRestoredReason.from_json(json['reason']), + context=str(json['context']) if json.get('context', None) is not None else None, ) @@ -1474,7 +1541,8 @@ def capture_screenshot( quality: typing.Optional[int] = None, clip: typing.Optional[Viewport] = None, from_surface: typing.Optional[bool] = None, - capture_beyond_viewport: typing.Optional[bool] = None + capture_beyond_viewport: typing.Optional[bool] = None, + optimize_for_speed: typing.Optional[bool] = None ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,str]: ''' Capture page screenshot. @@ -1484,6 +1552,7 @@ def capture_screenshot( :param clip: *(Optional)* Capture the screenshot of a given region only. :param from_surface: **(EXPERIMENTAL)** *(Optional)* Capture the screenshot from the surface, rather than the view. Defaults to true. :param capture_beyond_viewport: **(EXPERIMENTAL)** *(Optional)* Capture the screenshot beyond the viewport. Defaults to false. + :param optimize_for_speed: **(EXPERIMENTAL)** *(Optional)* Optimize image encoding for speed, not for resulting size (defaults to false) :returns: Base64-encoded image data. 
(Encoded as a base64 string when passed over JSON) ''' params: T_JSON_DICT = dict() @@ -1497,6 +1566,8 @@ def capture_screenshot( params['fromSurface'] = from_surface if capture_beyond_viewport is not None: params['captureBeyondViewport'] = capture_beyond_viewport + if optimize_for_speed is not None: + params['optimizeForSpeed'] = optimize_for_speed cmd_dict: T_JSON_DICT = { 'method': 'Page.captureScreenshot', 'params': params, @@ -1661,8 +1732,8 @@ def get_app_manifest() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[ return ( str(json['url']), [AppManifestError.from_json(i) for i in json['errors']], - str(json['data']) if 'data' in json else None, - AppManifestParsedProperties.from_json(json['parsed']) if 'parsed' in json else None + str(json['data']) if json.get('data', None) is not None else None, + AppManifestParsedProperties.from_json(json['parsed']) if json.get('parsed', None) is not None else None ) @@ -1681,9 +1752,12 @@ def get_installability_errors() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typi return [InstallabilityError.from_json(i) for i in json['installabilityErrors']] +@deprecated(version="1.3") def get_manifest_icons() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Optional[str]]: ''' + Deprecated because it's not guaranteed that the returned icon is in fact the one used for PWA installation. + .. 
deprecated:: 1.3 **EXPERIMENTAL** @@ -1693,7 +1767,7 @@ def get_manifest_icons() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Opti 'method': 'Page.getManifestIcons', } json = yield cmd_dict - return str(json['primaryIcon']) if 'primaryIcon' in json else None + return str(json['primaryIcon']) if json.get('primaryIcon', None) is not None else None def get_app_id() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.Optional[str], typing.Optional[str]]]: @@ -1713,16 +1787,38 @@ def get_app_id() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing } json = yield cmd_dict return ( - str(json['appId']) if 'appId' in json else None, - str(json['recommendedId']) if 'recommendedId' in json else None + str(json['appId']) if json.get('appId', None) is not None else None, + str(json['recommendedId']) if json.get('recommendedId', None) is not None else None ) +def get_ad_script_id( + frame_id: FrameId + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Optional[AdScriptId]]: + ''' + + + **EXPERIMENTAL** + + :param frame_id: + :returns: *(Optional)* Identifies the bottom-most script which caused the frame to be labelled as an ad. Only sent if frame is labelled as an ad and id is available. + ''' + params: T_JSON_DICT = dict() + params['frameId'] = frame_id.to_json() + cmd_dict: T_JSON_DICT = { + 'method': 'Page.getAdScriptId', + 'params': params, + } + json = yield cmd_dict + return AdScriptId.from_json(json['adScriptId']) if json.get('adScriptId', None) is not None else None + + @deprecated(version="1.3") def get_cookies() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[network.Cookie]]: ''' - Returns all browser cookies. Depending on the backend support, will return detailed cookie - information in the ``cookies`` field. + Returns all browser cookies for the page and all of its subframes. Depending + on the backend support, will return detailed cookie information in the + ``cookies`` field. .. 
deprecated:: 1.3 @@ -1756,9 +1852,9 @@ def get_layout_metrics() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tupl :returns: A tuple with the following items: - 0. **layoutViewport** - Deprecated metrics relating to the layout viewport. Can be in DP or in CSS pixels depending on the ``enable-use-zoom-for-dsf`` flag. Use ``cssLayoutViewport`` instead. - 1. **visualViewport** - Deprecated metrics relating to the visual viewport. Can be in DP or in CSS pixels depending on the ``enable-use-zoom-for-dsf`` flag. Use ``cssVisualViewport`` instead. - 2. **contentSize** - Deprecated size of scrollable area. Can be in DP or in CSS pixels depending on the ``enable-use-zoom-for-dsf`` flag. Use ``cssContentSize`` instead. + 0. **layoutViewport** - Deprecated metrics relating to the layout viewport. Is in device pixels. Use ``cssLayoutViewport`` instead. + 1. **visualViewport** - Deprecated metrics relating to the visual viewport. Is in device pixels. Use ``cssVisualViewport`` instead. + 2. **contentSize** - Deprecated size of scrollable area. Is in DP. Use ``cssContentSize`` instead. 3. **cssLayoutViewport** - Metrics relating to the layout viewport in CSS pixels. 4. **cssVisualViewport** - Metrics relating to the visual viewport in CSS pixels. 5. **cssContentSize** - Size of scrollable area in CSS pixels. @@ -1890,7 +1986,7 @@ def navigate( :returns: A tuple with the following items: 0. **frameId** - Frame id that has navigated (or failed to navigate) - 1. **loaderId** - *(Optional)* Loader identifier. + 1. **loaderId** - *(Optional)* Loader identifier. This is omitted in case of same-document navigation, as the previously committed loaderId would not change. 2. **errorText** - *(Optional)* User friendly error message, present if and only if navigation has failed. 
''' params: T_JSON_DICT = dict() @@ -1910,8 +2006,8 @@ def navigate( json = yield cmd_dict return ( FrameId.from_json(json['frameId']), - network.LoaderId.from_json(json['loaderId']) if 'loaderId' in json else None, - str(json['errorText']) if 'errorText' in json else None + network.LoaderId.from_json(json['loaderId']) if json.get('loaderId', None) is not None else None, + str(json['errorText']) if json.get('errorText', None) is not None else None ) @@ -1944,7 +2040,6 @@ def print_to_pdf( margin_left: typing.Optional[float] = None, margin_right: typing.Optional[float] = None, page_ranges: typing.Optional[str] = None, - ignore_invalid_page_ranges: typing.Optional[bool] = None, header_template: typing.Optional[str] = None, footer_template: typing.Optional[str] = None, prefer_css_page_size: typing.Optional[bool] = None, @@ -1963,8 +2058,7 @@ def print_to_pdf( :param margin_bottom: *(Optional)* Bottom margin in inches. Defaults to 1cm (~0.4 inches). :param margin_left: *(Optional)* Left margin in inches. Defaults to 1cm (~0.4 inches). :param margin_right: *(Optional)* Right margin in inches. Defaults to 1cm (~0.4 inches). - :param page_ranges: *(Optional)* Paper ranges to print, e.g., '1-5, 8, 11-13'. Defaults to the empty string, which means print all pages. - :param ignore_invalid_page_ranges: *(Optional)* Whether to silently ignore invalid but successfully parsed page ranges, such as '3-2'. Defaults to false. + :param page_ranges: *(Optional)* Paper ranges to print, one based, e.g., '1-5, 8, 11-13'. Pages are printed in the document order, not in the order specified, and no more than once. Defaults to empty string, which implies the entire document is printed. The page numbers are quietly capped to actual page count of the document, and ranges beyond the end of the document are ignored. If this results in no pages to print, an error is reported. It is an error to specify a range with start greater than end. 
:param header_template: *(Optional)* HTML template for the print header. Should be valid HTML markup with following classes used to inject printing values into them: - ```date````: formatted print date - ````title````: document title - ````url````: document location - ````pageNumber````: current page number - ````totalPages````: total pages in the document For example, ```````` would generate span containing the title. :param footer_template: *(Optional)* HTML template for the print footer. Should use the same format as the ````headerTemplate````. :param prefer_css_page_size: *(Optional)* Whether or not to prefer page size as defined by css. Defaults to false, in which case the content will be scaled to fit the paper size. @@ -1997,8 +2091,6 @@ def print_to_pdf( params['marginRight'] = margin_right if page_ranges is not None: params['pageRanges'] = page_ranges - if ignore_invalid_page_ranges is not None: - params['ignoreInvalidPageRanges'] = ignore_invalid_page_ranges if header_template is not None: params['headerTemplate'] = header_template if footer_template is not None: @@ -2014,7 +2106,7 @@ def print_to_pdf( json = yield cmd_dict return ( str(json['data']), - io.StreamHandle.from_json(json['stream']) if 'stream' in json else None + io.StreamHandle.from_json(json['stream']) if json.get('stream', None) is not None else None ) @@ -2632,7 +2724,7 @@ def clear_compilation_cache() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: def set_spc_transaction_mode( - mode: str + mode: AutoResponseMode ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: ''' Sets the Secure Payment Confirmation transaction mode. 
@@ -2643,7 +2735,7 @@ def set_spc_transaction_mode( :param mode: ''' params: T_JSON_DICT = dict() - params['mode'] = mode + params['mode'] = mode.to_json() cmd_dict: T_JSON_DICT = { 'method': 'Page.setSPCTransactionMode', 'params': params, @@ -2651,6 +2743,26 @@ def set_spc_transaction_mode( json = yield cmd_dict +def set_rph_registration_mode( + mode: AutoResponseMode + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + Extensions for Custom Handlers API: + https://html.spec.whatwg.org/multipage/system-state.html#rph-automation + + **EXPERIMENTAL** + + :param mode: + ''' + params: T_JSON_DICT = dict() + params['mode'] = mode.to_json() + cmd_dict: T_JSON_DICT = { + 'method': 'Page.setRPHRegistrationMode', + 'params': params, + } + json = yield cmd_dict + + def generate_test_report( message: str, group: typing.Optional[str] = None @@ -2727,17 +2839,17 @@ class FileChooserOpened: ''' #: Id of the frame containing input node. frame_id: FrameId - #: Input node id. - backend_node_id: dom.BackendNodeId #: Input mode. mode: str + #: Input node id. Only present for file choosers opened via an element. 
+ backend_node_id: typing.Optional[dom.BackendNodeId] @classmethod def from_json(cls, json: T_JSON_DICT) -> FileChooserOpened: return cls( frame_id=FrameId.from_json(json['frameId']), - backend_node_id=dom.BackendNodeId.from_json(json['backendNodeId']), - mode=str(json['mode']) + mode=str(json['mode']), + backend_node_id=dom.BackendNodeId.from_json(json['backendNodeId']) if json.get('backendNodeId', None) is not None else None ) @@ -2759,7 +2871,7 @@ def from_json(cls, json: T_JSON_DICT) -> FrameAttached: return cls( frame_id=FrameId.from_json(json['frameId']), parent_frame_id=FrameId.from_json(json['parentFrameId']), - stack=runtime.StackTrace.from_json(json['stack']) if 'stack' in json else None + stack=runtime.StackTrace.from_json(json['stack']) if json.get('stack', None) is not None else None ) @@ -3077,7 +3189,7 @@ def from_json(cls, json: T_JSON_DICT) -> JavascriptDialogOpening: message=str(json['message']), type_=DialogType.from_json(json['type']), has_browser_handler=bool(json['hasBrowserHandler']), - default_prompt=str(json['defaultPrompt']) if 'defaultPrompt' in json else None + default_prompt=str(json['defaultPrompt']) if json.get('defaultPrompt', None) is not None else None ) @@ -3130,7 +3242,7 @@ def from_json(cls, json: T_JSON_DICT) -> BackForwardCacheNotUsed: loader_id=network.LoaderId.from_json(json['loaderId']), frame_id=FrameId.from_json(json['frameId']), not_restored_explanations=[BackForwardCacheNotRestoredExplanation.from_json(i) for i in json['notRestoredExplanations']], - not_restored_explanations_tree=BackForwardCacheNotRestoredExplanationTree.from_json(json['notRestoredExplanationsTree']) if 'notRestoredExplanationsTree' in json else None + not_restored_explanations_tree=BackForwardCacheNotRestoredExplanationTree.from_json(json['notRestoredExplanationsTree']) if json.get('notRestoredExplanationsTree', None) is not None else None ) diff --git a/pycdp/cdp/performance_timeline.py b/pycdp/cdp/performance_timeline.py index 408849d..e22e149 
100644 --- a/pycdp/cdp/performance_timeline.py +++ b/pycdp/cdp/performance_timeline.py @@ -55,9 +55,9 @@ def from_json(cls, json: T_JSON_DICT) -> LargestContentfulPaint: render_time=network.TimeSinceEpoch.from_json(json['renderTime']), load_time=network.TimeSinceEpoch.from_json(json['loadTime']), size=float(json['size']), - element_id=str(json['elementId']) if 'elementId' in json else None, - url=str(json['url']) if 'url' in json else None, - node_id=dom.BackendNodeId.from_json(json['nodeId']) if 'nodeId' in json else None, + element_id=str(json['elementId']) if json.get('elementId', None) is not None else None, + url=str(json['url']) if json.get('url', None) is not None else None, + node_id=dom.BackendNodeId.from_json(json['nodeId']) if json.get('nodeId', None) is not None else None, ) @@ -82,7 +82,7 @@ def from_json(cls, json: T_JSON_DICT) -> LayoutShiftAttribution: return cls( previous_rect=dom.Rect.from_json(json['previousRect']), current_rect=dom.Rect.from_json(json['currentRect']), - node_id=dom.BackendNodeId.from_json(json['nodeId']) if 'nodeId' in json else None, + node_id=dom.BackendNodeId.from_json(json['nodeId']) if json.get('nodeId', None) is not None else None, ) @@ -161,9 +161,9 @@ def from_json(cls, json: T_JSON_DICT) -> TimelineEvent: type_=str(json['type']), name=str(json['name']), time=network.TimeSinceEpoch.from_json(json['time']), - duration=float(json['duration']) if 'duration' in json else None, - lcp_details=LargestContentfulPaint.from_json(json['lcpDetails']) if 'lcpDetails' in json else None, - layout_shift_details=LayoutShift.from_json(json['layoutShiftDetails']) if 'layoutShiftDetails' in json else None, + duration=float(json['duration']) if json.get('duration', None) is not None else None, + lcp_details=LargestContentfulPaint.from_json(json['lcpDetails']) if json.get('lcpDetails', None) is not None else None, + layout_shift_details=LayoutShift.from_json(json['layoutShiftDetails']) if json.get('layoutShiftDetails', None) is not None else 
None, ) diff --git a/pycdp/cdp/profiler.py b/pycdp/cdp/profiler.py index 0dc100f..8f7d5ab 100644 --- a/pycdp/cdp/profiler.py +++ b/pycdp/cdp/profiler.py @@ -58,10 +58,10 @@ def from_json(cls, json: T_JSON_DICT) -> ProfileNode: return cls( id_=int(json['id']), call_frame=runtime.CallFrame.from_json(json['callFrame']), - hit_count=int(json['hitCount']) if 'hitCount' in json else None, - children=[int(i) for i in json['children']] if 'children' in json else None, - deopt_reason=str(json['deoptReason']) if 'deoptReason' in json else None, - position_ticks=[PositionTickInfo.from_json(i) for i in json['positionTicks']] if 'positionTicks' in json else None, + hit_count=int(json['hitCount']) if json.get('hitCount', None) is not None else None, + children=[int(i) for i in json['children']] if json.get('children', None) is not None else None, + deopt_reason=str(json['deoptReason']) if json.get('deoptReason', None) is not None else None, + position_ticks=[PositionTickInfo.from_json(i) for i in json['positionTicks']] if json.get('positionTicks', None) is not None else None, ) @@ -103,8 +103,8 @@ def from_json(cls, json: T_JSON_DICT) -> Profile: nodes=[ProfileNode.from_json(i) for i in json['nodes']], start_time=float(json['startTime']), end_time=float(json['endTime']), - samples=[int(i) for i in json['samples']] if 'samples' in json else None, - time_deltas=[int(i) for i in json['timeDeltas']] if 'timeDeltas' in json else None, + samples=[int(i) for i in json['samples']] if json.get('samples', None) is not None else None, + time_deltas=[int(i) for i in json['timeDeltas']] if json.get('timeDeltas', None) is not None else None, ) @@ -223,81 +223,6 @@ def from_json(cls, json: T_JSON_DICT) -> ScriptCoverage: ) -@dataclass -class TypeObject: - ''' - Describes a type collected during runtime. - ''' - #: Name of a type collected with type profiling. 
- name: str - - def to_json(self) -> T_JSON_DICT: - json: T_JSON_DICT = dict() - json['name'] = self.name - return json - - @classmethod - def from_json(cls, json: T_JSON_DICT) -> TypeObject: - return cls( - name=str(json['name']), - ) - - -@dataclass -class TypeProfileEntry: - ''' - Source offset and types for a parameter or return value. - ''' - #: Source offset of the parameter or end of function for return values. - offset: int - - #: The types for this parameter or return value. - types: typing.List[TypeObject] - - def to_json(self) -> T_JSON_DICT: - json: T_JSON_DICT = dict() - json['offset'] = self.offset - json['types'] = [i.to_json() for i in self.types] - return json - - @classmethod - def from_json(cls, json: T_JSON_DICT) -> TypeProfileEntry: - return cls( - offset=int(json['offset']), - types=[TypeObject.from_json(i) for i in json['types']], - ) - - -@dataclass -class ScriptTypeProfile: - ''' - Type profile data collected during runtime for a JavaScript script. - ''' - #: JavaScript script id. - script_id: runtime.ScriptId - - #: JavaScript script name or url. - url: str - - #: Type profile entries for parameters and return values of the functions in the script. - entries: typing.List[TypeProfileEntry] - - def to_json(self) -> T_JSON_DICT: - json: T_JSON_DICT = dict() - json['scriptId'] = self.script_id.to_json() - json['url'] = self.url - json['entries'] = [i.to_json() for i in self.entries] - return json - - @classmethod - def from_json(cls, json: T_JSON_DICT) -> ScriptTypeProfile: - return cls( - script_id=runtime.ScriptId.from_json(json['scriptId']), - url=str(json['url']), - entries=[TypeProfileEntry.from_json(i) for i in json['entries']], - ) - - def disable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: cmd_dict: T_JSON_DICT = { @@ -383,18 +308,6 @@ def start_precise_coverage( return float(json['timestamp']) -def start_type_profile() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: - ''' - Enable type profile. 
- - **EXPERIMENTAL** - ''' - cmd_dict: T_JSON_DICT = { - 'method': 'Profiler.startTypeProfile', - } - json = yield cmd_dict - - def stop() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,Profile]: ''' @@ -419,18 +332,6 @@ def stop_precise_coverage() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: json = yield cmd_dict -def stop_type_profile() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: - ''' - Disable type profile. Disabling releases type profile data collected so far. - - **EXPERIMENTAL** - ''' - cmd_dict: T_JSON_DICT = { - 'method': 'Profiler.stopTypeProfile', - } - json = yield cmd_dict - - def take_precise_coverage() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.List[ScriptCoverage], float]]: ''' Collect coverage data for the current isolate, and resets execution counters. Precise code @@ -451,21 +352,6 @@ def take_precise_coverage() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.T ) -def take_type_profile() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[ScriptTypeProfile]]: - ''' - Collect type profile. - - **EXPERIMENTAL** - - :returns: Type profile for all scripts since startTypeProfile() was turned on. 
- ''' - cmd_dict: T_JSON_DICT = { - 'method': 'Profiler.takeTypeProfile', - } - json = yield cmd_dict - return [ScriptTypeProfile.from_json(i) for i in json['result']] - - @event_class('Profiler.consoleProfileFinished') @dataclass class ConsoleProfileFinished: @@ -482,7 +368,7 @@ def from_json(cls, json: T_JSON_DICT) -> ConsoleProfileFinished: id_=str(json['id']), location=debugger.Location.from_json(json['location']), profile=Profile.from_json(json['profile']), - title=str(json['title']) if 'title' in json else None + title=str(json['title']) if json.get('title', None) is not None else None ) @@ -503,7 +389,7 @@ def from_json(cls, json: T_JSON_DICT) -> ConsoleProfileStarted: return cls( id_=str(json['id']), location=debugger.Location.from_json(json['location']), - title=str(json['title']) if 'title' in json else None + title=str(json['title']) if json.get('title', None) is not None else None ) diff --git a/pycdp/cdp/runtime.py b/pycdp/cdp/runtime.py index 043a762..e9d1550 100644 --- a/pycdp/cdp/runtime.py +++ b/pycdp/cdp/runtime.py @@ -27,6 +27,36 @@ def __repr__(self): return 'ScriptId({})'.format(super().__repr__()) +@dataclass +class WebDriverValue: + ''' + Represents the value serialiazed by the WebDriver BiDi specification + https://w3c.github.io/webdriver-bidi. + ''' + type_: str + + value: typing.Optional[typing.Any] = None + + object_id: typing.Optional[str] = None + + def to_json(self) -> T_JSON_DICT: + json: T_JSON_DICT = dict() + json['type'] = self.type_ + if self.value is not None: + json['value'] = self.value + if self.object_id is not None: + json['objectId'] = self.object_id + return json + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> WebDriverValue: + return cls( + type_=str(json['type']), + value=json['value'] if json.get('value', None) is not None else None, + object_id=str(json['objectId']) if json.get('objectId', None) is not None else None, + ) + + class RemoteObjectId(str): ''' Unique object identifier. 
@@ -84,6 +114,9 @@ class RemoteObject: #: String representation of the object. description: typing.Optional[str] = None + #: WebDriver BiDi representation of the value. + web_driver_value: typing.Optional[WebDriverValue] = None + #: Unique object identifier (for non-primitive values). object_id: typing.Optional[RemoteObjectId] = None @@ -105,6 +138,8 @@ def to_json(self) -> T_JSON_DICT: json['unserializableValue'] = self.unserializable_value.to_json() if self.description is not None: json['description'] = self.description + if self.web_driver_value is not None: + json['webDriverValue'] = self.web_driver_value.to_json() if self.object_id is not None: json['objectId'] = self.object_id.to_json() if self.preview is not None: @@ -117,14 +152,15 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> RemoteObject: return cls( type_=str(json['type']), - subtype=str(json['subtype']) if 'subtype' in json else None, - class_name=str(json['className']) if 'className' in json else None, - value=json['value'] if 'value' in json else None, - unserializable_value=UnserializableValue.from_json(json['unserializableValue']) if 'unserializableValue' in json else None, - description=str(json['description']) if 'description' in json else None, - object_id=RemoteObjectId.from_json(json['objectId']) if 'objectId' in json else None, - preview=ObjectPreview.from_json(json['preview']) if 'preview' in json else None, - custom_preview=CustomPreview.from_json(json['customPreview']) if 'customPreview' in json else None, + subtype=str(json['subtype']) if json.get('subtype', None) is not None else None, + class_name=str(json['className']) if json.get('className', None) is not None else None, + value=json['value'] if json.get('value', None) is not None else None, + unserializable_value=UnserializableValue.from_json(json['unserializableValue']) if json.get('unserializableValue', None) is not None else None, + description=str(json['description']) if json.get('description', 
None) is not None else None, + web_driver_value=WebDriverValue.from_json(json['webDriverValue']) if json.get('webDriverValue', None) is not None else None, + object_id=RemoteObjectId.from_json(json['objectId']) if json.get('objectId', None) is not None else None, + preview=ObjectPreview.from_json(json['preview']) if json.get('preview', None) is not None else None, + custom_preview=CustomPreview.from_json(json['customPreview']) if json.get('customPreview', None) is not None else None, ) @@ -150,7 +186,7 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> CustomPreview: return cls( header=str(json['header']), - body_getter_id=RemoteObjectId.from_json(json['bodyGetterId']) if 'bodyGetterId' in json else None, + body_getter_id=RemoteObjectId.from_json(json['bodyGetterId']) if json.get('bodyGetterId', None) is not None else None, ) @@ -196,9 +232,9 @@ def from_json(cls, json: T_JSON_DICT) -> ObjectPreview: type_=str(json['type']), overflow=bool(json['overflow']), properties=[PropertyPreview.from_json(i) for i in json['properties']], - subtype=str(json['subtype']) if 'subtype' in json else None, - description=str(json['description']) if 'description' in json else None, - entries=[EntryPreview.from_json(i) for i in json['entries']] if 'entries' in json else None, + subtype=str(json['subtype']) if json.get('subtype', None) is not None else None, + description=str(json['description']) if json.get('description', None) is not None else None, + entries=[EntryPreview.from_json(i) for i in json['entries']] if json.get('entries', None) is not None else None, ) @@ -236,9 +272,9 @@ def from_json(cls, json: T_JSON_DICT) -> PropertyPreview: return cls( name=str(json['name']), type_=str(json['type']), - value=str(json['value']) if 'value' in json else None, - value_preview=ObjectPreview.from_json(json['valuePreview']) if 'valuePreview' in json else None, - subtype=str(json['subtype']) if 'subtype' in json else None, + value=str(json['value']) if 
json.get('value', None) is not None else None, + value_preview=ObjectPreview.from_json(json['valuePreview']) if json.get('valuePreview', None) is not None else None, + subtype=str(json['subtype']) if json.get('subtype', None) is not None else None, ) @@ -261,7 +297,7 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> EntryPreview: return cls( value=ObjectPreview.from_json(json['value']), - key=ObjectPreview.from_json(json['key']) if 'key' in json else None, + key=ObjectPreview.from_json(json['key']) if json.get('key', None) is not None else None, ) @@ -331,13 +367,13 @@ def from_json(cls, json: T_JSON_DICT) -> PropertyDescriptor: name=str(json['name']), configurable=bool(json['configurable']), enumerable=bool(json['enumerable']), - value=RemoteObject.from_json(json['value']) if 'value' in json else None, - writable=bool(json['writable']) if 'writable' in json else None, - get=RemoteObject.from_json(json['get']) if 'get' in json else None, - set_=RemoteObject.from_json(json['set']) if 'set' in json else None, - was_thrown=bool(json['wasThrown']) if 'wasThrown' in json else None, - is_own=bool(json['isOwn']) if 'isOwn' in json else None, - symbol=RemoteObject.from_json(json['symbol']) if 'symbol' in json else None, + value=RemoteObject.from_json(json['value']) if json.get('value', None) is not None else None, + writable=bool(json['writable']) if json.get('writable', None) is not None else None, + get=RemoteObject.from_json(json['get']) if json.get('get', None) is not None else None, + set_=RemoteObject.from_json(json['set']) if json.get('set', None) is not None else None, + was_thrown=bool(json['wasThrown']) if json.get('wasThrown', None) is not None else None, + is_own=bool(json['isOwn']) if json.get('isOwn', None) is not None else None, + symbol=RemoteObject.from_json(json['symbol']) if json.get('symbol', None) is not None else None, ) @@ -363,7 +399,7 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> 
InternalPropertyDescriptor: return cls( name=str(json['name']), - value=RemoteObject.from_json(json['value']) if 'value' in json else None, + value=RemoteObject.from_json(json['value']) if json.get('value', None) is not None else None, ) @@ -401,9 +437,9 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> PrivatePropertyDescriptor: return cls( name=str(json['name']), - value=RemoteObject.from_json(json['value']) if 'value' in json else None, - get=RemoteObject.from_json(json['get']) if 'get' in json else None, - set_=RemoteObject.from_json(json['set']) if 'set' in json else None, + value=RemoteObject.from_json(json['value']) if json.get('value', None) is not None else None, + get=RemoteObject.from_json(json['get']) if json.get('get', None) is not None else None, + set_=RemoteObject.from_json(json['set']) if json.get('set', None) is not None else None, ) @@ -435,9 +471,9 @@ def to_json(self) -> T_JSON_DICT: @classmethod def from_json(cls, json: T_JSON_DICT) -> CallArgument: return cls( - value=json['value'] if 'value' in json else None, - unserializable_value=UnserializableValue.from_json(json['unserializableValue']) if 'unserializableValue' in json else None, - object_id=RemoteObjectId.from_json(json['objectId']) if 'objectId' in json else None, + value=json['value'] if json.get('value', None) is not None else None, + unserializable_value=UnserializableValue.from_json(json['unserializableValue']) if json.get('unserializableValue', None) is not None else None, + object_id=RemoteObjectId.from_json(json['objectId']) if json.get('objectId', None) is not None else None, ) @@ -496,7 +532,7 @@ def from_json(cls, json: T_JSON_DICT) -> ExecutionContextDescription: origin=str(json['origin']), name=str(json['name']), unique_id=str(json['uniqueId']), - aux_data=dict(json['auxData']) if 'auxData' in json else None, + aux_data=dict(json['auxData']) if json.get('auxData', None) is not None else None, ) @@ -565,12 +601,12 @@ def from_json(cls, json: 
T_JSON_DICT) -> ExceptionDetails: text=str(json['text']), line_number=int(json['lineNumber']), column_number=int(json['columnNumber']), - script_id=ScriptId.from_json(json['scriptId']) if 'scriptId' in json else None, - url=str(json['url']) if 'url' in json else None, - stack_trace=StackTrace.from_json(json['stackTrace']) if 'stackTrace' in json else None, - exception=RemoteObject.from_json(json['exception']) if 'exception' in json else None, - execution_context_id=ExecutionContextId.from_json(json['executionContextId']) if 'executionContextId' in json else None, - exception_meta_data=dict(json['exceptionMetaData']) if 'exceptionMetaData' in json else None, + script_id=ScriptId.from_json(json['scriptId']) if json.get('scriptId', None) is not None else None, + url=str(json['url']) if json.get('url', None) is not None else None, + stack_trace=StackTrace.from_json(json['stackTrace']) if json.get('stackTrace', None) is not None else None, + exception=RemoteObject.from_json(json['exception']) if json.get('exception', None) is not None else None, + execution_context_id=ExecutionContextId.from_json(json['executionContextId']) if json.get('executionContextId', None) is not None else None, + exception_meta_data=dict(json['exceptionMetaData']) if json.get('exceptionMetaData', None) is not None else None, ) @@ -677,9 +713,9 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> StackTrace: return cls( call_frames=[CallFrame.from_json(i) for i in json['callFrames']], - description=str(json['description']) if 'description' in json else None, - parent=StackTrace.from_json(json['parent']) if 'parent' in json else None, - parent_id=StackTraceId.from_json(json['parentId']) if 'parentId' in json else None, + description=str(json['description']) if json.get('description', None) is not None else None, + parent=StackTrace.from_json(json['parent']) if json.get('parent', None) is not None else None, + parent_id=StackTraceId.from_json(json['parentId']) if 
json.get('parentId', None) is not None else None, ) @@ -719,7 +755,7 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> StackTraceId: return cls( id_=str(json['id']), - debugger_id=UniqueDebuggerId.from_json(json['debuggerId']) if 'debuggerId' in json else None, + debugger_id=UniqueDebuggerId.from_json(json['debuggerId']) if json.get('debuggerId', None) is not None else None, ) @@ -752,7 +788,7 @@ def await_promise( json = yield cmd_dict return ( RemoteObject.from_json(json['result']), - ExceptionDetails.from_json(json['exceptionDetails']) if 'exceptionDetails' in json else None + ExceptionDetails.from_json(json['exceptionDetails']) if json.get('exceptionDetails', None) is not None else None ) @@ -767,7 +803,9 @@ def call_function_on( await_promise: typing.Optional[bool] = None, execution_context_id: typing.Optional[ExecutionContextId] = None, object_group: typing.Optional[str] = None, - throw_on_side_effect: typing.Optional[bool] = None + throw_on_side_effect: typing.Optional[bool] = None, + unique_context_id: typing.Optional[str] = None, + generate_web_driver_value: typing.Optional[bool] = None ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[RemoteObject, typing.Optional[ExceptionDetails]]]: ''' Calls function with given declaration on the given object. Object group of the result is @@ -780,10 +818,12 @@ def call_function_on( :param return_by_value: *(Optional)* Whether the result is expected to be a JSON object which should be sent by value. :param generate_preview: **(EXPERIMENTAL)** *(Optional)* Whether preview should be generated for the result. :param user_gesture: *(Optional)* Whether execution should be treated as initiated by user in the UI. - :param await_promise: *(Optional)* Whether execution should ````await``` for resulting value and return once awaited promise is resolved. + :param await_promise: *(Optional)* Whether execution should ````await```` for resulting value and return once awaited promise is resolved. 
:param execution_context_id: *(Optional)* Specifies execution context which global object will be used to call function on. Either executionContextId or objectId should be specified. :param object_group: *(Optional)* Symbolic group name that can be used to release multiple objects. If objectGroup is not specified and objectId is, objectGroup will be inherited from object. :param throw_on_side_effect: **(EXPERIMENTAL)** *(Optional)* Whether to throw an exception if side effect cannot be ruled out during evaluation. + :param unique_context_id: **(EXPERIMENTAL)** *(Optional)* An alternative way to specify the execution context to call function on. Compared to contextId that may be reused across processes, this is guaranteed to be system-unique, so it can be used to prevent accidental function call in context different than intended (e.g. as a result of navigation across process boundaries). This is mutually exclusive with ````executionContextId````. + :param generate_web_driver_value: **(EXPERIMENTAL)** *(Optional)* Whether the result should contain ````webDriverValue````, serialized according to https://w3c.github.io/webdriver-bidi. This is mutually exclusive with ````returnByValue````, but resulting ````objectId``` is still provided. :returns: A tuple with the following items: 0. **result** - Call result. 
@@ -811,6 +851,10 @@ def call_function_on( params['objectGroup'] = object_group if throw_on_side_effect is not None: params['throwOnSideEffect'] = throw_on_side_effect + if unique_context_id is not None: + params['uniqueContextId'] = unique_context_id + if generate_web_driver_value is not None: + params['generateWebDriverValue'] = generate_web_driver_value cmd_dict: T_JSON_DICT = { 'method': 'Runtime.callFunctionOn', 'params': params, @@ -818,7 +862,7 @@ def call_function_on( json = yield cmd_dict return ( RemoteObject.from_json(json['result']), - ExceptionDetails.from_json(json['exceptionDetails']) if 'exceptionDetails' in json else None + ExceptionDetails.from_json(json['exceptionDetails']) if json.get('exceptionDetails', None) is not None else None ) @@ -852,8 +896,8 @@ def compile_script( } json = yield cmd_dict return ( - ScriptId.from_json(json['scriptId']) if 'scriptId' in json else None, - ExceptionDetails.from_json(json['exceptionDetails']) if 'exceptionDetails' in json else None + ScriptId.from_json(json['scriptId']) if json.get('scriptId', None) is not None else None, + ExceptionDetails.from_json(json['exceptionDetails']) if json.get('exceptionDetails', None) is not None else None ) @@ -904,7 +948,8 @@ def evaluate( disable_breaks: typing.Optional[bool] = None, repl_mode: typing.Optional[bool] = None, allow_unsafe_eval_blocked_by_csp: typing.Optional[bool] = None, - unique_context_id: typing.Optional[str] = None + unique_context_id: typing.Optional[str] = None, + generate_web_driver_value: typing.Optional[bool] = None ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[RemoteObject, typing.Optional[ExceptionDetails]]]: ''' Evaluates expression on global object. @@ -924,6 +969,7 @@ def evaluate( :param repl_mode: **(EXPERIMENTAL)** *(Optional)* Setting this flag to true enables ````let```` re-declaration and top-level ````await````. Note that ````let```` variables can only be re-declared if they originate from ````replMode```` themselves. 
:param allow_unsafe_eval_blocked_by_csp: **(EXPERIMENTAL)** *(Optional)* The Content Security Policy (CSP) for the target might block 'unsafe-eval' which includes eval(), Function(), setTimeout() and setInterval() when called with non-callable arguments. This flag bypasses CSP for this evaluation and allows unsafe-eval. Defaults to true. :param unique_context_id: **(EXPERIMENTAL)** *(Optional)* An alternative way to specify the execution context to evaluate in. Compared to contextId that may be reused across processes, this is guaranteed to be system-unique, so it can be used to prevent accidental evaluation of the expression in context different than intended (e.g. as a result of navigation across process boundaries). This is mutually exclusive with ````contextId```. + :param generate_web_driver_value: **(EXPERIMENTAL)** *(Optional)* Whether the result should be serialized according to https://w3c.github.io/webdriver-bidi. :returns: A tuple with the following items: 0. **result** - Evaluation result. 
@@ -959,6 +1005,8 @@ def evaluate( params['allowUnsafeEvalBlockedByCSP'] = allow_unsafe_eval_blocked_by_csp if unique_context_id is not None: params['uniqueContextId'] = unique_context_id + if generate_web_driver_value is not None: + params['generateWebDriverValue'] = generate_web_driver_value cmd_dict: T_JSON_DICT = { 'method': 'Runtime.evaluate', 'params': params, @@ -966,7 +1014,7 @@ def evaluate( json = yield cmd_dict return ( RemoteObject.from_json(json['result']), - ExceptionDetails.from_json(json['exceptionDetails']) if 'exceptionDetails' in json else None + ExceptionDetails.from_json(json['exceptionDetails']) if json.get('exceptionDetails', None) is not None else None ) @@ -1047,9 +1095,9 @@ def get_properties( json = yield cmd_dict return ( [PropertyDescriptor.from_json(i) for i in json['result']], - [InternalPropertyDescriptor.from_json(i) for i in json['internalProperties']] if 'internalProperties' in json else None, - [PrivatePropertyDescriptor.from_json(i) for i in json['privateProperties']] if 'privateProperties' in json else None, - ExceptionDetails.from_json(json['exceptionDetails']) if 'exceptionDetails' in json else None + [InternalPropertyDescriptor.from_json(i) for i in json['internalProperties']] if json.get('internalProperties', None) is not None else None, + [PrivatePropertyDescriptor.from_json(i) for i in json['privateProperties']] if json.get('privateProperties', None) is not None else None, + ExceptionDetails.from_json(json['exceptionDetails']) if json.get('exceptionDetails', None) is not None else None ) @@ -1187,7 +1235,7 @@ def run_script( json = yield cmd_dict return ( RemoteObject.from_json(json['result']), - ExceptionDetails.from_json(json['exceptionDetails']) if 'exceptionDetails' in json else None + ExceptionDetails.from_json(json['exceptionDetails']) if json.get('exceptionDetails', None) is not None else None ) @@ -1311,6 +1359,31 @@ def remove_binding( json = yield cmd_dict +def get_exception_details( + error_object_id: 
RemoteObjectId + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Optional[ExceptionDetails]]: + ''' + This method tries to lookup and populate exception details for a + JavaScript Error object. + Note that the stackTrace portion of the resulting exceptionDetails will + only be populated if the Runtime domain was enabled at the time when the + Error was thrown. + + **EXPERIMENTAL** + + :param error_object_id: The error object for which to resolve the exception details. + :returns: + ''' + params: T_JSON_DICT = dict() + params['errorObjectId'] = error_object_id.to_json() + cmd_dict: T_JSON_DICT = { + 'method': 'Runtime.getExceptionDetails', + 'params': params, + } + json = yield cmd_dict + return ExceptionDetails.from_json(json['exceptionDetails']) if json.get('exceptionDetails', None) is not None else None + + @event_class('Runtime.bindingCalled') @dataclass class BindingCalled: @@ -1363,8 +1436,8 @@ def from_json(cls, json: T_JSON_DICT) -> ConsoleAPICalled: args=[RemoteObject.from_json(i) for i in json['args']], execution_context_id=ExecutionContextId.from_json(json['executionContextId']), timestamp=Timestamp.from_json(json['timestamp']), - stack_trace=StackTrace.from_json(json['stackTrace']) if 'stackTrace' in json else None, - context=str(json['context']) if 'context' in json else None + stack_trace=StackTrace.from_json(json['stackTrace']) if json.get('stackTrace', None) is not None else None, + context=str(json['context']) if json.get('context', None) is not None else None ) @@ -1429,11 +1502,14 @@ class ExecutionContextDestroyed: ''' #: Id of the destroyed context execution_context_id: ExecutionContextId + #: Unique Id of the destroyed context + execution_context_unique_id: str @classmethod def from_json(cls, json: T_JSON_DICT) -> ExecutionContextDestroyed: return cls( - execution_context_id=ExecutionContextId.from_json(json['executionContextId']) + execution_context_id=ExecutionContextId.from_json(json['executionContextId']), + 
execution_context_unique_id=str(json['executionContextUniqueId']) ) @@ -1469,5 +1545,5 @@ def from_json(cls, json: T_JSON_DICT) -> InspectRequested: return cls( object_=RemoteObject.from_json(json['object']), hints=dict(json['hints']), - execution_context_id=ExecutionContextId.from_json(json['executionContextId']) if 'executionContextId' in json else None + execution_context_id=ExecutionContextId.from_json(json['executionContextId']) if json.get('executionContextId', None) is not None else None ) diff --git a/pycdp/cdp/security.py b/pycdp/cdp/security.py index c8d4c98..93313fb 100644 --- a/pycdp/cdp/security.py +++ b/pycdp/cdp/security.py @@ -168,9 +168,9 @@ def from_json(cls, json: T_JSON_DICT) -> CertificateSecurityState: obsolete_ssl_key_exchange=bool(json['obsoleteSslKeyExchange']), obsolete_ssl_cipher=bool(json['obsoleteSslCipher']), obsolete_ssl_signature=bool(json['obsoleteSslSignature']), - key_exchange_group=str(json['keyExchangeGroup']) if 'keyExchangeGroup' in json else None, - mac=str(json['mac']) if 'mac' in json else None, - certificate_network_error=str(json['certificateNetworkError']) if 'certificateNetworkError' in json else None, + key_exchange_group=str(json['keyExchangeGroup']) if json.get('keyExchangeGroup', None) is not None else None, + mac=str(json['mac']) if json.get('mac', None) is not None else None, + certificate_network_error=str(json['certificateNetworkError']) if json.get('certificateNetworkError', None) is not None else None, ) @@ -205,7 +205,7 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> SafetyTipInfo: return cls( safety_tip_status=SafetyTipStatus.from_json(json['safetyTipStatus']), - safe_url=str(json['safeUrl']) if 'safeUrl' in json else None, + safe_url=str(json['safeUrl']) if json.get('safeUrl', None) is not None else None, ) @@ -241,8 +241,8 @@ def from_json(cls, json: T_JSON_DICT) -> VisibleSecurityState: return cls( security_state=SecurityState.from_json(json['securityState']), 
security_state_issue_ids=[str(i) for i in json['securityStateIssueIds']], - certificate_security_state=CertificateSecurityState.from_json(json['certificateSecurityState']) if 'certificateSecurityState' in json else None, - safety_tip_info=SafetyTipInfo.from_json(json['safetyTipInfo']) if 'safetyTipInfo' in json else None, + certificate_security_state=CertificateSecurityState.from_json(json['certificateSecurityState']) if json.get('certificateSecurityState', None) is not None else None, + safety_tip_info=SafetyTipInfo.from_json(json['safetyTipInfo']) if json.get('safetyTipInfo', None) is not None else None, ) @@ -293,7 +293,7 @@ def from_json(cls, json: T_JSON_DICT) -> SecurityStateExplanation: description=str(json['description']), mixed_content_type=MixedContentType.from_json(json['mixedContentType']), certificate=[str(i) for i in json['certificate']], - recommendations=[str(i) for i in json['recommendations']] if 'recommendations' in json else None, + recommendations=[str(i) for i in json['recommendations']] if json.get('recommendations', None) is not None else None, ) @@ -516,5 +516,5 @@ def from_json(cls, json: T_JSON_DICT) -> SecurityStateChanged: scheme_is_cryptographic=bool(json['schemeIsCryptographic']), explanations=[SecurityStateExplanation.from_json(i) for i in json['explanations']], insecure_content_status=InsecureContentStatus.from_json(json['insecureContentStatus']), - summary=str(json['summary']) if 'summary' in json else None + summary=str(json['summary']) if json.get('summary', None) is not None else None ) diff --git a/pycdp/cdp/service_worker.py b/pycdp/cdp/service_worker.py index 3a4db60..a303e9f 100644 --- a/pycdp/cdp/service_worker.py +++ b/pycdp/cdp/service_worker.py @@ -134,10 +134,10 @@ def from_json(cls, json: T_JSON_DICT) -> ServiceWorkerVersion: script_url=str(json['scriptURL']), running_status=ServiceWorkerVersionRunningStatus.from_json(json['runningStatus']), status=ServiceWorkerVersionStatus.from_json(json['status']), - 
script_last_modified=float(json['scriptLastModified']) if 'scriptLastModified' in json else None, - script_response_time=float(json['scriptResponseTime']) if 'scriptResponseTime' in json else None, - controlled_clients=[target.TargetID.from_json(i) for i in json['controlledClients']] if 'controlledClients' in json else None, - target_id=target.TargetID.from_json(json['targetId']) if 'targetId' in json else None, + script_last_modified=float(json['scriptLastModified']) if json.get('scriptLastModified', None) is not None else None, + script_response_time=float(json['scriptResponseTime']) if json.get('scriptResponseTime', None) is not None else None, + controlled_clients=[target.TargetID.from_json(i) for i in json['controlledClients']] if json.get('controlledClients', None) is not None else None, + target_id=target.TargetID.from_json(json['targetId']) if json.get('targetId', None) is not None else None, ) diff --git a/pycdp/cdp/storage.py b/pycdp/cdp/storage.py index 75f1b6b..4d79f57 100644 --- a/pycdp/cdp/storage.py +++ b/pycdp/cdp/storage.py @@ -13,6 +13,19 @@ from . import browser from . import network +from . 
import page + + +class SerializedStorageKey(str): + def to_json(self) -> str: + return self + + @classmethod + def from_json(cls, json: str) -> SerializedStorageKey: + return cls(json) + + def __repr__(self): + return 'SerializedStorageKey({})'.format(super().__repr__()) class StorageType(enum.Enum): @@ -29,6 +42,8 @@ class StorageType(enum.Enum): SERVICE_WORKERS = "service_workers" CACHE_STORAGE = "cache_storage" INTEREST_GROUPS = "interest_groups" + SHARED_STORAGE = "shared_storage" + STORAGE_BUCKETS = "storage_buckets" ALL_ = "all" OTHER = "other" @@ -96,6 +111,7 @@ class InterestGroupAccessType(enum.Enum): JOIN = "join" LEAVE = "leave" UPDATE = "update" + LOADED = "loaded" BID = "bid" WIN = "win" @@ -127,7 +143,7 @@ def to_json(self) -> T_JSON_DICT: def from_json(cls, json: T_JSON_DICT) -> InterestGroupAd: return cls( render_url=str(json['renderUrl']), - metadata=str(json['metadata']) if 'metadata' in json else None, + metadata=str(json['metadata']) if json.get('metadata', None) is not None else None, ) @@ -191,14 +207,294 @@ def from_json(cls, json: T_JSON_DICT) -> InterestGroupDetails: trusted_bidding_signals_keys=[str(i) for i in json['trustedBiddingSignalsKeys']], ads=[InterestGroupAd.from_json(i) for i in json['ads']], ad_components=[InterestGroupAd.from_json(i) for i in json['adComponents']], - bidding_url=str(json['biddingUrl']) if 'biddingUrl' in json else None, - bidding_wasm_helper_url=str(json['biddingWasmHelperUrl']) if 'biddingWasmHelperUrl' in json else None, - update_url=str(json['updateUrl']) if 'updateUrl' in json else None, - trusted_bidding_signals_url=str(json['trustedBiddingSignalsUrl']) if 'trustedBiddingSignalsUrl' in json else None, - user_bidding_signals=str(json['userBiddingSignals']) if 'userBiddingSignals' in json else None, + bidding_url=str(json['biddingUrl']) if json.get('biddingUrl', None) is not None else None, + bidding_wasm_helper_url=str(json['biddingWasmHelperUrl']) if json.get('biddingWasmHelperUrl', None) is not None else 
None, + update_url=str(json['updateUrl']) if json.get('updateUrl', None) is not None else None, + trusted_bidding_signals_url=str(json['trustedBiddingSignalsUrl']) if json.get('trustedBiddingSignalsUrl', None) is not None else None, + user_bidding_signals=str(json['userBiddingSignals']) if json.get('userBiddingSignals', None) is not None else None, + ) + + +class SharedStorageAccessType(enum.Enum): + ''' + Enum of shared storage access types. + ''' + DOCUMENT_ADD_MODULE = "documentAddModule" + DOCUMENT_SELECT_URL = "documentSelectURL" + DOCUMENT_RUN = "documentRun" + DOCUMENT_SET = "documentSet" + DOCUMENT_APPEND = "documentAppend" + DOCUMENT_DELETE = "documentDelete" + DOCUMENT_CLEAR = "documentClear" + WORKLET_SET = "workletSet" + WORKLET_APPEND = "workletAppend" + WORKLET_DELETE = "workletDelete" + WORKLET_CLEAR = "workletClear" + WORKLET_GET = "workletGet" + WORKLET_KEYS = "workletKeys" + WORKLET_ENTRIES = "workletEntries" + WORKLET_LENGTH = "workletLength" + WORKLET_REMAINING_BUDGET = "workletRemainingBudget" + + def to_json(self) -> str: + return self.value + + @classmethod + def from_json(cls, json: str) -> SharedStorageAccessType: + return cls(json) + + +@dataclass +class SharedStorageEntry: + ''' + Struct for a single key-value pair in an origin's shared storage. + ''' + key: str + + value: str + + def to_json(self) -> T_JSON_DICT: + json: T_JSON_DICT = dict() + json['key'] = self.key + json['value'] = self.value + return json + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> SharedStorageEntry: + return cls( + key=str(json['key']), + value=str(json['value']), + ) + + +@dataclass +class SharedStorageMetadata: + ''' + Details for an origin's shared storage. 
+ ''' + creation_time: network.TimeSinceEpoch + + length: int + + remaining_budget: float + + def to_json(self) -> T_JSON_DICT: + json: T_JSON_DICT = dict() + json['creationTime'] = self.creation_time.to_json() + json['length'] = self.length + json['remainingBudget'] = self.remaining_budget + return json + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> SharedStorageMetadata: + return cls( + creation_time=network.TimeSinceEpoch.from_json(json['creationTime']), + length=int(json['length']), + remaining_budget=float(json['remainingBudget']), + ) + + +@dataclass +class SharedStorageReportingMetadata: + ''' + Pair of reporting metadata details for a candidate URL for ``selectURL()``. + ''' + event_type: str + + reporting_url: str + + def to_json(self) -> T_JSON_DICT: + json: T_JSON_DICT = dict() + json['eventType'] = self.event_type + json['reportingUrl'] = self.reporting_url + return json + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> SharedStorageReportingMetadata: + return cls( + event_type=str(json['eventType']), + reporting_url=str(json['reportingUrl']), + ) + + +@dataclass +class SharedStorageUrlWithMetadata: + ''' + Bundles a candidate URL with its reporting metadata. + ''' + #: Spec of candidate URL. + url: str + + #: Any associated reporting metadata. + reporting_metadata: typing.List[SharedStorageReportingMetadata] + + def to_json(self) -> T_JSON_DICT: + json: T_JSON_DICT = dict() + json['url'] = self.url + json['reportingMetadata'] = [i.to_json() for i in self.reporting_metadata] + return json + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> SharedStorageUrlWithMetadata: + return cls( + url=str(json['url']), + reporting_metadata=[SharedStorageReportingMetadata.from_json(i) for i in json['reportingMetadata']], + ) + + +@dataclass +class SharedStorageAccessParams: + ''' + Bundles the parameters for shared storage access events whose + presence/absence can vary according to SharedStorageAccessType. 
+ ''' + #: Spec of the module script URL. + #: Present only for SharedStorageAccessType.documentAddModule. + script_source_url: typing.Optional[str] = None + + #: Name of the registered operation to be run. + #: Present only for SharedStorageAccessType.documentRun and + #: SharedStorageAccessType.documentSelectURL. + operation_name: typing.Optional[str] = None + + #: The operation's serialized data in bytes (converted to a string). + #: Present only for SharedStorageAccessType.documentRun and + #: SharedStorageAccessType.documentSelectURL. + serialized_data: typing.Optional[str] = None + + #: Array of candidate URLs' specs, along with any associated metadata. + #: Present only for SharedStorageAccessType.documentSelectURL. + urls_with_metadata: typing.Optional[typing.List[SharedStorageUrlWithMetadata]] = None + + #: Key for a specific entry in an origin's shared storage. + #: Present only for SharedStorageAccessType.documentSet, + #: SharedStorageAccessType.documentAppend, + #: SharedStorageAccessType.documentDelete, + #: SharedStorageAccessType.workletSet, + #: SharedStorageAccessType.workletAppend, + #: SharedStorageAccessType.workletDelete, and + #: SharedStorageAccessType.workletGet. + key: typing.Optional[str] = None + + #: Value for a specific entry in an origin's shared storage. + #: Present only for SharedStorageAccessType.documentSet, + #: SharedStorageAccessType.documentAppend, + #: SharedStorageAccessType.workletSet, and + #: SharedStorageAccessType.workletAppend. + value: typing.Optional[str] = None + + #: Whether or not to set an entry for a key if that key is already present. + #: Present only for SharedStorageAccessType.documentSet and + #: SharedStorageAccessType.workletSet. 
+ ignore_if_present: typing.Optional[bool] = None + + def to_json(self) -> T_JSON_DICT: + json: T_JSON_DICT = dict() + if self.script_source_url is not None: + json['scriptSourceUrl'] = self.script_source_url + if self.operation_name is not None: + json['operationName'] = self.operation_name + if self.serialized_data is not None: + json['serializedData'] = self.serialized_data + if self.urls_with_metadata is not None: + json['urlsWithMetadata'] = [i.to_json() for i in self.urls_with_metadata] + if self.key is not None: + json['key'] = self.key + if self.value is not None: + json['value'] = self.value + if self.ignore_if_present is not None: + json['ignoreIfPresent'] = self.ignore_if_present + return json + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> SharedStorageAccessParams: + return cls( + script_source_url=str(json['scriptSourceUrl']) if json.get('scriptSourceUrl', None) is not None else None, + operation_name=str(json['operationName']) if json.get('operationName', None) is not None else None, + serialized_data=str(json['serializedData']) if json.get('serializedData', None) is not None else None, + urls_with_metadata=[SharedStorageUrlWithMetadata.from_json(i) for i in json['urlsWithMetadata']] if json.get('urlsWithMetadata', None) is not None else None, + key=str(json['key']) if json.get('key', None) is not None else None, + value=str(json['value']) if json.get('value', None) is not None else None, + ignore_if_present=bool(json['ignoreIfPresent']) if json.get('ignoreIfPresent', None) is not None else None, + ) + + +class StorageBucketsDurability(enum.Enum): + RELAXED = "relaxed" + STRICT = "strict" + + def to_json(self) -> str: + return self.value + + @classmethod + def from_json(cls, json: str) -> StorageBucketsDurability: + return cls(json) + + +@dataclass +class StorageBucketInfo: + storage_key: SerializedStorageKey + + id_: str + + name: str + + is_default: bool + + expiration: network.TimeSinceEpoch + + #: Storage quota (bytes). 
+ quota: float + + persistent: bool + + durability: StorageBucketsDurability + + def to_json(self) -> T_JSON_DICT: + json: T_JSON_DICT = dict() + json['storageKey'] = self.storage_key.to_json() + json['id'] = self.id_ + json['name'] = self.name + json['isDefault'] = self.is_default + json['expiration'] = self.expiration.to_json() + json['quota'] = self.quota + json['persistent'] = self.persistent + json['durability'] = self.durability.to_json() + return json + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> StorageBucketInfo: + return cls( + storage_key=SerializedStorageKey.from_json(json['storageKey']), + id_=str(json['id']), + name=str(json['name']), + is_default=bool(json['isDefault']), + expiration=network.TimeSinceEpoch.from_json(json['expiration']), + quota=float(json['quota']), + persistent=bool(json['persistent']), + durability=StorageBucketsDurability.from_json(json['durability']), ) +def get_storage_key_for_frame( + frame_id: page.FrameId + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,SerializedStorageKey]: + ''' + Returns a storage key given a frame id. + + :param frame_id: + :returns: + ''' + params: T_JSON_DICT = dict() + params['frameId'] = frame_id.to_json() + cmd_dict: T_JSON_DICT = { + 'method': 'Storage.getStorageKeyForFrame', + 'params': params, + } + json = yield cmd_dict + return SerializedStorageKey.from_json(json['storageKey']) + + def clear_data_for_origin( origin: str, storage_types: str @@ -219,6 +515,26 @@ def clear_data_for_origin( json = yield cmd_dict +def clear_data_for_storage_key( + storage_key: str, + storage_types: str + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + Clears storage for storage key. + + :param storage_key: Storage key. + :param storage_types: Comma separated list of StorageType to clear. 
+ ''' + params: T_JSON_DICT = dict() + params['storageKey'] = storage_key + params['storageTypes'] = storage_types + cmd_dict: T_JSON_DICT = { + 'method': 'Storage.clearDataForStorageKey', + 'params': params, + } + json = yield cmd_dict + + def get_cookies( browser_context_id: typing.Optional[browser.BrowserContextID] = None ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[network.Cookie]]: @@ -347,6 +663,23 @@ def track_cache_storage_for_origin( json = yield cmd_dict +def track_cache_storage_for_storage_key( + storage_key: str + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + Registers storage key to be notified when an update occurs to its cache storage list. + + :param storage_key: Storage key. + ''' + params: T_JSON_DICT = dict() + params['storageKey'] = storage_key + cmd_dict: T_JSON_DICT = { + 'method': 'Storage.trackCacheStorageForStorageKey', + 'params': params, + } + json = yield cmd_dict + + def track_indexed_db_for_origin( origin: str ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: @@ -364,6 +697,23 @@ def track_indexed_db_for_origin( json = yield cmd_dict +def track_indexed_db_for_storage_key( + storage_key: str + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + Registers storage key to be notified when an update occurs to its IndexedDB. + + :param storage_key: Storage key. + ''' + params: T_JSON_DICT = dict() + params['storageKey'] = storage_key + cmd_dict: T_JSON_DICT = { + 'method': 'Storage.trackIndexedDBForStorageKey', + 'params': params, + } + json = yield cmd_dict + + def untrack_cache_storage_for_origin( origin: str ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: @@ -381,6 +731,23 @@ def untrack_cache_storage_for_origin( json = yield cmd_dict +def untrack_cache_storage_for_storage_key( + storage_key: str + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + Unregisters storage key from receiving notifications for cache storage. + + :param storage_key: Storage key. 
+ '''
+ params: T_JSON_DICT = dict()
+ params['storageKey'] = storage_key
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.untrackCacheStorageForStorageKey',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
 def untrack_indexed_db_for_origin(
 origin: str
 ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
@@ -398,6 +765,23 @@ def untrack_indexed_db_for_origin(
 json = yield cmd_dict
 
 
+def untrack_indexed_db_for_storage_key(
+ storage_key: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Unregisters storage key from receiving notifications for IndexedDB.
+
+ :param storage_key: Storage key.
+ '''
+ params: T_JSON_DICT = dict()
+ params['storageKey'] = storage_key
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.untrackIndexedDBForStorageKey',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
 def get_trust_tokens() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[TrustTokens]]:
 '''
 Returns the number of stored Trust Tokens per issuer for the
@@ -479,6 +863,200 @@ def set_interest_group_tracking(
 json = yield cmd_dict
 
 
+def get_shared_storage_metadata(
+ owner_origin: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,SharedStorageMetadata]:
+ '''
+ Gets metadata for an origin's shared storage.
+
+ **EXPERIMENTAL**
+
+ :param owner_origin:
+ :returns:
+ '''
+ params: T_JSON_DICT = dict()
+ params['ownerOrigin'] = owner_origin
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.getSharedStorageMetadata',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return SharedStorageMetadata.from_json(json['metadata'])
+
+
+def get_shared_storage_entries(
+ owner_origin: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[SharedStorageEntry]]:
+ '''
+ Gets the entries in a given origin's shared storage. 
+ + **EXPERIMENTAL** + + :param owner_origin: + :returns: + ''' + params: T_JSON_DICT = dict() + params['ownerOrigin'] = owner_origin + cmd_dict: T_JSON_DICT = { + 'method': 'Storage.getSharedStorageEntries', + 'params': params, + } + json = yield cmd_dict + return [SharedStorageEntry.from_json(i) for i in json['entries']] + + +def set_shared_storage_entry( + owner_origin: str, + key: str, + value: str, + ignore_if_present: typing.Optional[bool] = None + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + Sets entry with ``key`` and ``value`` for a given origin's shared storage. + + **EXPERIMENTAL** + + :param owner_origin: + :param key: + :param value: + :param ignore_if_present: *(Optional)* If ```ignoreIfPresent```` is included and true, then only sets the entry if ````key``` doesn't already exist. + ''' + params: T_JSON_DICT = dict() + params['ownerOrigin'] = owner_origin + params['key'] = key + params['value'] = value + if ignore_if_present is not None: + params['ignoreIfPresent'] = ignore_if_present + cmd_dict: T_JSON_DICT = { + 'method': 'Storage.setSharedStorageEntry', + 'params': params, + } + json = yield cmd_dict + + +def delete_shared_storage_entry( + owner_origin: str, + key: str + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + Deletes entry for ``key`` (if it exists) for a given origin's shared storage. + + **EXPERIMENTAL** + + :param owner_origin: + :param key: + ''' + params: T_JSON_DICT = dict() + params['ownerOrigin'] = owner_origin + params['key'] = key + cmd_dict: T_JSON_DICT = { + 'method': 'Storage.deleteSharedStorageEntry', + 'params': params, + } + json = yield cmd_dict + + +def clear_shared_storage_entries( + owner_origin: str + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + Clears all entries for a given origin's shared storage. 
+ + **EXPERIMENTAL** + + :param owner_origin: + ''' + params: T_JSON_DICT = dict() + params['ownerOrigin'] = owner_origin + cmd_dict: T_JSON_DICT = { + 'method': 'Storage.clearSharedStorageEntries', + 'params': params, + } + json = yield cmd_dict + + +def reset_shared_storage_budget( + owner_origin: str + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + Resets the budget for ``ownerOrigin`` by clearing all budget withdrawals. + + **EXPERIMENTAL** + + :param owner_origin: + ''' + params: T_JSON_DICT = dict() + params['ownerOrigin'] = owner_origin + cmd_dict: T_JSON_DICT = { + 'method': 'Storage.resetSharedStorageBudget', + 'params': params, + } + json = yield cmd_dict + + +def set_shared_storage_tracking( + enable: bool + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + Enables/disables issuing of sharedStorageAccessed events. + + **EXPERIMENTAL** + + :param enable: + ''' + params: T_JSON_DICT = dict() + params['enable'] = enable + cmd_dict: T_JSON_DICT = { + 'method': 'Storage.setSharedStorageTracking', + 'params': params, + } + json = yield cmd_dict + + +def set_storage_bucket_tracking( + storage_key: str, + enable: bool + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + Set tracking for a storage key's buckets. + + **EXPERIMENTAL** + + :param storage_key: + :param enable: + ''' + params: T_JSON_DICT = dict() + params['storageKey'] = storage_key + params['enable'] = enable + cmd_dict: T_JSON_DICT = { + 'method': 'Storage.setStorageBucketTracking', + 'params': params, + } + json = yield cmd_dict + + +def delete_storage_bucket( + storage_key: str, + bucket_name: str + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + Deletes the Storage Bucket with the given storage key and bucket name. 
+ + **EXPERIMENTAL** + + :param storage_key: + :param bucket_name: + ''' + params: T_JSON_DICT = dict() + params['storageKey'] = storage_key + params['bucketName'] = bucket_name + cmd_dict: T_JSON_DICT = { + 'method': 'Storage.deleteStorageBucket', + 'params': params, + } + json = yield cmd_dict + + @event_class('Storage.cacheStorageContentUpdated') @dataclass class CacheStorageContentUpdated: @@ -487,6 +1065,8 @@ class CacheStorageContentUpdated: ''' #: Origin to update. origin: str + #: Storage key to update. + storage_key: str #: Name of cache in origin. cache_name: str @@ -494,6 +1074,7 @@ class CacheStorageContentUpdated: def from_json(cls, json: T_JSON_DICT) -> CacheStorageContentUpdated: return cls( origin=str(json['origin']), + storage_key=str(json['storageKey']), cache_name=str(json['cacheName']) ) @@ -506,11 +1087,14 @@ class CacheStorageListUpdated: ''' #: Origin to update. origin: str + #: Storage key to update. + storage_key: str @classmethod def from_json(cls, json: T_JSON_DICT) -> CacheStorageListUpdated: return cls( - origin=str(json['origin']) + origin=str(json['origin']), + storage_key=str(json['storageKey']) ) @@ -522,6 +1106,8 @@ class IndexedDBContentUpdated: ''' #: Origin to update. origin: str + #: Storage key to update. + storage_key: str #: Database to update. database_name: str #: ObjectStore to update. @@ -531,6 +1117,7 @@ class IndexedDBContentUpdated: def from_json(cls, json: T_JSON_DICT) -> IndexedDBContentUpdated: return cls( origin=str(json['origin']), + storage_key=str(json['storageKey']), database_name=str(json['databaseName']), object_store_name=str(json['objectStoreName']) ) @@ -544,11 +1131,14 @@ class IndexedDBListUpdated: ''' #: Origin to update. origin: str + #: Storage key to update. 
+ storage_key: str
 
 @classmethod
 def from_json(cls, json: T_JSON_DICT) -> IndexedDBListUpdated:
 return cls(
- origin=str(json['origin'])
+ origin=str(json['origin']),
+ storage_key=str(json['storageKey'])
 )
 
 
@@ -571,3 +1161,57 @@ def from_json(cls, json: T_JSON_DICT) -> InterestGroupAccessed:
 owner_origin=str(json['ownerOrigin']),
 name=str(json['name'])
 )
+
+
+@event_class('Storage.sharedStorageAccessed')
+@dataclass
+class SharedStorageAccessed:
+ '''
+ Shared storage was accessed by the associated page.
+ The following parameters are included in all events.
+ '''
+ #: Time of the access.
+ access_time: network.TimeSinceEpoch
+ #: Enum value indicating the Shared Storage API method invoked.
+ type_: SharedStorageAccessType
+ #: DevTools Frame Token for the primary frame tree's root.
+ main_frame_id: page.FrameId
+ #: Serialized origin for the context that invoked the Shared Storage API.
+ owner_origin: str
+ #: The sub-parameters wrapped by ``params`` are all optional and their
+ #: presence/absence depends on ``type``. 
+ params: SharedStorageAccessParams + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> SharedStorageAccessed: + return cls( + access_time=network.TimeSinceEpoch.from_json(json['accessTime']), + type_=SharedStorageAccessType.from_json(json['type']), + main_frame_id=page.FrameId.from_json(json['mainFrameId']), + owner_origin=str(json['ownerOrigin']), + params=SharedStorageAccessParams.from_json(json['params']) + ) + + +@event_class('Storage.storageBucketCreatedOrUpdated') +@dataclass +class StorageBucketCreatedOrUpdated: + bucket: StorageBucketInfo + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> StorageBucketCreatedOrUpdated: + return cls( + bucket=StorageBucketInfo.from_json(json['bucket']) + ) + + +@event_class('Storage.storageBucketDeleted') +@dataclass +class StorageBucketDeleted: + bucket_id: str + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> StorageBucketDeleted: + return cls( + bucket_id=str(json['bucketId']) + ) diff --git a/pycdp/cdp/system_info.py b/pycdp/cdp/system_info.py index 4785d98..45fffd2 100644 --- a/pycdp/cdp/system_info.py +++ b/pycdp/cdp/system_info.py @@ -64,8 +64,8 @@ def from_json(cls, json: T_JSON_DICT) -> GPUDevice: device_string=str(json['deviceString']), driver_vendor=str(json['driverVendor']), driver_version=str(json['driverVersion']), - sub_sys_id=float(json['subSysId']) if 'subSysId' in json else None, - revision=float(json['revision']) if 'revision' in json else None, + sub_sys_id=float(json['subSysId']) if json.get('subSysId', None) is not None else None, + revision=float(json['revision']) if json.get('revision', None) is not None else None, ) @@ -277,8 +277,8 @@ def from_json(cls, json: T_JSON_DICT) -> GPUInfo: video_decoding=[VideoDecodeAcceleratorCapability.from_json(i) for i in json['videoDecoding']], video_encoding=[VideoEncodeAcceleratorCapability.from_json(i) for i in json['videoEncoding']], image_decoding=[ImageDecodeAcceleratorCapability.from_json(i) for i in json['imageDecoding']], - 
aux_attributes=dict(json['auxAttributes']) if 'auxAttributes' in json else None, - feature_status=dict(json['featureStatus']) if 'featureStatus' in json else None, + aux_attributes=dict(json['auxAttributes']) if json.get('auxAttributes', None) is not None else None, + feature_status=dict(json['featureStatus']) if json.get('featureStatus', None) is not None else None, ) @@ -336,6 +336,25 @@ def get_info() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[GPUInfo, ) +def get_feature_state( + feature_state: str + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,bool]: + ''' + Returns information about the feature state. + + :param feature_state: + :returns: + ''' + params: T_JSON_DICT = dict() + params['featureState'] = feature_state + cmd_dict: T_JSON_DICT = { + 'method': 'SystemInfo.getFeatureState', + 'params': params, + } + json = yield cmd_dict + return bool(json['featureEnabled']) + + def get_process_info() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[ProcessInfo]]: ''' Returns information about all running processes. diff --git a/pycdp/cdp/target.py b/pycdp/cdp/target.py index cb0b707..13bb14c 100644 --- a/pycdp/cdp/target.py +++ b/pycdp/cdp/target.py @@ -67,6 +67,10 @@ class TargetInfo: browser_context_id: typing.Optional[browser.BrowserContextID] = None + #: Provides additional details for specific target types. For example, for + #: the type of "page", this may be set to "portal" or "prerender". 
+ subtype: typing.Optional[str] = None
+
 def to_json(self) -> T_JSON_DICT:
 json: T_JSON_DICT = dict()
 json['targetId'] = self.target_id.to_json()
@@ -81,6 +85,8 @@ def to_json(self) -> T_JSON_DICT:
 json['openerFrameId'] = self.opener_frame_id.to_json()
 if self.browser_context_id is not None:
 json['browserContextId'] = self.browser_context_id.to_json()
+ if self.subtype is not None:
+ json['subtype'] = self.subtype
 return json
 
 @classmethod
@@ -92,12 +98,60 @@ def from_json(cls, json: T_JSON_DICT) -> TargetInfo:
 url=str(json['url']),
 attached=bool(json['attached']),
 can_access_opener=bool(json['canAccessOpener']),
- opener_id=TargetID.from_json(json['openerId']) if 'openerId' in json else None,
- opener_frame_id=page.FrameId.from_json(json['openerFrameId']) if 'openerFrameId' in json else None,
- browser_context_id=browser.BrowserContextID.from_json(json['browserContextId']) if 'browserContextId' in json else None,
+ opener_id=TargetID.from_json(json['openerId']) if json.get('openerId', None) is not None else None,
+ opener_frame_id=page.FrameId.from_json(json['openerFrameId']) if json.get('openerFrameId', None) is not None else None,
+ browser_context_id=browser.BrowserContextID.from_json(json['browserContextId']) if json.get('browserContextId', None) is not None else None,
+ subtype=str(json['subtype']) if json.get('subtype', None) is not None else None,
 )
 
 
+@dataclass
+class FilterEntry:
+ '''
+ A filter used by target query/discovery/auto-attach operations.
+ '''
+ #: If set, causes exclusion of matching targets from the list.
+ exclude: typing.Optional[bool] = None
+
+ #: If not present, matches any type. 
+ type_: typing.Optional[str] = None + + def to_json(self) -> T_JSON_DICT: + json: T_JSON_DICT = dict() + if self.exclude is not None: + json['exclude'] = self.exclude + if self.type_ is not None: + json['type'] = self.type_ + return json + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> FilterEntry: + return cls( + exclude=bool(json['exclude']) if json.get('exclude', None) is not None else None, + type_=str(json['type']) if json.get('type', None) is not None else None, + ) + + +class TargetFilter(list): + ''' + The entries in TargetFilter are matched sequentially against targets and + the first entry that matches determines if the target is included or not, + depending on the value of ``exclude`` field in the entry. + If filter is not specified, the one assumed is + [{type: "browser", exclude: true}, {type: "tab", exclude: true}, {}] + (i.e. include everything but ``browser`` and ``tab``). + ''' + def to_json(self) -> typing.List[FilterEntry]: + return self + + @classmethod + def from_json(cls, json: typing.List[FilterEntry]) -> TargetFilter: + return cls(json) + + def __repr__(self): + return 'TargetFilter({})'.format(super().__repr__()) + + @dataclass class RemoteLocation: host: str @@ -279,7 +333,8 @@ def create_target( browser_context_id: typing.Optional[browser.BrowserContextID] = None, enable_begin_frame_control: typing.Optional[bool] = None, new_window: typing.Optional[bool] = None, - background: typing.Optional[bool] = None + background: typing.Optional[bool] = None, + for_tab: typing.Optional[bool] = None ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,TargetID]: ''' Creates a new page. @@ -291,6 +346,7 @@ def create_target( :param enable_begin_frame_control: **(EXPERIMENTAL)** *(Optional)* Whether BeginFrames for this target will be controlled via DevTools (headless chrome only, not supported on MacOS yet, false by default). :param new_window: *(Optional)* Whether to create a new Window or Tab (chrome-only, false by default). 
:param background: *(Optional)* Whether to create the target in background or foreground (chrome-only, false by default). + :param for_tab: **(EXPERIMENTAL)** *(Optional)* Whether to create the target of type "tab". :returns: The id of the page opened. ''' params: T_JSON_DICT = dict() @@ -307,6 +363,8 @@ def create_target( params['newWindow'] = new_window if background is not None: params['background'] = background + if for_tab is not None: + params['forTab'] = for_tab cmd_dict: T_JSON_DICT = { 'method': 'Target.createTarget', 'params': params, @@ -379,14 +437,21 @@ def get_target_info( return TargetInfo.from_json(json['targetInfo']) -def get_targets() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[TargetInfo]]: +def get_targets( + filter_: typing.Optional[TargetFilter] = None + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[TargetInfo]]: ''' Retrieves a list of available targets. + :param filter_: **(EXPERIMENTAL)** *(Optional)* Only targets matching filter will be reported. If filter is not specified and target discovery is currently enabled, a filter used for target discovery is used for consistency. :returns: The list of targets. ''' + params: T_JSON_DICT = dict() + if filter_ is not None: + params['filter'] = filter_.to_json() cmd_dict: T_JSON_DICT = { 'method': 'Target.getTargets', + 'params': params, } json = yield cmd_dict return [TargetInfo.from_json(i) for i in json['targetInfos']] @@ -425,7 +490,8 @@ def send_message_to_target( def set_auto_attach( auto_attach: bool, wait_for_debugger_on_start: bool, - flatten: typing.Optional[bool] = None + flatten: typing.Optional[bool] = None, + filter_: typing.Optional[TargetFilter] = None ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: ''' Controls whether to automatically attach to new targets which are considered to be related to @@ -439,12 +505,15 @@ def set_auto_attach( :param auto_attach: Whether to auto-attach to related targets. 
:param wait_for_debugger_on_start: Whether to pause new targets when attaching to them. Use ```Runtime.runIfWaitingForDebugger``` to run paused targets. :param flatten: *(Optional)* Enables "flat" access to the session via specifying sessionId attribute in the commands. We plan to make this the default, deprecate non-flattened mode, and eventually retire it. See crbug.com/991325. + :param filter_: **(EXPERIMENTAL)** *(Optional)* Only targets matching filter will be attached. ''' params: T_JSON_DICT = dict() params['autoAttach'] = auto_attach params['waitForDebuggerOnStart'] = wait_for_debugger_on_start if flatten is not None: params['flatten'] = flatten + if filter_ is not None: + params['filter'] = filter_.to_json() cmd_dict: T_JSON_DICT = { 'method': 'Target.setAutoAttach', 'params': params, @@ -454,7 +523,8 @@ def set_auto_attach( def auto_attach_related( target_id: TargetID, - wait_for_debugger_on_start: bool + wait_for_debugger_on_start: bool, + filter_: typing.Optional[TargetFilter] = None ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: ''' Adds the specified target to the list of targets that will be monitored for any related target @@ -467,10 +537,13 @@ def auto_attach_related( :param target_id: :param wait_for_debugger_on_start: Whether to pause new targets when attaching to them. Use ```Runtime.runIfWaitingForDebugger``` to run paused targets. + :param filter_: **(EXPERIMENTAL)** *(Optional)* Only targets matching filter will be attached. 
''' params: T_JSON_DICT = dict() params['targetId'] = target_id.to_json() params['waitForDebuggerOnStart'] = wait_for_debugger_on_start + if filter_ is not None: + params['filter'] = filter_.to_json() cmd_dict: T_JSON_DICT = { 'method': 'Target.autoAttachRelated', 'params': params, @@ -479,16 +552,20 @@ def auto_attach_related( def set_discover_targets( - discover: bool + discover: bool, + filter_: typing.Optional[TargetFilter] = None ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: ''' Controls whether to discover available targets and notify via ``targetCreated/targetInfoChanged/targetDestroyed`` events. :param discover: Whether to discover available targets. + :param filter_: **(EXPERIMENTAL)** *(Optional)* Only targets matching filter will be attached. If ```discover```` is false, ````filter``` must be omitted or empty. ''' params: T_JSON_DICT = dict() params['discover'] = discover + if filter_ is not None: + params['filter'] = filter_.to_json() cmd_dict: T_JSON_DICT = { 'method': 'Target.setDiscoverTargets', 'params': params, @@ -556,7 +633,7 @@ class DetachedFromTarget: def from_json(cls, json: T_JSON_DICT) -> DetachedFromTarget: return cls( session_id=SessionID.from_json(json['sessionId']), - target_id=TargetID.from_json(json['targetId']) if 'targetId' in json else None + target_id=TargetID.from_json(json['targetId']) if json.get('targetId', None) is not None else None ) @@ -578,7 +655,7 @@ def from_json(cls, json: T_JSON_DICT) -> ReceivedMessageFromTarget: return cls( session_id=SessionID.from_json(json['sessionId']), message=str(json['message']), - target_id=TargetID.from_json(json['targetId']) if 'targetId' in json else None + target_id=TargetID.from_json(json['targetId']) if json.get('targetId', None) is not None else None ) diff --git a/pycdp/cdp/tracing.py b/pycdp/cdp/tracing.py index 84eeb4b..d69a605 100644 --- a/pycdp/cdp/tracing.py +++ b/pycdp/cdp/tracing.py @@ -34,6 +34,10 @@ class TraceConfig: #: Controls how the trace buffer stores data. 
record_mode: typing.Optional[str] = None + #: Size of the trace buffer in kilobytes. If not specified or zero is passed, a default value + #: of 200 MB would be used. + trace_buffer_size_in_kb: typing.Optional[float] = None + #: Turns on JavaScript stack sampling. enable_sampling: typing.Optional[bool] = None @@ -59,6 +63,8 @@ def to_json(self) -> T_JSON_DICT: json: T_JSON_DICT = dict() if self.record_mode is not None: json['recordMode'] = self.record_mode + if self.trace_buffer_size_in_kb is not None: + json['traceBufferSizeInKb'] = self.trace_buffer_size_in_kb if self.enable_sampling is not None: json['enableSampling'] = self.enable_sampling if self.enable_systrace is not None: @@ -78,14 +84,15 @@ def to_json(self) -> T_JSON_DICT: @classmethod def from_json(cls, json: T_JSON_DICT) -> TraceConfig: return cls( - record_mode=str(json['recordMode']) if 'recordMode' in json else None, - enable_sampling=bool(json['enableSampling']) if 'enableSampling' in json else None, - enable_systrace=bool(json['enableSystrace']) if 'enableSystrace' in json else None, - enable_argument_filter=bool(json['enableArgumentFilter']) if 'enableArgumentFilter' in json else None, - included_categories=[str(i) for i in json['includedCategories']] if 'includedCategories' in json else None, - excluded_categories=[str(i) for i in json['excludedCategories']] if 'excludedCategories' in json else None, - synthetic_delays=[str(i) for i in json['syntheticDelays']] if 'syntheticDelays' in json else None, - memory_dump_config=MemoryDumpConfig.from_json(json['memoryDumpConfig']) if 'memoryDumpConfig' in json else None, + record_mode=str(json['recordMode']) if json.get('recordMode', None) is not None else None, + trace_buffer_size_in_kb=float(json['traceBufferSizeInKb']) if json.get('traceBufferSizeInKb', None) is not None else None, + enable_sampling=bool(json['enableSampling']) if json.get('enableSampling', None) is not None else None, + enable_systrace=bool(json['enableSystrace']) if 
json.get('enableSystrace', None) is not None else None, + enable_argument_filter=bool(json['enableArgumentFilter']) if json.get('enableArgumentFilter', None) is not None else None, + included_categories=[str(i) for i in json['includedCategories']] if json.get('includedCategories', None) is not None else None, + excluded_categories=[str(i) for i in json['excludedCategories']] if json.get('excludedCategories', None) is not None else None, + synthetic_delays=[str(i) for i in json['syntheticDelays']] if json.get('syntheticDelays', None) is not None else None, + memory_dump_config=MemoryDumpConfig.from_json(json['memoryDumpConfig']) if json.get('memoryDumpConfig', None) is not None else None, ) @@ -293,9 +300,9 @@ class BufferUsage: @classmethod def from_json(cls, json: T_JSON_DICT) -> BufferUsage: return cls( - percent_full=float(json['percentFull']) if 'percentFull' in json else None, - event_count=float(json['eventCount']) if 'eventCount' in json else None, - value=float(json['value']) if 'value' in json else None + percent_full=float(json['percentFull']) if json.get('percentFull', None) is not None else None, + event_count=float(json['eventCount']) if json.get('eventCount', None) is not None else None, + value=float(json['value']) if json.get('value', None) is not None else None ) @@ -303,8 +310,8 @@ def from_json(cls, json: T_JSON_DICT) -> BufferUsage: @dataclass class DataCollected: ''' - Contains an bucket of collected trace events. When tracing is stopped collected events will be - send as a sequence of dataCollected events followed by tracingComplete event. + Contains a bucket of collected trace events. When tracing is stopped collected events will be + sent as a sequence of dataCollected events followed by tracingComplete event. 
''' value: typing.List[dict] @@ -336,7 +343,7 @@ class TracingComplete: def from_json(cls, json: T_JSON_DICT) -> TracingComplete: return cls( data_loss_occurred=bool(json['dataLossOccurred']), - stream=io.StreamHandle.from_json(json['stream']) if 'stream' in json else None, - trace_format=StreamFormat.from_json(json['traceFormat']) if 'traceFormat' in json else None, - stream_compression=StreamCompression.from_json(json['streamCompression']) if 'streamCompression' in json else None + stream=io.StreamHandle.from_json(json['stream']) if json.get('stream', None) is not None else None, + trace_format=StreamFormat.from_json(json['traceFormat']) if json.get('traceFormat', None) is not None else None, + stream_compression=StreamCompression.from_json(json['streamCompression']) if json.get('streamCompression', None) is not None else None ) diff --git a/pycdp/cdp/web_audio.py b/pycdp/cdp/web_audio.py index 71dc4ce..011f52c 100644 --- a/pycdp/cdp/web_audio.py +++ b/pycdp/cdp/web_audio.py @@ -214,7 +214,7 @@ def from_json(cls, json: T_JSON_DICT) -> BaseAudioContext: callback_buffer_size=float(json['callbackBufferSize']), max_output_channel_count=float(json['maxOutputChannelCount']), sample_rate=float(json['sampleRate']), - realtime_data=ContextRealtimeData.from_json(json['realtimeData']) if 'realtimeData' in json else None, + realtime_data=ContextRealtimeData.from_json(json['realtimeData']) if json.get('realtimeData', None) is not None else None, ) @@ -535,8 +535,8 @@ def from_json(cls, json: T_JSON_DICT) -> NodesConnected: context_id=GraphObjectId.from_json(json['contextId']), source_id=GraphObjectId.from_json(json['sourceId']), destination_id=GraphObjectId.from_json(json['destinationId']), - source_output_index=float(json['sourceOutputIndex']) if 'sourceOutputIndex' in json else None, - destination_input_index=float(json['destinationInputIndex']) if 'destinationInputIndex' in json else None + source_output_index=float(json['sourceOutputIndex']) if 
json.get('sourceOutputIndex', None) is not None else None, + destination_input_index=float(json['destinationInputIndex']) if json.get('destinationInputIndex', None) is not None else None ) @@ -558,8 +558,8 @@ def from_json(cls, json: T_JSON_DICT) -> NodesDisconnected: context_id=GraphObjectId.from_json(json['contextId']), source_id=GraphObjectId.from_json(json['sourceId']), destination_id=GraphObjectId.from_json(json['destinationId']), - source_output_index=float(json['sourceOutputIndex']) if 'sourceOutputIndex' in json else None, - destination_input_index=float(json['destinationInputIndex']) if 'destinationInputIndex' in json else None + source_output_index=float(json['sourceOutputIndex']) if json.get('sourceOutputIndex', None) is not None else None, + destination_input_index=float(json['destinationInputIndex']) if json.get('destinationInputIndex', None) is not None else None ) @@ -580,7 +580,7 @@ def from_json(cls, json: T_JSON_DICT) -> NodeParamConnected: context_id=GraphObjectId.from_json(json['contextId']), source_id=GraphObjectId.from_json(json['sourceId']), destination_id=GraphObjectId.from_json(json['destinationId']), - source_output_index=float(json['sourceOutputIndex']) if 'sourceOutputIndex' in json else None + source_output_index=float(json['sourceOutputIndex']) if json.get('sourceOutputIndex', None) is not None else None ) @@ -601,5 +601,5 @@ def from_json(cls, json: T_JSON_DICT) -> NodeParamDisconnected: context_id=GraphObjectId.from_json(json['contextId']), source_id=GraphObjectId.from_json(json['sourceId']), destination_id=GraphObjectId.from_json(json['destinationId']), - source_output_index=float(json['sourceOutputIndex']) if 'sourceOutputIndex' in json else None + source_output_index=float(json['sourceOutputIndex']) if json.get('sourceOutputIndex', None) is not None else None ) diff --git a/pycdp/cdp/web_authn.py b/pycdp/cdp/web_authn.py index 4cec9c2..2e2f5a1 100644 --- a/pycdp/cdp/web_authn.py +++ b/pycdp/cdp/web_authn.py @@ -93,6 +93,11 @@ 
class VirtualAuthenticatorOptions: #: Defaults to false. has_min_pin_length: typing.Optional[bool] = None + #: If set to true, the authenticator will support the prf extension. + #: https://w3c.github.io/webauthn/#prf-extension + #: Defaults to false. + has_prf: typing.Optional[bool] = None + #: If set to true, tests of user presence will succeed immediately. #: Otherwise, they will not be resolved. Defaults to true. automatic_presence_simulation: typing.Optional[bool] = None @@ -117,6 +122,8 @@ def to_json(self) -> T_JSON_DICT: json['hasCredBlob'] = self.has_cred_blob if self.has_min_pin_length is not None: json['hasMinPinLength'] = self.has_min_pin_length + if self.has_prf is not None: + json['hasPrf'] = self.has_prf if self.automatic_presence_simulation is not None: json['automaticPresenceSimulation'] = self.automatic_presence_simulation if self.is_user_verified is not None: @@ -128,14 +135,15 @@ def from_json(cls, json: T_JSON_DICT) -> VirtualAuthenticatorOptions: return cls( protocol=AuthenticatorProtocol.from_json(json['protocol']), transport=AuthenticatorTransport.from_json(json['transport']), - ctap2_version=Ctap2Version.from_json(json['ctap2Version']) if 'ctap2Version' in json else None, - has_resident_key=bool(json['hasResidentKey']) if 'hasResidentKey' in json else None, - has_user_verification=bool(json['hasUserVerification']) if 'hasUserVerification' in json else None, - has_large_blob=bool(json['hasLargeBlob']) if 'hasLargeBlob' in json else None, - has_cred_blob=bool(json['hasCredBlob']) if 'hasCredBlob' in json else None, - has_min_pin_length=bool(json['hasMinPinLength']) if 'hasMinPinLength' in json else None, - automatic_presence_simulation=bool(json['automaticPresenceSimulation']) if 'automaticPresenceSimulation' in json else None, - is_user_verified=bool(json['isUserVerified']) if 'isUserVerified' in json else None, + ctap2_version=Ctap2Version.from_json(json['ctap2Version']) if json.get('ctap2Version', None) is not None else None, + 
has_resident_key=bool(json['hasResidentKey']) if json.get('hasResidentKey', None) is not None else None, + has_user_verification=bool(json['hasUserVerification']) if json.get('hasUserVerification', None) is not None else None, + has_large_blob=bool(json['hasLargeBlob']) if json.get('hasLargeBlob', None) is not None else None, + has_cred_blob=bool(json['hasCredBlob']) if json.get('hasCredBlob', None) is not None else None, + has_min_pin_length=bool(json['hasMinPinLength']) if json.get('hasMinPinLength', None) is not None else None, + has_prf=bool(json['hasPrf']) if json.get('hasPrf', None) is not None else None, + automatic_presence_simulation=bool(json['automaticPresenceSimulation']) if json.get('automaticPresenceSimulation', None) is not None else None, + is_user_verified=bool(json['isUserVerified']) if json.get('isUserVerified', None) is not None else None, ) @@ -186,19 +194,27 @@ def from_json(cls, json: T_JSON_DICT) -> Credential: is_resident_credential=bool(json['isResidentCredential']), private_key=str(json['privateKey']), sign_count=int(json['signCount']), - rp_id=str(json['rpId']) if 'rpId' in json else None, - user_handle=str(json['userHandle']) if 'userHandle' in json else None, - large_blob=str(json['largeBlob']) if 'largeBlob' in json else None, + rp_id=str(json['rpId']) if json.get('rpId', None) is not None else None, + user_handle=str(json['userHandle']) if json.get('userHandle', None) is not None else None, + large_blob=str(json['largeBlob']) if json.get('largeBlob', None) is not None else None, ) -def enable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: +def enable( + enable_ui: typing.Optional[bool] = None + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: ''' Enable the WebAuthn domain and start intercepting credential storage and retrieval with a virtual authenticator. + + :param enable_ui: *(Optional)* Whether to enable the WebAuthn user interface. 
Enabling the UI is recommended for debugging and demo purposes, as it is closer to the real experience. Disabling the UI is recommended for automated testing. Supported at the embedder's discretion if UI is available. Defaults to false. ''' + params: T_JSON_DICT = dict() + if enable_ui is not None: + params['enableUI'] = enable_ui cmd_dict: T_JSON_DICT = { 'method': 'WebAuthn.enable', + 'params': params, } json = yield cmd_dict @@ -232,6 +248,35 @@ def add_virtual_authenticator( return AuthenticatorId.from_json(json['authenticatorId']) +def set_response_override_bits( + authenticator_id: AuthenticatorId, + is_bogus_signature: typing.Optional[bool] = None, + is_bad_uv: typing.Optional[bool] = None, + is_bad_up: typing.Optional[bool] = None + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + Resets parameters isBogusSignature, isBadUV, isBadUP to false if they are not present. + + :param authenticator_id: + :param is_bogus_signature: *(Optional)* If isBogusSignature is set, overrides the signature in the authenticator response to be zero. Defaults to false. + :param is_bad_uv: *(Optional)* If isBadUV is set, overrides the UV bit in the flags in the authenticator response to be zero. Defaults to false. + :param is_bad_up: *(Optional)* If isBadUP is set, overrides the UP bit in the flags in the authenticator response to be zero. Defaults to false. 
+ ''' + params: T_JSON_DICT = dict() + params['authenticatorId'] = authenticator_id.to_json() + if is_bogus_signature is not None: + params['isBogusSignature'] = is_bogus_signature + if is_bad_uv is not None: + params['isBadUV'] = is_bad_uv + if is_bad_up is not None: + params['isBadUP'] = is_bad_up + cmd_dict: T_JSON_DICT = { + 'method': 'WebAuthn.setResponseOverrideBits', + 'params': params, + } + json = yield cmd_dict + + def remove_virtual_authenticator( authenticator_id: AuthenticatorId ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: @@ -388,3 +433,37 @@ def set_automatic_presence_simulation( 'params': params, } json = yield cmd_dict + + +@event_class('WebAuthn.credentialAdded') +@dataclass +class CredentialAdded: + ''' + Triggered when a credential is added to an authenticator. + ''' + authenticator_id: AuthenticatorId + credential: Credential + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> CredentialAdded: + return cls( + authenticator_id=AuthenticatorId.from_json(json['authenticatorId']), + credential=Credential.from_json(json['credential']) + ) + + +@event_class('WebAuthn.credentialAsserted') +@dataclass +class CredentialAsserted: + ''' + Triggered when a credential is used in a webauthn assertion. 
+ ''' + authenticator_id: AuthenticatorId + credential: Credential + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> CredentialAsserted: + return cls( + authenticator_id=AuthenticatorId.from_json(json['authenticatorId']), + credential=Credential.from_json(json['credential']) + ) diff --git a/pycdp/gen/generate.py b/pycdp/gen/generate.py index 5544490..baab7c3 100644 --- a/pycdp/gen/generate.py +++ b/pycdp/gen/generate.py @@ -1071,7 +1071,10 @@ def file_type(path: str): for domain in domains: logger.info('Generating module: %s → %s/%s.py', domain.domain, output, domain.module) (output / f'{domain.module}.py').write_text(domain.generate_code()) - shutil.copyfile(Path(__file__).parent.parent / 'cdp' / 'util.py', output / 'util.py') + try: + shutil.copyfile(Path(__file__).parent.parent / 'cdp' / 'util.py', output / 'util.py') + except shutil.SameFileError: + pass generate_init(output / '__init__.py', domains) (output / 'README.md').write_text(GENERATED_PACKAGE_NOTICE) (output / 'py.typed').touch() diff --git a/pyproject.toml b/pyproject.toml index dbc1529..60f853f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,7 +21,7 @@ authors = [ ] [tool.poetry.dependencies] -python = "^3.8" +python = "^3.8, <3.11" deprecated = "1.2.9" inflection = "0.4.0" aiohttp = "3.8.1" diff --git a/update-cdp.sh b/update-cdp.sh index 2b58e7f..2c60c08 100755 --- a/update-cdp.sh +++ b/update-cdp.sh @@ -1,17 +1,29 @@ #!/bin/bash -if [ -d "devtools-protocol" ] && { [ -f "devtools-protocol/browser_protocol.json" ] || [ -f "devtools-protocol/js_protocol.json" ]; }; then - rm -f devtools-protocol/* -fi +clean_devtools_directory() { + if [ -d "devtools-protocol" ] && { [ -f "devtools-protocol/browser_protocol.json" ] || [ -f "devtools-protocol/js_protocol.json" ]; }; then + rm -f devtools-protocol/* + fi +} -wget -P devtools-protocol/ https://raw.githubusercontent.com/ChromeDevTools/devtools-protocol/master/json/browser_protocol.json 
https://raw.githubusercontent.com/ChromeDevTools/devtools-protocol/master/json/js_protocol.json -if [ $? -ne 0 ]; then - echo "Error: Failed to download files" - exit 1 -fi +download_protocol_files() { + if ! wget -P devtools-protocol/ https://raw.githubusercontent.com/ChromeDevTools/devtools-protocol/master/json/browser_protocol.json https://raw.githubusercontent.com/ChromeDevTools/devtools-protocol/master/json/js_protocol.json; then + echo "Error: Failed to download files" + exit 1 + fi +} -cdpgen --browser-protocol devtools-protocol/browser_protocol.json --js-protocol devtools-protocol/js_protocol.json --output cdp/ -if [ $? -ne 0 ]; then - echo "Error: Failed to execute cdpgen" - exit 1 -fi \ No newline at end of file +generate_cdp_classes() { + if ! cdpgen --browser-protocol devtools-protocol/browser_protocol.json --js-protocol devtools-protocol/js_protocol.json --output pycdp/cdp/; then + echo "Error: Failed to execute cdpgen" + exit 1 + fi +} + +main() { + clean_devtools_directory + download_protocol_files + generate_cdp_classes +} + +main From d380c3a1f4a139631538f32e366976925afa8db5 Mon Sep 17 00:00:00 2001 From: TurboKach Date: Sat, 22 Apr 2023 02:29:31 +0800 Subject: [PATCH 11/27] deleting temp folder after update --- pycdp/cdp/README.md | 5 + pycdp/cdp/device_access.py | 141 ++++++++++++ pycdp/cdp/fed_cm.py | 178 +++++++++++++++ pycdp/cdp/preload.py | 443 +++++++++++++++++++++++++++++++++++++ update-cdp.sh | 8 + 5 files changed, 775 insertions(+) create mode 100644 pycdp/cdp/README.md create mode 100644 pycdp/cdp/device_access.py create mode 100644 pycdp/cdp/fed_cm.py create mode 100644 pycdp/cdp/preload.py diff --git a/pycdp/cdp/README.md b/pycdp/cdp/README.md new file mode 100644 index 0000000..ec8abef --- /dev/null +++ b/pycdp/cdp/README.md @@ -0,0 +1,5 @@ +## Generated by PyCDP +The modules of this package were generated by [pycdp][1], do not modify their contents because the +changes will be overwritten in next generations. 
+ +[1]: https://github.com/HMaker/python-chrome-devtools-protocol diff --git a/pycdp/cdp/device_access.py b/pycdp/cdp/device_access.py new file mode 100644 index 0000000..2b1c60b --- /dev/null +++ b/pycdp/cdp/device_access.py @@ -0,0 +1,141 @@ +# DO NOT EDIT THIS FILE! +# +# This file is generated from the CDP specification. If you need to make +# changes, edit the generator and regenerate all of the modules. +# +# CDP domain: DeviceAccess (experimental) + +from __future__ import annotations +import enum +import typing +from dataclasses import dataclass +from .util import event_class, T_JSON_DICT + + +class RequestId(str): + ''' + Device request id. + ''' + def to_json(self) -> str: + return self + + @classmethod + def from_json(cls, json: str) -> RequestId: + return cls(json) + + def __repr__(self): + return 'RequestId({})'.format(super().__repr__()) + + +class DeviceId(str): + ''' + A device id. + ''' + def to_json(self) -> str: + return self + + @classmethod + def from_json(cls, json: str) -> DeviceId: + return cls(json) + + def __repr__(self): + return 'DeviceId({})'.format(super().__repr__()) + + +@dataclass +class PromptDevice: + ''' + Device information displayed in a user prompt to select a device. + ''' + id_: DeviceId + + #: Display name as it appears in a device request user prompt. + name: str + + def to_json(self) -> T_JSON_DICT: + json: T_JSON_DICT = dict() + json['id'] = self.id_.to_json() + json['name'] = self.name + return json + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> PromptDevice: + return cls( + id_=DeviceId.from_json(json['id']), + name=str(json['name']), + ) + + +def enable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + Enable events in this domain. + ''' + cmd_dict: T_JSON_DICT = { + 'method': 'DeviceAccess.enable', + } + json = yield cmd_dict + + +def disable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + Disable events in this domain. 
+ ''' + cmd_dict: T_JSON_DICT = { + 'method': 'DeviceAccess.disable', + } + json = yield cmd_dict + + +def select_prompt( + id_: RequestId, + device_id: DeviceId + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + Select a device in response to a DeviceAccess.deviceRequestPrompted event. + + :param id_: + :param device_id: + ''' + params: T_JSON_DICT = dict() + params['id'] = id_.to_json() + params['deviceId'] = device_id.to_json() + cmd_dict: T_JSON_DICT = { + 'method': 'DeviceAccess.selectPrompt', + 'params': params, + } + json = yield cmd_dict + + +def cancel_prompt( + id_: RequestId + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + Cancel a prompt in response to a DeviceAccess.deviceRequestPrompted event. + + :param id_: + ''' + params: T_JSON_DICT = dict() + params['id'] = id_.to_json() + cmd_dict: T_JSON_DICT = { + 'method': 'DeviceAccess.cancelPrompt', + 'params': params, + } + json = yield cmd_dict + + +@event_class('DeviceAccess.deviceRequestPrompted') +@dataclass +class DeviceRequestPrompted: + ''' + A device request opened a user prompt to select a device. Respond with the + selectPrompt or cancelPrompt command. + ''' + id_: RequestId + devices: typing.List[PromptDevice] + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> DeviceRequestPrompted: + return cls( + id_=RequestId.from_json(json['id']), + devices=[PromptDevice.from_json(i) for i in json['devices']] + ) diff --git a/pycdp/cdp/fed_cm.py b/pycdp/cdp/fed_cm.py new file mode 100644 index 0000000..823542b --- /dev/null +++ b/pycdp/cdp/fed_cm.py @@ -0,0 +1,178 @@ +# DO NOT EDIT THIS FILE! +# +# This file is generated from the CDP specification. If you need to make +# changes, edit the generator and regenerate all of the modules. 
+# +# CDP domain: FedCm (experimental) + +from __future__ import annotations +import enum +import typing +from dataclasses import dataclass +from .util import event_class, T_JSON_DICT + + +class LoginState(enum.Enum): + ''' + Whether this is a sign-up or sign-in action for this account, i.e. + whether this account has ever been used to sign in to this RP before. + ''' + SIGN_IN = "SignIn" + SIGN_UP = "SignUp" + + def to_json(self) -> str: + return self.value + + @classmethod + def from_json(cls, json: str) -> LoginState: + return cls(json) + + +@dataclass +class Account: + ''' + Corresponds to IdentityRequestAccount + ''' + account_id: str + + email: str + + name: str + + given_name: str + + picture_url: str + + idp_config_url: str + + idp_signin_url: str + + login_state: LoginState + + #: These two are only set if the loginState is signUp + terms_of_service_url: typing.Optional[str] = None + + privacy_policy_url: typing.Optional[str] = None + + def to_json(self) -> T_JSON_DICT: + json: T_JSON_DICT = dict() + json['accountId'] = self.account_id + json['email'] = self.email + json['name'] = self.name + json['givenName'] = self.given_name + json['pictureUrl'] = self.picture_url + json['idpConfigUrl'] = self.idp_config_url + json['idpSigninUrl'] = self.idp_signin_url + json['loginState'] = self.login_state.to_json() + if self.terms_of_service_url is not None: + json['termsOfServiceUrl'] = self.terms_of_service_url + if self.privacy_policy_url is not None: + json['privacyPolicyUrl'] = self.privacy_policy_url + return json + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> Account: + return cls( + account_id=str(json['accountId']), + email=str(json['email']), + name=str(json['name']), + given_name=str(json['givenName']), + picture_url=str(json['pictureUrl']), + idp_config_url=str(json['idpConfigUrl']), + idp_signin_url=str(json['idpSigninUrl']), + login_state=LoginState.from_json(json['loginState']), + terms_of_service_url=str(json['termsOfServiceUrl']) if 
json.get('termsOfServiceUrl', None) is not None else None, + privacy_policy_url=str(json['privacyPolicyUrl']) if json.get('privacyPolicyUrl', None) is not None else None, + ) + + +def enable( + disable_rejection_delay: typing.Optional[bool] = None + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + :param disable_rejection_delay: *(Optional)* Allows callers to disable the promise rejection delay that would normally happen, if this is unimportant to what's being tested. (step 4 of https://fedidcg.github.io/FedCM/#browser-api-rp-sign-in) + ''' + params: T_JSON_DICT = dict() + if disable_rejection_delay is not None: + params['disableRejectionDelay'] = disable_rejection_delay + cmd_dict: T_JSON_DICT = { + 'method': 'FedCm.enable', + 'params': params, + } + json = yield cmd_dict + + +def disable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + + cmd_dict: T_JSON_DICT = { + 'method': 'FedCm.disable', + } + json = yield cmd_dict + + +def select_account( + dialog_id: str, + account_index: int + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + :param dialog_id: + :param account_index: + ''' + params: T_JSON_DICT = dict() + params['dialogId'] = dialog_id + params['accountIndex'] = account_index + cmd_dict: T_JSON_DICT = { + 'method': 'FedCm.selectAccount', + 'params': params, + } + json = yield cmd_dict + + +def dismiss_dialog( + dialog_id: str, + trigger_cooldown: typing.Optional[bool] = None + ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + :param dialog_id: + :param trigger_cooldown: *(Optional)* + ''' + params: T_JSON_DICT = dict() + params['dialogId'] = dialog_id + if trigger_cooldown is not None: + params['triggerCooldown'] = trigger_cooldown + cmd_dict: T_JSON_DICT = { + 'method': 'FedCm.dismissDialog', + 'params': params, + } + json = yield cmd_dict + + +def reset_cooldown() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]: + ''' + Resets the cooldown time, if any, to allow the next FedCM call to show + a dialog even if one 
was recently dismissed by the user. + ''' + cmd_dict: T_JSON_DICT = { + 'method': 'FedCm.resetCooldown', + } + json = yield cmd_dict + + +@event_class('FedCm.dialogShown') +@dataclass +class DialogShown: + dialog_id: str + accounts: typing.List[Account] + #: These exist primarily so that the caller can verify the + #: RP context was used appropriately. + title: str + subtitle: typing.Optional[str] + + @classmethod + def from_json(cls, json: T_JSON_DICT) -> DialogShown: + return cls( + dialog_id=str(json['dialogId']), + accounts=[Account.from_json(i) for i in json['accounts']], + title=str(json['title']), + subtitle=str(json['subtitle']) if json.get('subtitle', None) is not None else None + ) diff --git a/pycdp/cdp/preload.py b/pycdp/cdp/preload.py new file mode 100644 index 0000000..a36043d --- /dev/null +++ b/pycdp/cdp/preload.py @@ -0,0 +1,443 @@ +# DO NOT EDIT THIS FILE! +# +# This file is generated from the CDP specification. If you need to make +# changes, edit the generator and regenerate all of the modules. +# +# CDP domain: Preload (experimental) + +from __future__ import annotations +import enum +import typing +from dataclasses import dataclass +from .util import event_class, T_JSON_DICT + +from . import dom +from . import network +from . import page + + +class RuleSetId(str): + ''' + Unique id + ''' + def to_json(self) -> str: + return self + + @classmethod + def from_json(cls, json: str) -> RuleSetId: + return cls(json) + + def __repr__(self): + return 'RuleSetId({})'.format(super().__repr__()) + + +@dataclass +class RuleSet: + ''' + Corresponds to SpeculationRuleSet + ''' + id_: RuleSetId + + #: Identifies a document which the rule set is associated with. + loader_id: network.LoaderId + + #: Source text of JSON representing the rule set. If it comes from + #: