From 309534592ae7a9fe38653149955fc35a9cdbc530 Mon Sep 17 00:00:00 2001
From: "Mark E. Haase"
Date: Mon, 28 Oct 2019 13:27:06 -0400
Subject: [PATCH 01/81] Modify header so that warning is a comment, not a doc
The docstring appears in generated documentation and autocomplete, and
the warning isn't helpful in those contexts.
---
generator/generate.py | 22 ++++++++++------------
1 file changed, 10 insertions(+), 12 deletions(-)
diff --git a/generator/generate.py b/generator/generate.py
index 5bf5eb5..428e05d 100644
--- a/generator/generate.py
+++ b/generator/generate.py
@@ -16,22 +16,19 @@
logging.basicConfig(level=log_level)
logger = logging.getLogger('generate')
-SHARED_HEADER = '''DO NOT EDIT THIS FILE
+SHARED_HEADER = '''# DO NOT EDIT THIS FILE!
+#
+# This file is generated from the CDP specification. If you need to make
+# changes, edit the generator and regenerate all of the modules.'''
-This file is generated from the CDP specification. If you need to make changes,
-edit the generator and regenerate all of the modules.'''
-
-INIT_HEADER = '''\'\'\'
-{}
-\'\'\'
+INIT_HEADER = '''{}
'''.format(SHARED_HEADER)
-MODULE_HEADER = '''\'\'\'
-{}
+MODULE_HEADER = '''{}
-Domain: {{}}
-Experimental: {{}}
+\'\'\'
+CDP {{}} Domain{{}}
\'\'\'
from cdp.util import event_class, T_JSON_DICT
@@ -730,7 +727,8 @@ def from_json(cls, domain: dict):
def generate_code(self) -> str:
''' Generate the Python module code for a given CDP domain. '''
- code = MODULE_HEADER.format(self.domain, self.experimental)
+ exp = ' (experimental)' if self.experimental else ''
+ code = MODULE_HEADER.format(self.domain, exp)
import_code = self.generate_imports()
if import_code:
code += import_code
From 48813466ab04d845083d20362779ad98cdf78b29 Mon Sep 17 00:00:00 2001
From: "Mark E. Haase"
Date: Fri, 28 Feb 2020 11:08:14 -0500
Subject: [PATCH 02/81] Update annotations (fixes #11)
Annotations are no longer quoted. Instead we use from __future__ import
annotations. Tests are updated to match and all tests pass.
---
generator/generate.py | 28 ++++++++++++++--------------
generator/test_generate.py | 38 +++++++++++++++++++-------------------
mypy.ini | 2 ++
3 files changed, 35 insertions(+), 33 deletions(-)
create mode 100644 mypy.ini
diff --git a/generator/generate.py b/generator/generate.py
index 428e05d..dcb96bb 100644
--- a/generator/generate.py
+++ b/generator/generate.py
@@ -31,6 +31,7 @@
CDP {{}} Domain{{}}
\'\'\'
+from __future__ import annotations
from cdp.util import event_class, T_JSON_DICT
from dataclasses import dataclass
import enum
@@ -153,14 +154,14 @@ def py_annotation(self) -> str:
if self.items:
if self.items.ref:
py_ref = ref_to_python(self.items.ref)
- ann = "typing.List['{}']".format(py_ref)
+ ann = "typing.List[{}]".format(py_ref)
else:
ann = 'typing.List[{}]'.format(
CdpPrimitiveType.get_annotation(self.items.type))
else:
if self.ref:
py_ref = ref_to_python(self.ref)
- ann = f"'{py_ref}'"
+ ann = py_ref
else:
ann = CdpPrimitiveType.get_annotation(
typing.cast(str, self.type))
@@ -216,7 +217,7 @@ def generate_to_json(self, dict_: str, use_self: bool=True) -> str:
def generate_from_json(self, dict_) -> str:
''' Generate the code that creates an instance from a JSON dict named
- ``json``. '''
+ ``dict_``. '''
if self.items:
if self.items.ref:
py_ref = ref_to_python(self.items.ref)
@@ -273,10 +274,9 @@ def generate_primitive_code(self) -> str:
if self.items:
if self.items.ref:
nested_type = ref_to_python(self.items.ref)
- py_type = f"typing.List['{nested_type}']"
else:
nested_type = CdpPrimitiveType.get_annotation(self.items.type)
- py_type = f'typing.List[{nested_type}]'
+ py_type = f'typing.List[{nested_type}]'
superclass = 'list'
else:
# A primitive type cannot have a ref, so there is no branch here.
@@ -295,7 +295,7 @@ def to_json(self) -> {py_type}:
def_from_json = dedent(f'''\
@classmethod
- def from_json(cls, json: {py_type}) -> '{self.id}':
+ def from_json(cls, json: {py_type}) -> {self.id}:
return cls(json)''')
code += '\n\n' + indent(def_from_json, 4)
@@ -321,7 +321,7 @@ def to_json(self) -> str:
def_from_json = dedent(f'''\
@classmethod
- def from_json(cls, json: str) -> '{self.id}':
+ def from_json(cls, json: str) -> {self.id}:
return cls(json)''')
code = f'class {self.id}(enum.Enum):\n'
@@ -376,7 +376,7 @@ def to_json(self) -> T_JSON_DICT:
# as above for readability.
def_from_json = dedent(f'''\
@classmethod
- def from_json(cls, json: T_JSON_DICT) -> '{self.id}':
+ def from_json(cls, json: T_JSON_DICT) -> {self.id}:
return cls(
''')
from_jsons = list()
@@ -418,13 +418,13 @@ def generate_code(self) -> str:
if self.items:
if self.items.ref:
nested_type = ref_to_python(self.items.ref)
- py_type = f"typing.List['{nested_type}']"
+ py_type = f"typing.List[{nested_type}]"
else:
nested_type = CdpPrimitiveType.get_annotation(self.items.type)
py_type = f'typing.List[{nested_type}]'
else:
if self.ref:
- py_type = "'{}'".format(ref_to_python(self.ref))
+ py_type = "{}".format(ref_to_python(self.ref))
else:
py_type = CdpPrimitiveType.get_annotation(
typing.cast(str, self.type))
@@ -469,14 +469,14 @@ def py_annotation(self):
if self.items:
if self.items.ref:
py_ref = ref_to_python(self.items.ref)
- ann = f"typing.List['{py_ref}']"
+ ann = f"typing.List[{py_ref}]"
else:
py_type = CdpPrimitiveType.get_annotation(self.items.type)
ann = f'typing.List[{py_type}]'
else:
if self.ref:
py_ref = ref_to_python(self.ref)
- ann = f"'{py_ref}'"
+ ann = f"{py_ref}"
else:
ann = CdpPrimitiveType.get_annotation(self.type)
if self.optional:
@@ -660,7 +660,7 @@ class {self.py_name}:''')
if self.deprecated:
code = f'@deprecated(version="{current_version}")\n' + code
-
+
code += '\n'
if self.description:
code += indent(docstring(self.description), 4)
@@ -670,7 +670,7 @@ class {self.py_name}:''')
code += '\n\n'
def_from_json = dedent(f'''\
@classmethod
- def from_json(cls, json: T_JSON_DICT) -> '{self.py_name}':
+ def from_json(cls, json: T_JSON_DICT) -> {self.py_name}:
return cls(
''')
code += indent(def_from_json, 4)
diff --git a/generator/test_generate.py b/generator/test_generate.py
index 0f1a83c..8abf51a 100644
--- a/generator/test_generate.py
+++ b/generator/test_generate.py
@@ -54,7 +54,7 @@ def to_json(self) -> str:
return self
@classmethod
- def from_json(cls, json: str) -> 'AXNodeId':
+ def from_json(cls, json: str) -> AXNodeId:
return cls(json)
def __repr__(self):
@@ -81,11 +81,11 @@ class ArrayOfStrings(list):
'''
Index of the string in the strings table.
'''
- def to_json(self) -> typing.List['StringIndex']:
+ def to_json(self) -> typing.List[StringIndex]:
return self
@classmethod
- def from_json(cls, json: typing.List['StringIndex']) -> 'ArrayOfStrings':
+ def from_json(cls, json: typing.List[StringIndex]) -> ArrayOfStrings:
return cls(json)
def __repr__(self):
@@ -128,7 +128,7 @@ def to_json(self) -> str:
return self.value
@classmethod
- def from_json(cls, json: str) -> 'AXValueSourceType':
+ def from_json(cls, json: str) -> AXValueSourceType:
return cls(json)""")
type = CdpType.from_json(json_type)
@@ -182,16 +182,16 @@ class AXValue:
A single computed AX property.
'''
#: The type of this value.
- type: 'AXValueType'
+ type: AXValueType
#: The computed value of this property.
value: typing.Optional[typing.Any] = None
#: One or more related nodes, if applicable.
- related_nodes: typing.Optional[typing.List['AXRelatedNode']] = None
+ related_nodes: typing.Optional[typing.List[AXRelatedNode]] = None
#: The sources which contributed to the computation of this property.
- sources: typing.Optional[typing.List['AXValueSource']] = None
+ sources: typing.Optional[typing.List[AXValueSource]] = None
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
@@ -205,7 +205,7 @@ def to_json(self) -> T_JSON_DICT:
return json
@classmethod
- def from_json(cls, json: T_JSON_DICT) -> 'AXValue':
+ def from_json(cls, json: T_JSON_DICT) -> AXValue:
return cls(
type=AXValueType.from_json(json['type']),
value=json['value'] if 'value' in json else None,
@@ -264,11 +264,11 @@ def test_cdp_command():
}
expected = dedent("""\
def get_partial_ax_tree(
- node_id: typing.Optional['dom.NodeId'] = None,
- backend_node_id: typing.Optional['dom.BackendNodeId'] = None,
- object_id: typing.Optional['runtime.RemoteObjectId'] = None,
+ node_id: typing.Optional[dom.NodeId] = None,
+ backend_node_id: typing.Optional[dom.BackendNodeId] = None,
+ object_id: typing.Optional[runtime.RemoteObjectId] = None,
fetch_relatives: typing.Optional[bool] = None
- ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List['AXNode']]:
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[AXNode]]:
'''
Fetches the accessibility node and partial accessibility tree for this DOM node, if it exists.
@@ -467,7 +467,7 @@ def test_cdp_command_ref_parameter():
expected = dedent("""\
def resolve_animation(
animation_id: str
- ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,'runtime.RemoteObject']:
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,runtime.RemoteObject]:
'''
Gets the remote object of the Animation.
@@ -544,7 +544,7 @@ def test_cdp_command_multiple_return():
}
expected = dedent("""\
def get_encoded_response(
- request_id: 'network.RequestId',
+ request_id: network.RequestId,
encoding: str,
quality: typing.Optional[float] = None,
size_only: typing.Optional[bool] = None
@@ -615,8 +615,8 @@ def test_cdp_command_array_of_ref_parameter():
expected = dedent("""\
def grant_permissions(
origin: str,
- permissions: typing.List['PermissionType'],
- browser_context_id: typing.Optional['target.BrowserContextID'] = None
+ permissions: typing.List[PermissionType],
+ browser_context_id: typing.Optional[target.BrowserContextID] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Grant specific permissions to the given origin and reject all others.
@@ -666,10 +666,10 @@ class RecordingStateChanged:
Called when the recording state for the service has been updated.
'''
is_recording: bool
- service: 'ServiceName'
+ service: ServiceName
@classmethod
- def from_json(cls, json: T_JSON_DICT) -> 'RecordingStateChanged':
+ def from_json(cls, json: T_JSON_DICT) -> RecordingStateChanged:
return cls(
is_recording=bool(json['isRecording']),
service=ServiceName.from_json(json['service'])
@@ -730,7 +730,7 @@ class WindowOpen:
user_gesture: bool
@classmethod
- def from_json(cls, json: T_JSON_DICT) -> 'WindowOpen':
+ def from_json(cls, json: T_JSON_DICT) -> WindowOpen:
return cls(
url=str(json['url']),
window_name=str(json['windowName']),
diff --git a/mypy.ini b/mypy.ini
new file mode 100644
index 0000000..f9e0f72
--- /dev/null
+++ b/mypy.ini
@@ -0,0 +1,2 @@
+[mypy]
+disallow_any_decorated=False
From 3c8461d59e9d4ce92f533908cd03844efb0690ac Mon Sep 17 00:00:00 2001
From: "Mark E. Haase"
Date: Fri, 28 Feb 2020 12:08:01 -0500
Subject: [PATCH 03/81] Don't generate symbols that shadow built-ins (fixes
#13)
When generating Python names, we now check to see if that name exists in
the `builtins` module. If it does, then we append an underscore. Tests
are updated and I *think* they are passing but I also have some test
failures from a merged branch.
---
generator/generate.py | 37 +++++++++++++++-----
generator/test_generate.py | 70 +++++++++++++++++---------------------
2 files changed, 61 insertions(+), 46 deletions(-)
diff --git a/generator/generate.py b/generator/generate.py
index d722c23..56c9886 100644
--- a/generator/generate.py
+++ b/generator/generate.py
@@ -1,3 +1,4 @@
+import builtins
from dataclasses import dataclass
from enum import Enum
import itertools
@@ -41,10 +42,12 @@
current_version = ''
+
def indent(s: str, n: int):
''' A shortcut for ``textwrap.indent`` that always uses spaces. '''
return tw_indent(s, n * ' ')
+
def clear_dirs(package_path: Path):
''' Remove generated code. '''
def rmdir(path):
@@ -82,6 +85,24 @@ def docstring(description: typing.Optional[str]) -> str:
return dedent("'''\n{}\n'''").format(description)
+def is_builtin(name: str) -> bool:
+ ''' Return True if ``name`` would shadow a builtin. '''
+ try:
+ getattr(builtins, name)
+ return True
+ except AttributeError:
+ return False
+
+
+def snake_case(name: str) -> str:
+ ''' Convert a camel case name to snake case. If the name would shadow a
+ Python builtin, then append an underscore. '''
+ name = inflection.underscore(name)
+ if is_builtin(name):
+ name += '_'
+ return name
+
+
def ref_to_python(ref: str) -> str:
'''
Convert a CDP ``$ref`` to the name of a Python type.
@@ -90,7 +111,7 @@ def ref_to_python(ref: str) -> str:
'''
if '.' in ref:
domain, subtype = ref.split('.')
- ref = '{}.{}'.format(inflection.underscore(domain), subtype)
+ ref = '{}.{}'.format(snake_case(domain), subtype)
return f"{ref}"
@@ -148,7 +169,7 @@ class CdpProperty:
@property
def py_name(self) -> str:
''' Get this property's Python name. '''
- return inflection.underscore(self.name)
+ return snake_case(self.name)
@property
def py_annotation(self) -> str:
@@ -333,8 +354,8 @@ def from_json(cls, json: str) -> {self.id}:
if doc:
code += indent(doc, 4) + '\n'
for enum_member in self.enum:
- snake_case = inflection.underscore(enum_member).upper()
- enum_code = f'{snake_case} = "{enum_member}"\n'
+ snake_name = snake_case(enum_member).upper()
+ enum_code = f'{snake_name} = "{enum_member}"\n'
code += indent(enum_code, 4)
code += '\n' + indent(def_to_json, 4)
code += '\n\n' + indent(def_from_json, 4)
@@ -526,7 +547,7 @@ class CdpCommand:
@property
def py_name(self):
''' Get a Python name for this command. '''
- return inflection.underscore(self.name)
+ return snake_case(self.name)
@classmethod
def from_json(cls, command, domain) -> 'CdpCommand':
@@ -686,7 +707,7 @@ class {self.py_name}:''')
if self.description:
desc += self.description
-
+
code += indent(docstring(desc), 4)
code += '\n'
code += indent(
@@ -729,7 +750,7 @@ class CdpDomain:
@property
def module(self):
''' The name of the Python module for this CDP domain. '''
- return inflection.underscore(self.domain)
+ return snake_case(self.domain)
@classmethod
def from_json(cls, domain: dict):
@@ -797,7 +818,7 @@ def generate_imports(self):
except ValueError:
continue
if domain != self.domain:
- dependencies.add(inflection.underscore(domain))
+ dependencies.add(snake_case(domain))
code = '\n'.join(f'from . import {d}' for d in sorted(dependencies))
if needs_deprecation:
diff --git a/generator/test_generate.py b/generator/test_generate.py
index 8abf51a..8a49a96 100644
--- a/generator/test_generate.py
+++ b/generator/test_generate.py
@@ -34,8 +34,6 @@ def test_docstring():
- from 'activedescendant' to 'owns' - relationships between elements other than parent/child/sibling.
'''""")
actual = docstring(description)
- print('EXPECTED:', expected)
- print('ACTUAL:', actual)
assert expected == actual
@@ -62,8 +60,6 @@ def __repr__(self):
type = CdpType.from_json(json_type)
actual = type.generate_code()
- print('EXPECTED:', expected)
- print('ACTUAL:', actual)
assert expected == actual
@@ -93,8 +89,6 @@ def __repr__(self):
type = CdpType.from_json(json_type)
actual = type.generate_code()
- print('EXPECTED:', expected)
- print('ACTUAL:', actual)
assert expected == actual
@@ -133,8 +127,6 @@ def from_json(cls, json: str) -> AXValueSourceType:
type = CdpType.from_json(json_type)
actual = type.generate_code()
- print('EXPECTED:', expected)
- print('ACTUAL:', actual)
assert expected == actual
@@ -182,7 +174,7 @@ class AXValue:
A single computed AX property.
'''
#: The type of this value.
- type: AXValueType
+ type_: AXValueType
#: The computed value of this property.
value: typing.Optional[typing.Any] = None
@@ -195,7 +187,7 @@ class AXValue:
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
- json['type'] = self.type.to_json()
+ json['type'] = self.type_.to_json()
if self.value is not None:
json['value'] = self.value
if self.related_nodes is not None:
@@ -207,7 +199,7 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> AXValue:
return cls(
- type=AXValueType.from_json(json['type']),
+ type_=AXValueType.from_json(json['type']),
value=json['value'] if 'value' in json else None,
related_nodes=[AXRelatedNode.from_json(i) for i in json['relatedNodes']] if 'relatedNodes' in json else None,
sources=[AXValueSource.from_json(i) for i in json['sources']] if 'sources' in json else None,
@@ -215,8 +207,6 @@ def from_json(cls, json: T_JSON_DICT) -> AXValue:
type = CdpType.from_json(json_type)
actual = type.generate_code()
- print('EXPECTED:', expected)
- print('ACTUAL:', actual)
assert expected == actual
@@ -297,8 +287,6 @@ def get_partial_ax_tree(
cmd = CdpCommand.from_json(json_cmd, 'Accessibility')
actual = cmd.generate_code()
- print('EXPECTED:', expected)
- print('ACTUAL:', actual)
assert expected == actual
@@ -319,8 +307,6 @@ def disable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
cmd = CdpCommand.from_json(json_cmd, 'Accessibility')
actual = cmd.generate_code()
- print('EXPECTED:', expected)
- print('ACTUAL:', actual)
assert expected == actual
@@ -345,16 +331,16 @@ def test_cdp_command_return_primitive():
}
expected = dedent("""\
def get_current_time(
- id: str
+ id_: str
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,float]:
'''
Returns the current time of the an animation.
- :param id: Id of animation.
+ :param id_: Id of animation.
:returns: Current time of the page.
'''
params: T_JSON_DICT = dict()
- params['id'] = id
+ params['id'] = id_
cmd_dict: T_JSON_DICT = {
'method': 'Animation.getCurrentTime',
'params': params,
@@ -364,8 +350,6 @@ def get_current_time(
cmd = CdpCommand.from_json(json_cmd, 'Animation')
actual = cmd.generate_code()
- print('EXPECTED:', expected)
- print('ACTUAL:', actual)
assert expected == actual
@@ -401,8 +385,6 @@ def get_browser_command_line() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typin
cmd = CdpCommand.from_json(json_cmd, 'Browser')
actual = cmd.generate_code()
- print('EXPECTED:', expected)
- print('ACTUAL:', actual)
assert expected == actual
@@ -440,8 +422,6 @@ def release_animations(
cmd = CdpCommand.from_json(json_cmd, 'Animation')
actual = cmd.generate_code()
- print('EXPECTED:', expected)
- print('ACTUAL:', actual)
assert expected == actual
@@ -485,8 +465,6 @@ def resolve_animation(
cmd = CdpCommand.from_json(json_cmd, 'Animation')
actual = cmd.generate_code()
- print('EXPECTED:', expected)
- print('ACTUAL:', actual)
assert expected == actual
@@ -582,8 +560,6 @@ def get_encoded_response(
cmd = CdpCommand.from_json(json_cmd, 'Audits')
actual = cmd.generate_code()
- print('EXPECTED:', expected)
- print('ACTUAL:', actual)
assert expected == actual
@@ -638,8 +614,6 @@ def grant_permissions(
cmd = CdpCommand.from_json(json_cmd, 'Browser')
actual = cmd.generate_code()
- print('EXPECTED:', expected)
- print('ACTUAL:', actual)
assert expected == actual
@@ -677,8 +651,6 @@ def from_json(cls, json: T_JSON_DICT) -> RecordingStateChanged:
cmd = CdpEvent.from_json(json_event, 'BackgroundService')
actual = cmd.generate_code()
- print('EXPECTED:', expected)
- print('ACTUAL:', actual)
assert expected == actual
@@ -740,8 +712,6 @@ def from_json(cls, json: T_JSON_DICT) -> WindowOpen:
cmd = CdpEvent.from_json(json_event, 'Page')
actual = cmd.generate_code()
- print('EXPECTED:', expected)
- print('ACTUAL:', actual)
assert expected == actual
@@ -846,6 +816,30 @@ def test_cdp_domain_imports():
domain = CdpDomain.from_json(json_domain)
actual = domain.generate_imports()
- print('EXPECTED:', expected)
- print('ACTUAL:', actual)
assert expected == actual
+
+
+def test_domain_shadows_builtin():
+ ''' If a domain name shadows a Python builtin, it should have an underscore
+ appended to the module name. '''
+ input_domain = {
+ "domain": "Input",
+ "types": [],
+ "commands": [],
+ "events": [],
+ }
+ domain = CdpDomain.from_json(input_domain)
+ assert domain.module == 'input_'
+
+
+def test_domain_shadows_builtin():
+ ''' If a domain name shadows a Python builtin, it should have an underscore
+ appended to the module name. '''
+ input_domain = {
+ "domain": "Input",
+ "types": [],
+ "commands": [],
+ "events": [],
+ }
+ domain = CdpDomain.from_json(input_domain)
+ assert domain.module == 'input_'
From 6ae50016520bc8382cbe6e27987fb94eeb2e223f Mon Sep 17 00:00:00 2001
From: "Mark E. Haase"
Date: Fri, 28 Feb 2020 13:00:09 -0500
Subject: [PATCH 04/81] Create a Sphinx document for each module (#10)
Each module now has its own Sphinx doc generated in docs/api/. The file
api.py contains a toctree directive and a glob so that all modules are
automatically listed in the sidebar. This makes it easier to jump to a
specific module, and also the API docs load *much faster*.
---
Makefile | 7 +-
docs/api.rst | 138 ++-----------------------------------
docs/develop.rst | 7 +-
generator/generate.py | 63 +++++++++++------
generator/test_generate.py | 84 +++++++++++++++++++---
5 files changed, 133 insertions(+), 166 deletions(-)
diff --git a/Makefile b/Makefile
index 8d7b78a..ce2693f 100644
--- a/Makefile
+++ b/Makefile
@@ -1,4 +1,9 @@
-all: mypy-generate test-generate generate test-import mypy-cdp test-cdp
+.PHONY: docs
+
+default: mypy-generate test-generate generate test-import mypy-cdp test-cdp
+
+docs:
+ $(MAKE) -C docs html
generate:
python generator/generate.py
diff --git a/docs/api.rst b/docs/api.rst
index fbfe6fd..32b57a9 100644
--- a/docs/api.rst
+++ b/docs/api.rst
@@ -1,134 +1,10 @@
-API Reference
-=============
+API Modules
+===========
-.. automodule:: cdp.accessibility
- :members:
+Each module in the API corresponds to a "domain" in CDP and may contain types,
+commands, and events.
-.. automodule:: cdp.animation
- :members:
+.. toctree::
+ :glob:
-.. automodule:: cdp.application_cache
- :members:
-
-.. automodule:: cdp.audits
- :members:
-
-.. automodule:: cdp.background_service
- :members:
-
-.. automodule:: cdp.browser
- :members:
-
-.. automodule:: cdp.cache_storage
- :members:
-
-.. automodule:: cdp.cast
- :members:
-
-.. automodule:: cdp.console
- :members:
-
-.. automodule:: cdp.css
- :members:
-
-.. automodule:: cdp.database
- :members:
-
-.. automodule:: cdp.debugger
- :members:
-
-.. automodule:: cdp.device_orientation
- :members:
-
-.. automodule:: cdp.dom
- :members:
-
-.. automodule:: cdp.dom_debugger
- :members:
-
-.. automodule:: cdp.dom_snapshot
- :members:
-
-.. automodule:: cdp.dom_storage
- :members:
-
-.. automodule:: cdp.emulation
- :members:
-
-.. automodule:: cdp.fetch
- :members:
-
-.. automodule:: cdp.headless_experimental
- :members:
-
-.. automodule:: cdp.heap_profiler
- :members:
-
-.. automodule:: cdp.indexed_db
- :members:
-
-.. automodule:: cdp.input
- :members:
-
-.. automodule:: cdp.inspector
- :members:
-
-.. automodule:: cdp.io
- :members:
-
-.. automodule:: cdp.layer_tree
- :members:
-
-.. automodule:: cdp.log
- :members:
-
-.. automodule:: cdp.memory
- :members:
-
-.. automodule:: cdp.network
- :members:
-
-.. automodule:: cdp.overlay
- :members:
-
-.. automodule:: cdp.page
- :members:
-
-.. automodule:: cdp.performance
- :members:
-
-.. automodule:: cdp.profiler
- :members:
-
-.. automodule:: cdp.runtime
- :members:
-
-.. automodule:: cdp.schema
- :members:
-
-.. automodule:: cdp.security
- :members:
-
-.. automodule:: cdp.service_worker
- :members:
-
-.. automodule:: cdp.storage
- :members:
-
-.. automodule:: cdp.system_info
- :members:
-
-.. automodule:: cdp.target
- :members:
-
-.. automodule:: cdp.tethering
- :members:
-
-.. automodule:: cdp.tracing
- :members:
-
-.. automodule:: cdp.web_audio
- :members:
-
-.. automodule:: cdp.web_authn
- :members:
+ api/*
diff --git a/docs/develop.rst b/docs/develop.rst
index 948d1e8..5422d2b 100644
--- a/docs/develop.rst
+++ b/docs/develop.rst
@@ -35,9 +35,10 @@ Note that the verification in this project occurs in two phases:
2. Run the generator.
3. Verify the *generated* code.
-We focus on more effort on step 1, because if the generator is correct then the
-generated code is correct by definition. The default ``make`` target runs all of
-these targets in order, serving as a quick way to verify the entire project.
+We focus most of the effort on step 1, because if the generator is correct then
+the generated code is correct by definition. The default ``make`` target runs
+all of these targets in order, serving as a quick way to verify the entire
+project.
To make documentation (i.e. the docs you're reading right now) go into the
``docs/`` directory and run ``make html``.
diff --git a/generator/generate.py b/generator/generate.py
index 56c9886..61a7466 100644
--- a/generator/generate.py
+++ b/generator/generate.py
@@ -48,26 +48,6 @@ def indent(s: str, n: int):
return tw_indent(s, n * ' ')
-def clear_dirs(package_path: Path):
- ''' Remove generated code. '''
- def rmdir(path):
- for subpath in path.iterdir():
- if subpath.is_file():
- subpath.unlink()
- elif subpath.is_dir():
- rmdir(subpath)
- path.rmdir()
-
- try:
- (package_path / '__init__.py').unlink()
- except FileNotFoundError:
- pass
-
- for subpath in package_path.iterdir():
- if subpath.is_dir():
- rmdir(subpath)
-
-
def inline_doc(description) -> str:
''' Generate an inline doc, e.g. ``#: This type is a ...`` '''
if not description:
@@ -741,6 +721,7 @@ def get_refs(self):
class CdpDomain:
''' A CDP domain contains metadata, types, commands, and events. '''
domain: str
+ description: typing.Optional[str]
experimental: bool
dependencies: typing.List[str]
types: typing.List[CdpType]
@@ -762,6 +743,7 @@ def from_json(cls, domain: dict):
return cls(
domain_name,
+ domain.get('description'),
domain.get('experimental', False),
domain.get('dependencies', list()),
[CdpType.from_json(type) for type in types],
@@ -826,6 +808,21 @@ def generate_imports(self):
return code
+ def generate_sphinx(self) -> str:
+ '''
+ Generate a Sphinx document for this domain.
+ '''
+ docs = self.domain + '\n'
+ docs += '=' * len(self.domain) + '\n\n'
+
+ if self.description:
+ docs += f'{self.description}\n\n'
+
+ docs += f'.. automodule:: cdp.{self.module}\n'
+ docs += ' :members:\n'
+
+ return docs
+
def parse(json_path, output_path):
'''
@@ -862,6 +859,22 @@ def generate_init(init_path, domains):
init_file.write('import cdp.{}\n'.format(domain.module))
+def generate_docs(docs_path, domains):
+ '''
+ Generate Sphinx documents for each domain.
+ '''
+ logger.info('Generating Sphinx documents')
+
+ # Remove generated documents
+ for subpath in docs_path.iterdir():
+ subpath.unlink()
+
+ # Generate document for each domain
+ for domain in domains:
+ doc = docs_path / f'{domain.module}.rst'
+ with doc.open('w') as f:
+ f.write(domain.generate_sphinx())
+
def main():
''' Main entry point. '''
here = Path(__file__).parent.resolve()
@@ -871,8 +884,13 @@ def main():
]
output_path = here.parent / 'cdp'
output_path.mkdir(exist_ok=True)
- clear_dirs(output_path)
+ # Remove generated code
+ for subpath in output_path.iterdir():
+ if subpath.is_file() and subpath.name not in ('py.typed', 'util.py'):
+ subpath.unlink()
+
+ # Parse domains
domains = list()
for json_path in json_paths:
logger.info('Parsing JSON file %s', json_path)
@@ -899,6 +917,9 @@ def main():
init_path = output_path / '__init__.py'
generate_init(init_path, domains)
+ docs_path = here.parent / 'docs' / 'api'
+ generate_docs(docs_path, domains)
+
py_typed_path = output_path / 'py.typed'
py_typed_path.touch()
diff --git a/generator/test_generate.py b/generator/test_generate.py
index 8a49a96..d607c86 100644
--- a/generator/test_generate.py
+++ b/generator/test_generate.py
@@ -832,14 +832,78 @@ def test_domain_shadows_builtin():
assert domain.module == 'input_'
-def test_domain_shadows_builtin():
- ''' If a domain name shadows a Python builtin, it should have an underscore
- appended to the module name. '''
- input_domain = {
- "domain": "Input",
- "types": [],
- "commands": [],
- "events": [],
+def test_cdp_domain_sphinx():
+ json_domain = {
+ "domain": "Animation",
+ "description": "This is the animation domain.",
+ "experimental": True,
+ "dependencies": [
+ "Runtime",
+ "DOM"
+ ],
+ "types": [
+ {
+ "id": "KeyframeStyle",
+ "description": "Keyframe Style",
+ "type": "object",
+ "properties": [
+ {
+ "name": "offset",
+ "description": "Keyframe's time offset.",
+ "type": "string"
+ },
+ {
+ "name": "easing",
+ "description": "`AnimationEffect`'s timing function.",
+ "type": "string"
+ }
+ ]
+ }
+ ],
+ "commands": [
+ {
+ "name": "getCurrentTime",
+ "description": "Returns the current time of the an animation.",
+ "parameters": [
+ {
+ "name": "id",
+ "description": "Id of animation.",
+ "type": "string"
+ }
+ ],
+ "returns": [
+ {
+ "name": "currentTime",
+ "description": "Current time of the page.",
+ "type": "number"
+ }
+ ]
+ },
+ ],
+ "events": [
+ {
+ "name": "animationCanceled",
+ "description": "Event for when an animation has been cancelled.",
+ "parameters": [
+ {
+ "name": "id",
+ "description": "Id of the animation that was cancelled.",
+ "type": "string"
+ }
+ ]
+ },
+ ]
}
- domain = CdpDomain.from_json(input_domain)
- assert domain.module == 'input_'
+ ''' A CDP domain should generate Sphinx documentation. '''
+ expected = dedent("""\
+ Animation
+ =========
+
+ This is the animation domain.
+
+ .. automodule:: cdp.animation
+ :members:
+ """)
+ domain = CdpDomain.from_json(json_domain)
+ actual = domain.generate_sphinx()
+ assert expected == actual
From 7ba80e0b1dfd68ec3bba5156c81e00945f9cbde1 Mon Sep 17 00:00:00 2001
From: "Mark E. Haase"
Date: Fri, 28 Feb 2020 14:16:23 -0500
Subject: [PATCH 05/81] Fix Sphinx build warnings (#10)
There were a few obscure warnings about backticks and pipes, as well
as some indentation issues. Those are all fixed now.
---
generator/generate.py | 58 ++++++++++++++++++++++++++++++--------
generator/test_generate.py | 10 +++++++
2 files changed, 57 insertions(+), 11 deletions(-)
diff --git a/generator/generate.py b/generator/generate.py
index 61a7466..5308927 100644
--- a/generator/generate.py
+++ b/generator/generate.py
@@ -7,6 +7,7 @@
import operator
import os
from pathlib import Path
+import re
from textwrap import dedent, indent as tw_indent
import typing
@@ -48,11 +49,39 @@ def indent(s: str, n: int):
return tw_indent(s, n * ' ')
+BACKTICK_RE = re.compile(r'`([^`]+)`(\w+)?')
+
+
+def escape_backticks(docstr: str) -> str:
+ '''
+ Escape backticks in a docstring by doubling them up.
+
+ This is a little tricky because RST requires a non-letter character after
+ the closing backticks, but some CDPs docs have things like "`AxNodeId`s".
+ If we double the backticks in that string, then it won't be valid RST. The
+ fix is to insert an apostrophe if an "s" trails the backticks.
+ '''
+ def replace_one(match):
+ if match.group(2) == 's':
+ return f"``{match.group(1)}``'s"
+ elif match.group(2):
+ # This case (some trailer other than "s") doesn't currently exist
+ # in the CDP definitions, but it's here just to be safe.
+ return f'``{match.group(1)}`` {match.group(2)}'
+ else:
+ return f'``{match.group(1)}``'
+
+ # Sometimes pipes are used where backticks should have been used.
+ docstr = docstr.replace('|', '`')
+ return BACKTICK_RE.sub(replace_one, docstr)
+
+
def inline_doc(description) -> str:
''' Generate an inline doc, e.g. ``#: This type is a ...`` '''
if not description:
return ''
+ description = escape_backticks(description)
lines = ['#: {}'.format(l) for l in description.split('\n')]
return '\n'.join(lines)
@@ -62,6 +91,7 @@ def docstring(description: typing.Optional[str]) -> str:
if not description:
return ''
+ description = escape_backticks(description)
return dedent("'''\n{}\n'''").format(description)
@@ -501,7 +531,7 @@ def py_annotation(self):
def generate_doc(self):
''' Generate the docstring for this return. '''
if self.description:
- doc = self.description.replace('`', '``')
+ doc = self.description.replace('`', '``').replace('\n', ' ')
if self.optional:
doc = f'*(Optional)* {doc}'
else:
@@ -575,14 +605,13 @@ def generate_code(self) -> str:
code += ret
# Generate the docstring
+ doc = ''
+ if self.description:
+ doc = self.description
if self.deprecated:
- doc = f'.. deprecated:: {current_version}'
- else:
- doc = ''
+ doc += f'\n\n.. deprecated:: {current_version}'
if self.experimental:
- doc += f'**EXPERIMENTAL**\n\n'
- if self.description:
- doc += self.description
+ doc += f'\n\n**EXPERIMENTAL**'
if self.parameters and doc:
doc += '\n\n'
elif not self.parameters and self.returns:
@@ -875,6 +904,7 @@ def generate_docs(docs_path, domains):
with doc.open('w') as f:
f.write(domain.generate_sphinx())
+
def main():
''' Main entry point. '''
here = Path(__file__).parent.resolve()
@@ -897,15 +927,21 @@ def main():
domains.extend(parse(json_path, output_path))
domains.sort(key=operator.attrgetter('domain'))
- # The DOM domain includes an erroneous $ref that refers to itself. It's
- # easier to patch that here than it is to modify the generator code.
+ # Patch up CDP errors. It's easier to patch that here than it is to modify
+ # the generator code.
+ # 1. DOM includes an erroneous $ref that refers to itself.
+ # 2. Page includes an event with an extraneous backtick in the description.
for domain in domains:
if domain.domain == 'DOM':
for cmd in domain.commands:
if cmd.name == 'resolveNode':
+ # Patch 1
cmd.parameters[1].ref = 'BackendNodeId'
- break
- break
+ elif domain.domain == 'Page':
+ for event in domain.events:
+ if event.name == 'screencastVisibilityChanged':
+ # Patch 2
+ event.description = event.description.replace('`', '')
for domain in domains:
logger.info('Generating module: %s → %s.py', domain.domain,
diff --git a/generator/test_generate.py b/generator/test_generate.py
index d607c86..37d8902 100644
--- a/generator/test_generate.py
+++ b/generator/test_generate.py
@@ -37,6 +37,16 @@ def test_docstring():
assert expected == actual
+def test_escape_docstring():
+ description = 'Escape a `Backtick` and some `Backtick`s.'
+ expected = dedent("""\
+ '''
+ Escape a ``Backtick`` and some ``Backtick``'s.
+ '''""")
+ actual = docstring(description)
+ assert expected == actual
+
+
def test_cdp_primitive_type():
json_type = {
"id": "AXNodeId",
From d2be9c8c4a6aeffc561fefb2ce792fcd22ffda88 Mon Sep 17 00:00:00 2001
From: "Mark E. Haase"
Date: Fri, 28 Feb 2020 15:07:03 -0500
Subject: [PATCH 06/81] Clean up CDP module pages #10
Each module page now has separate sections for types, commands, and
events.
---
generator/generate.py | 24 +++++++++++++++++-------
generator/test_generate.py | 24 ++++++++++++++++++++++--
2 files changed, 39 insertions(+), 9 deletions(-)
diff --git a/generator/generate.py b/generator/generate.py
index 5308927..727758e 100644
--- a/generator/generate.py
+++ b/generator/generate.py
@@ -28,10 +28,8 @@
'''.format(SHARED_HEADER)
MODULE_HEADER = '''{}
-
-\'\'\'
-CDP {{}} Domain{{}}
-\'\'\'
+#
+# CDP domain: {{}}{{}}
from __future__ import annotations
from cdp.util import event_class, T_JSON_DICT
@@ -843,12 +841,24 @@ def generate_sphinx(self) -> str:
'''
docs = self.domain + '\n'
docs += '=' * len(self.domain) + '\n\n'
-
if self.description:
docs += f'{self.description}\n\n'
+ if self.experimental:
+ docs += '*This CDP domain is experimental.*\n\n'
+ docs += f'.. module:: cdp.{self.module}\n\n'
+ docs += '* Types_\n* Commands_\n* Events_\n\n'
- docs += f'.. automodule:: cdp.{self.module}\n'
- docs += ' :members:\n'
+ docs += 'Types\n-----\n'
+ for type in self.types:
+ docs += f'\n.. autoclass:: {type.id}\n'
+
+ docs += '\nCommands\n--------\n'
+ for command in sorted(self.commands, key=operator.attrgetter('py_name')):
+ docs += f'\n.. autofunction:: {command.py_name}\n'
+
+ docs += '\nEvents\n------\n'
+ for event in self.events:
+ docs += f'\n.. autoclass:: {event.py_name}\n'
return docs
diff --git a/generator/test_generate.py b/generator/test_generate.py
index 37d8902..ec66160 100644
--- a/generator/test_generate.py
+++ b/generator/test_generate.py
@@ -911,8 +911,28 @@ def test_cdp_domain_sphinx():
This is the animation domain.
- .. automodule:: cdp.animation
- :members:
+ *This CDP domain is experimental.*
+
+ .. module:: cdp.animation
+
+ * Types_
+ * Commands_
+ * Events_
+
+ Types
+ -----
+
+ .. autoclass:: KeyframeStyle
+
+ Commands
+ --------
+
+ .. autofunction:: get_current_time
+
+ Events
+ ------
+
+ .. autoclass:: AnimationCanceled
""")
domain = CdpDomain.from_json(json_domain)
actual = domain.generate_sphinx()
From 29d27981363e0a790edcd11f3822362d2579b5ae Mon Sep 17 00:00:00 2001
From: "Mark E. Haase"
Date: Fri, 28 Feb 2020 15:27:49 -0500
Subject: [PATCH 07/81] Fix generator unit tests #10
These were broken when I merged in a branch earlier.
---
generator/generate.py | 4 ++--
generator/test_generate.py | 32 +++++++++++++++++++-------------
2 files changed, 21 insertions(+), 15 deletions(-)
diff --git a/generator/generate.py b/generator/generate.py
index 727758e..83463a1 100644
--- a/generator/generate.py
+++ b/generator/generate.py
@@ -529,7 +529,7 @@ def py_annotation(self):
def generate_doc(self):
''' Generate the docstring for this return. '''
if self.description:
- doc = self.description.replace('`', '``').replace('\n', ' ')
+ doc = self.description.replace('\n', ' ')
if self.optional:
doc = f'*(Optional)* {doc}'
else:
@@ -622,7 +622,7 @@ def generate_code(self) -> str:
elif len(self.returns) > 1:
doc += '\n'
doc += ':returns: A tuple with the following items:\n\n'
- ret_docs = '\n'.join(f'{i+1}. **{r.name}** - {r.generate_doc()}' for i, r
+ ret_docs = '\n'.join(f'{i}. **{r.name}** – {r.generate_doc()}' for i, r
in enumerate(self.returns))
doc += indent(ret_docs, 4)
if doc:
diff --git a/generator/test_generate.py b/generator/test_generate.py
index ec66160..226e1e1 100644
--- a/generator/test_generate.py
+++ b/generator/test_generate.py
@@ -272,12 +272,13 @@ def get_partial_ax_tree(
'''
Fetches the accessibility node and partial accessibility tree for this DOM node, if it exists.
- :param node_id: Identifier of the node to get the partial accessibility tree for.
- :param backend_node_id: Identifier of the backend node to get the partial accessibility tree for.
- :param object_id: JavaScript object id of the node wrapper to get the partial accessibility tree for.
- :param fetch_relatives: Whether to fetch this nodes ancestors, siblings and children. Defaults to true.
- :returns: The ``Accessibility.AXNode`` for this DOM node, if it exists, plus its ancestors, siblings and
- children, if requested.
+ **EXPERIMENTAL**
+
+ :param node_id: *(Optional)* Identifier of the node to get the partial accessibility tree for.
+ :param backend_node_id: *(Optional)* Identifier of the backend node to get the partial accessibility tree for.
+ :param object_id: *(Optional)* JavaScript object id of the node wrapper to get the partial accessibility tree for.
+ :param fetch_relatives: *(Optional)* Whether to fetch this nodes ancestors, siblings and children. Defaults to true.
+ :returns: The ``Accessibility.AXNode`` for this DOM node, if it exists, plus its ancestors, siblings and children, if requested.
'''
params: T_JSON_DICT = dict()
if node_id is not None:
@@ -385,6 +386,8 @@ def get_browser_command_line() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typin
Returns the command line switches for the browser process if, and only if
--enable-automation is on the commandline.
+ **EXPERIMENTAL**
+
:returns: Commandline parameters
'''
cmd_dict: T_JSON_DICT = {
@@ -543,12 +546,13 @@ def get_encoded_response(
:param request_id: Identifier of the network request to get content for.
:param encoding: The encoding to use.
- :param quality: The quality of the encoding (0-1). (defaults to 1)
- :param size_only: Whether to only return the size information (defaults to false).
- :returns: a tuple with the following items:
- 0. body: (Optional) The encoded body as a base64 string. Omitted if sizeOnly is true.
- 1. originalSize: Size before re-encoding.
- 2. encodedSize: Size after re-encoding.
+ :param quality: *(Optional)* The quality of the encoding (0-1). (defaults to 1)
+ :param size_only: *(Optional)* Whether to only return the size information (defaults to false).
+ :returns: A tuple with the following items:
+
+ 0. **body** – *(Optional)* The encoded body as a base64 string. Omitted if sizeOnly is true.
+ 1. **originalSize** – Size before re-encoding.
+ 2. **encodedSize** – Size after re-encoding.
'''
params: T_JSON_DICT = dict()
params['requestId'] = request_id.to_json()
@@ -607,9 +611,11 @@ def grant_permissions(
'''
Grant specific permissions to the given origin and reject all others.
+ **EXPERIMENTAL**
+
:param origin:
:param permissions:
- :param browser_context_id: BrowserContext to override permissions. When omitted, default browser context is used.
+ :param browser_context_id: *(Optional)* BrowserContext to override permissions. When omitted, default browser context is used.
'''
params: T_JSON_DICT = dict()
params['origin'] = origin
From 9e4d5edd1099a87ecb37324c5b5f01a526c85ddf Mon Sep 17 00:00:00 2001
From: "Mark E. Haase"
Date: Fri, 28 Feb 2020 15:41:21 -0500
Subject: [PATCH 08/81] Show more details in API docs #10
This includes members of enums, plus fields for class types and events.
---
generator/generate.py | 10 ++++++++++
generator/test_generate.py | 2 ++
2 files changed, 12 insertions(+)
diff --git a/generator/generate.py b/generator/generate.py
index 83463a1..18c76a5 100644
--- a/generator/generate.py
+++ b/generator/generate.py
@@ -849,8 +849,15 @@ def generate_sphinx(self) -> str:
docs += '* Types_\n* Commands_\n* Events_\n\n'
docs += 'Types\n-----\n'
+ docs += '\nGenerally you do not need to instantiate CDP types ' \
+ 'yourself. Instead, the API creates objects for you as return ' \
+ 'values from commands, and then you can use those objects as ' \
+ 'arguments to other commands.\n'
for type in self.types:
docs += f'\n.. autoclass:: {type.id}\n'
+ docs += ' :members:\n'
+ docs += ' :undoc-members:\n'
+ docs += ' :exclude-members: from_json, to_json\n'
docs += '\nCommands\n--------\n'
for command in sorted(self.commands, key=operator.attrgetter('py_name')):
@@ -859,6 +866,9 @@ def generate_sphinx(self) -> str:
docs += '\nEvents\n------\n'
for event in self.events:
docs += f'\n.. autoclass:: {event.py_name}\n'
+ docs += ' :members:\n'
+ docs += ' :undoc-members:\n'
+ docs += ' :exclude-members: from_json, to_json\n'
return docs
diff --git a/generator/test_generate.py b/generator/test_generate.py
index 226e1e1..0d19459 100644
--- a/generator/test_generate.py
+++ b/generator/test_generate.py
@@ -928,6 +928,8 @@ def test_cdp_domain_sphinx():
Types
-----
+ Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+
.. autoclass:: KeyframeStyle
Commands
From acb8a191888eb074e70ab9186e47b82dccfeae16 Mon Sep 17 00:00:00 2001
From: "Mark E. Haase"
Date: Fri, 28 Feb 2020 15:47:31 -0500
Subject: [PATCH 09/81] A little more cleanup of docs #10
---
docs/api.rst | 1 +
docs/index.rst | 4 +++-
2 files changed, 4 insertions(+), 1 deletion(-)
diff --git a/docs/api.rst b/docs/api.rst
index 32b57a9..7060e46 100644
--- a/docs/api.rst
+++ b/docs/api.rst
@@ -6,5 +6,6 @@ commands, and events.
.. toctree::
:glob:
+ :maxdepth: 1
api/*
diff --git a/docs/index.rst b/docs/index.rst
index 4c1142b..76930fa 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -3,8 +3,11 @@
PyCDP
=====
+Python wrappers for Chrome DevTools Protocol (CDP).
+
.. toctree::
:caption: Contents
+ :maxdepth: 1
overview
getting_started
@@ -16,7 +19,6 @@ Indices and tables
* :ref:`genindex`
* :ref:`modindex`
-* :ref:`search`
.. image:: https://hyperiongray.s3.amazonaws.com/define-hg.svg
From 934a42427435692d6d5dc3936b765c4730919a7f Mon Sep 17 00:00:00 2001
From: "Mark E. Haase"
Date: Fri, 28 Feb 2020 15:48:58 -0500
Subject: [PATCH 10/81] Regenerate code and docs
---
cdp/__init__.py | 12 +-
cdp/accessibility.py | 117 ++++----
cdp/animation.py | 112 ++++---
cdp/application_cache.py | 51 ++--
cdp/audits.py | 26 +-
cdp/background_service.py | 48 ++-
cdp/browser.py | 124 ++++----
cdp/cache_storage.py | 59 ++--
cdp/cast.py | 40 ++-
cdp/console.py | 26 +-
cdp/css.py | 307 ++++++++++---------
cdp/database.py | 46 ++-
cdp/debugger.py | 250 ++++++++--------
cdp/device_orientation.py | 18 +-
cdp/dom.py | 429 +++++++++++++--------------
cdp/dom_debugger.py | 76 +++--
cdp/dom_snapshot.py | 210 ++++++-------
cdp/dom_storage.py | 48 ++-
cdp/emulation.py | 82 ++---
cdp/fetch.py | 90 +++---
cdp/headless_experimental.py | 37 ++-
cdp/heap_profiler.py | 64 ++--
cdp/indexed_db.py | 82 +++--
cdp/{input.py => input_.py} | 94 +++---
cdp/inspector.py | 24 +-
cdp/io.py | 36 ++-
cdp/layer_tree.py | 90 +++---
cdp/log.py | 38 ++-
cdp/memory.py | 46 ++-
cdp/network.py | 460 +++++++++++++++--------------
cdp/overlay.py | 88 +++---
cdp/page.py | 454 ++++++++++++++--------------
cdp/performance.py | 30 +-
cdp/profiler.py | 98 +++---
cdp/runtime.py | 342 +++++++++++----------
cdp/schema.py | 22 +-
cdp/security.py | 70 ++---
cdp/service_worker.py | 60 ++--
cdp/storage.py | 42 ++-
cdp/system_info.py | 89 +++---
cdp/target.py | 164 +++++-----
cdp/tethering.py | 18 +-
cdp/tracing.py | 56 ++--
cdp/web_audio.py | 52 ++--
cdp/web_authn.py | 50 ++--
docs/api/accessibility.rst | 79 +++++
docs/api/animation.rst | 76 +++++
docs/api/application_cache.rst | 54 ++++
docs/api/audits.rst | 25 ++
docs/api/background_service.rst | 56 ++++
docs/api/browser.rst | 77 +++++
docs/api/cache_storage.rst | 61 ++++
docs/api/cast.rst | 49 +++
docs/api/console.rst | 37 +++
docs/api/css.rst | 212 +++++++++++++
docs/api/database.rst | 49 +++
docs/api/debugger.rst | 145 +++++++++
docs/api/device_orientation.rst | 25 ++
docs/api/dom.rst | 236 +++++++++++++++
docs/api/dom_debugger.rst | 50 ++++
docs/api/dom_snapshot.rst | 106 +++++++
docs/api/dom_storage.rst | 65 ++++
docs/api/emulation.rst | 78 +++++
docs/api/fetch.rst | 79 +++++
docs/api/headless_experimental.rst | 39 +++
docs/api/heap_profiler.rst | 90 ++++++
docs/api/indexed_db.rst | 74 +++++
docs/api/input_.rst | 52 ++++
docs/api/inspector.rst | 40 +++
docs/api/io.rst | 32 ++
docs/api/layer_tree.rst | 84 ++++++
docs/api/log.rst | 46 +++
docs/api/memory.rst | 61 ++++
docs/api/network.rst | 363 +++++++++++++++++++++++
docs/api/overlay.rst | 89 ++++++
docs/api/page.rst | 329 +++++++++++++++++++++
docs/api/performance.rst | 37 +++
docs/api/profiler.rst | 98 ++++++
docs/api/runtime.rst | 214 ++++++++++++++
docs/api/schema.rst | 28 ++
docs/api/security.rst | 71 +++++
docs/api/service_worker.rst | 90 ++++++
docs/api/storage.rst | 63 ++++
docs/api/system_info.rst | 67 +++++
docs/api/target.rst | 113 +++++++
docs/api/tethering.rst | 32 ++
docs/api/tracing.rst | 66 +++++
docs/api/web_audio.rst | 70 +++++
docs/api/web_authn.rst | 65 ++++
89 files changed, 6228 insertions(+), 2421 deletions(-)
rename cdp/{input.py => input_.py} (91%)
create mode 100644 docs/api/accessibility.rst
create mode 100644 docs/api/animation.rst
create mode 100644 docs/api/application_cache.rst
create mode 100644 docs/api/audits.rst
create mode 100644 docs/api/background_service.rst
create mode 100644 docs/api/browser.rst
create mode 100644 docs/api/cache_storage.rst
create mode 100644 docs/api/cast.rst
create mode 100644 docs/api/console.rst
create mode 100644 docs/api/css.rst
create mode 100644 docs/api/database.rst
create mode 100644 docs/api/debugger.rst
create mode 100644 docs/api/device_orientation.rst
create mode 100644 docs/api/dom.rst
create mode 100644 docs/api/dom_debugger.rst
create mode 100644 docs/api/dom_snapshot.rst
create mode 100644 docs/api/dom_storage.rst
create mode 100644 docs/api/emulation.rst
create mode 100644 docs/api/fetch.rst
create mode 100644 docs/api/headless_experimental.rst
create mode 100644 docs/api/heap_profiler.rst
create mode 100644 docs/api/indexed_db.rst
create mode 100644 docs/api/input_.rst
create mode 100644 docs/api/inspector.rst
create mode 100644 docs/api/io.rst
create mode 100644 docs/api/layer_tree.rst
create mode 100644 docs/api/log.rst
create mode 100644 docs/api/memory.rst
create mode 100644 docs/api/network.rst
create mode 100644 docs/api/overlay.rst
create mode 100644 docs/api/page.rst
create mode 100644 docs/api/performance.rst
create mode 100644 docs/api/profiler.rst
create mode 100644 docs/api/runtime.rst
create mode 100644 docs/api/schema.rst
create mode 100644 docs/api/security.rst
create mode 100644 docs/api/service_worker.rst
create mode 100644 docs/api/storage.rst
create mode 100644 docs/api/system_info.rst
create mode 100644 docs/api/target.rst
create mode 100644 docs/api/tethering.rst
create mode 100644 docs/api/tracing.rst
create mode 100644 docs/api/web_audio.rst
create mode 100644 docs/api/web_authn.rst
diff --git a/cdp/__init__.py b/cdp/__init__.py
index ec58a08..de3a091 100644
--- a/cdp/__init__.py
+++ b/cdp/__init__.py
@@ -1,9 +1,7 @@
-'''
-DO NOT EDIT THIS FILE
-
-This file is generated from the CDP specification. If you need to make changes,
-edit the generator and regenerate all of the modules.
-'''
+# DO NOT EDIT THIS FILE!
+#
+# This file is generated from the CDP specification. If you need to make
+# changes, edit the generator and regenerate all of the modules.
import cdp.util
@@ -30,7 +28,7 @@
import cdp.heap_profiler
import cdp.io
import cdp.indexed_db
-import cdp.input
+import cdp.input_
import cdp.inspector
import cdp.layer_tree
import cdp.log
diff --git a/cdp/accessibility.py b/cdp/accessibility.py
index 8c38e79..f355f85 100644
--- a/cdp/accessibility.py
+++ b/cdp/accessibility.py
@@ -1,13 +1,11 @@
-'''
-DO NOT EDIT THIS FILE
-
-This file is generated from the CDP specification. If you need to make changes,
-edit the generator and regenerate all of the modules.
-
-Domain: Accessibility
-Experimental: True
-'''
-
+# DO NOT EDIT THIS FILE!
+#
+# This file is generated from the CDP specification. If you need to make
+# changes, edit the generator and regenerate all of the modules.
+#
+# CDP domain: Accessibility (experimental)
+
+from __future__ import annotations
from cdp.util import event_class, T_JSON_DICT
from dataclasses import dataclass
import enum
@@ -25,7 +23,7 @@ def to_json(self) -> str:
return self
@classmethod
- def from_json(cls, json: str) -> 'AXNodeId':
+ def from_json(cls, json: str) -> AXNodeId:
return cls(json)
def __repr__(self):
@@ -58,7 +56,7 @@ def to_json(self) -> str:
return self.value
@classmethod
- def from_json(cls, json: str) -> 'AXValueType':
+ def from_json(cls, json: str) -> AXValueType:
return cls(json)
@@ -77,7 +75,7 @@ def to_json(self) -> str:
return self.value
@classmethod
- def from_json(cls, json: str) -> 'AXValueSourceType':
+ def from_json(cls, json: str) -> AXValueSourceType:
return cls(json)
@@ -98,7 +96,7 @@ def to_json(self) -> str:
return self.value
@classmethod
- def from_json(cls, json: str) -> 'AXValueNativeSourceType':
+ def from_json(cls, json: str) -> AXValueNativeSourceType:
return cls(json)
@@ -108,25 +106,25 @@ class AXValueSource:
A single source for a computed AX property.
'''
#: What type of source this is.
- type: 'AXValueSourceType'
+ type_: AXValueSourceType
#: The value of this property source.
- value: typing.Optional['AXValue'] = None
+ value: typing.Optional[AXValue] = None
#: The name of the relevant attribute, if any.
attribute: typing.Optional[str] = None
#: The value of the relevant attribute, if any.
- attribute_value: typing.Optional['AXValue'] = None
+ attribute_value: typing.Optional[AXValue] = None
#: Whether this source is superseded by a higher priority source.
superseded: typing.Optional[bool] = None
#: The native markup source for this value, e.g. a
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: NodeId
:members:
@@ -78,6 +81,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands `.
+
.. autofunction:: collect_class_names_from_subtree
.. autofunction:: copy_to
@@ -165,6 +177,10 @@ Commands
Events
------
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
.. autoclass:: AttributeModified
:members:
:undoc-members:
diff --git a/docs/api/dom_debugger.rst b/docs/api/dom_debugger.rst
index e9d147f..49a152c 100644
--- a/docs/api/dom_debugger.rst
+++ b/docs/api/dom_debugger.rst
@@ -13,7 +13,10 @@ execution will stop on these operations as if there was a regular breakpoint set
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: DOMBreakpointType
:members:
@@ -28,6 +31,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands `.
+
.. autofunction:: get_event_listeners
.. autofunction:: remove_dom_breakpoint
@@ -48,3 +60,5 @@ Commands
Events
------
+
+*There are no events in this module.*
diff --git a/docs/api/dom_snapshot.rst b/docs/api/dom_snapshot.rst
index 2735439..653d903 100644
--- a/docs/api/dom_snapshot.rst
+++ b/docs/api/dom_snapshot.rst
@@ -14,7 +14,10 @@ This domain facilitates obtaining document snapshots with DOM, layout, and style
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: DOMNode
:members:
@@ -94,6 +97,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands `.
+
.. autofunction:: capture_snapshot
.. autofunction:: disable
@@ -104,3 +116,5 @@ Commands
Events
------
+
+*There are no events in this module.*
diff --git a/docs/api/dom_storage.rst b/docs/api/dom_storage.rst
index edc45f4..d0ab1a1 100644
--- a/docs/api/dom_storage.rst
+++ b/docs/api/dom_storage.rst
@@ -14,7 +14,10 @@ Query and modify DOM storage.
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: StorageId
:members:
@@ -29,6 +32,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands `.
+
.. autofunction:: clear
.. autofunction:: disable
@@ -44,6 +56,10 @@ Commands
Events
------
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
.. autoclass:: DomStorageItemAdded
:members:
:undoc-members:
diff --git a/docs/api/emulation.rst b/docs/api/emulation.rst
index aee82b8..f66bea9 100644
--- a/docs/api/emulation.rst
+++ b/docs/api/emulation.rst
@@ -12,7 +12,10 @@ This domain emulates different environments for the page.
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: ScreenOrientation
:members:
@@ -27,6 +30,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands `.
+
.. autofunction:: can_emulate
.. autofunction:: clear_device_metrics_override
@@ -72,6 +84,10 @@ Commands
Events
------
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
.. autoclass:: VirtualTimeBudgetExpired
:members:
:undoc-members:
diff --git a/docs/api/fetch.rst b/docs/api/fetch.rst
index 7a8acbf..beabe3a 100644
--- a/docs/api/fetch.rst
+++ b/docs/api/fetch.rst
@@ -14,7 +14,10 @@ A domain for letting clients substitute browser's network layer with client code
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: RequestId
:members:
@@ -49,6 +52,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands `.
+
.. autofunction:: continue_request
.. autofunction:: continue_with_auth
@@ -68,6 +80,10 @@ Commands
Events
------
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
.. autoclass:: RequestPaused
:members:
:undoc-members:
diff --git a/docs/api/headless_experimental.rst b/docs/api/headless_experimental.rst
index 2c373ea..a87a3b2 100644
--- a/docs/api/headless_experimental.rst
+++ b/docs/api/headless_experimental.rst
@@ -14,7 +14,10 @@ This domain provides experimental commands only supported in headless mode.
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: ScreenshotParams
:members:
@@ -24,6 +27,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands `.
+
.. autofunction:: begin_frame
.. autofunction:: disable
@@ -33,6 +45,10 @@ Commands
Events
------
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
.. autoclass:: NeedsBeginFramesChanged
:members:
:undoc-members:
diff --git a/docs/api/heap_profiler.rst b/docs/api/heap_profiler.rst
index bedb444..92ca837 100644
--- a/docs/api/heap_profiler.rst
+++ b/docs/api/heap_profiler.rst
@@ -12,7 +12,10 @@ HeapProfiler
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: HeapSnapshotObjectId
:members:
@@ -37,6 +40,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands `.
+
.. autofunction:: add_inspected_heap_object
.. autofunction:: collect_garbage
@@ -64,6 +76,10 @@ Commands
Events
------
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
.. autoclass:: AddHeapSnapshotChunk
:members:
:undoc-members:
diff --git a/docs/api/indexed_db.rst b/docs/api/indexed_db.rst
index 528f69d..bf6e01a 100644
--- a/docs/api/indexed_db.rst
+++ b/docs/api/indexed_db.rst
@@ -12,7 +12,10 @@ IndexedDB
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: DatabaseWithObjectStores
:members:
@@ -52,6 +55,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands `.
+
.. autofunction:: clear_object_store
.. autofunction:: delete_database
@@ -72,3 +84,5 @@ Commands
Events
------
+
+*There are no events in this module.*
diff --git a/docs/api/input_.rst b/docs/api/input_.rst
index 4af4282..8914793 100644
--- a/docs/api/input_.rst
+++ b/docs/api/input_.rst
@@ -10,7 +10,10 @@ Input
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: TouchPoint
:members:
@@ -30,6 +33,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands <getting-started-commands>`.
+
.. autofunction:: dispatch_key_event
.. autofunction:: dispatch_mouse_event
@@ -50,3 +62,5 @@ Commands
Events
------
+
+*There are no events in this module.*
diff --git a/docs/api/inspector.rst b/docs/api/inspector.rst
index 03c3226..cdc2a06 100644
--- a/docs/api/inspector.rst
+++ b/docs/api/inspector.rst
@@ -12,11 +12,20 @@ Inspector
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+*There are no types in this module.*
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands <getting-started-commands>`.
+
.. autofunction:: disable
.. autofunction:: enable
@@ -24,6 +33,10 @@ Commands
Events
------
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
.. autoclass:: Detached
:members:
:undoc-members:
diff --git a/docs/api/io.rst b/docs/api/io.rst
index 95a8f65..324e300 100644
--- a/docs/api/io.rst
+++ b/docs/api/io.rst
@@ -12,7 +12,10 @@ Input/Output operations for streams produced by DevTools.
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: StreamHandle
:members:
@@ -22,6 +25,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands <getting-started-commands>`.
+
.. autofunction:: close
.. autofunction:: read
@@ -30,3 +42,5 @@ Commands
Events
------
+
+*There are no events in this module.*
diff --git a/docs/api/layer_tree.rst b/docs/api/layer_tree.rst
index c9a6f1c..9282af1 100644
--- a/docs/api/layer_tree.rst
+++ b/docs/api/layer_tree.rst
@@ -12,7 +12,10 @@ LayerTree
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: LayerId
:members:
@@ -52,6 +55,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands <getting-started-commands>`.
+
.. autofunction:: compositing_reasons
.. autofunction:: disable
@@ -73,6 +85,10 @@ Commands
Events
------
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
.. autoclass:: LayerPainted
:members:
:undoc-members:
diff --git a/docs/api/log.rst b/docs/api/log.rst
index 43d323a..038769e 100644
--- a/docs/api/log.rst
+++ b/docs/api/log.rst
@@ -12,7 +12,10 @@ Provides access to log entries.
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: LogEntry
:members:
@@ -27,6 +30,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands <getting-started-commands>`.
+
.. autofunction:: clear
.. autofunction:: disable
@@ -40,6 +52,10 @@ Commands
Events
------
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
.. autoclass:: EntryAdded
:members:
:undoc-members:
diff --git a/docs/api/memory.rst b/docs/api/memory.rst
index a8c55dc..8730cd3 100644
--- a/docs/api/memory.rst
+++ b/docs/api/memory.rst
@@ -12,7 +12,10 @@ Memory
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: PressureLevel
:members:
@@ -37,6 +40,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands <getting-started-commands>`.
+
.. autofunction:: forcibly_purge_java_script_memory
.. autofunction:: get_all_time_sampling_profile
@@ -59,3 +71,5 @@ Commands
Events
------
+
+*There are no events in this module.*
diff --git a/docs/api/network.rst b/docs/api/network.rst
index 5055a36..b76bde4 100644
--- a/docs/api/network.rst
+++ b/docs/api/network.rst
@@ -13,7 +13,10 @@ file, data and other requests and responses, their headers, bodies, timing, etc.
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: ResourceType
:members:
@@ -208,6 +211,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands <getting-started-commands>`.
+
.. autofunction:: can_clear_browser_cache
.. autofunction:: can_clear_browser_cookies
@@ -267,6 +279,10 @@ Commands
Events
------
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
.. autoclass:: DataReceived
:members:
:undoc-members:
diff --git a/docs/api/overlay.rst b/docs/api/overlay.rst
index 000a35d..7792e15 100644
--- a/docs/api/overlay.rst
+++ b/docs/api/overlay.rst
@@ -14,7 +14,10 @@ This domain provides various functionality related to drawing atop the inspected
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: HighlightConfig
:members:
@@ -29,6 +32,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands <getting-started-commands>`.
+
.. autofunction:: disable
.. autofunction:: enable
@@ -68,6 +80,10 @@ Commands
Events
------
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
.. autoclass:: InspectNodeRequested
:members:
:undoc-members:
diff --git a/docs/api/page.rst b/docs/api/page.rst
index ff5b5f0..043a880 100644
--- a/docs/api/page.rst
+++ b/docs/api/page.rst
@@ -12,7 +12,10 @@ Actions and events related to the inspected page belong to the page domain.
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: FrameId
:members:
@@ -102,6 +105,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands <getting-started-commands>`.
+
.. autofunction:: add_compilation_cache
.. autofunction:: add_script_to_evaluate_on_load
@@ -213,6 +225,10 @@ Commands
Events
------
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
.. autoclass:: DomContentEventFired
:members:
:undoc-members:
diff --git a/docs/api/performance.rst b/docs/api/performance.rst
index 2b60751..6b87d5c 100644
--- a/docs/api/performance.rst
+++ b/docs/api/performance.rst
@@ -10,7 +10,10 @@ Performance
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: Metric
:members:
@@ -20,6 +23,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands <getting-started-commands>`.
+
.. autofunction:: disable
.. autofunction:: enable
@@ -31,6 +43,10 @@ Commands
Events
------
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
.. autoclass:: Metrics
:members:
:undoc-members:
diff --git a/docs/api/profiler.rst b/docs/api/profiler.rst
index 2ad1dd9..4019b39 100644
--- a/docs/api/profiler.rst
+++ b/docs/api/profiler.rst
@@ -10,7 +10,10 @@ Profiler
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: ProfileNode
:members:
@@ -60,6 +63,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands <getting-started-commands>`.
+
.. autofunction:: disable
.. autofunction:: enable
@@ -87,6 +99,10 @@ Commands
Events
------
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
.. autoclass:: ConsoleProfileFinished
:members:
:undoc-members:
diff --git a/docs/api/runtime.rst b/docs/api/runtime.rst
index cfacd70..056955e 100644
--- a/docs/api/runtime.rst
+++ b/docs/api/runtime.rst
@@ -16,7 +16,10 @@ other objects in their object group.
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: ScriptId
:members:
@@ -126,6 +129,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands <getting-started-commands>`.
+
.. autofunction:: add_binding
.. autofunction:: await_promise
@@ -173,6 +185,10 @@ Commands
Events
------
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
.. autoclass:: BindingCalled
:members:
:undoc-members:
diff --git a/docs/api/schema.rst b/docs/api/schema.rst
index 2b070b9..c8dfa3b 100644
--- a/docs/api/schema.rst
+++ b/docs/api/schema.rst
@@ -12,7 +12,10 @@ This domain is deprecated.
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: Domain
:members:
@@ -22,7 +25,18 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands <getting-started-commands>`.
+
.. autofunction:: get_domains
Events
------
+
+*There are no events in this module.*
diff --git a/docs/api/security.rst b/docs/api/security.rst
index bd46226..d111318 100644
--- a/docs/api/security.rst
+++ b/docs/api/security.rst
@@ -12,7 +12,10 @@ Security
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: CertificateId
:members:
@@ -47,6 +50,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands <getting-started-commands>`.
+
.. autofunction:: disable
.. autofunction:: enable
@@ -60,6 +72,10 @@ Commands
Events
------
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
.. autoclass:: CertificateError
:members:
:undoc-members:
diff --git a/docs/api/service_worker.rst b/docs/api/service_worker.rst
index 553984a..86e7322 100644
--- a/docs/api/service_worker.rst
+++ b/docs/api/service_worker.rst
@@ -12,7 +12,10 @@ ServiceWorker
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: RegistrationID
:members:
@@ -47,6 +50,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands <getting-started-commands>`.
+
.. autofunction:: deliver_push_message
.. autofunction:: disable
@@ -74,6 +86,10 @@ Commands
Events
------
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
.. autoclass:: WorkerErrorReported
:members:
:undoc-members:
diff --git a/docs/api/storage.rst b/docs/api/storage.rst
index c5f70e6..4eb90b7 100644
--- a/docs/api/storage.rst
+++ b/docs/api/storage.rst
@@ -12,7 +12,10 @@ Storage
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: StorageType
:members:
@@ -27,6 +30,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands <getting-started-commands>`.
+
.. autofunction:: clear_data_for_origin
.. autofunction:: get_usage_and_quota
@@ -42,6 +54,10 @@ Commands
Events
------
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
.. autoclass:: CacheStorageContentUpdated
:members:
:undoc-members:
diff --git a/docs/api/system_info.rst b/docs/api/system_info.rst
index 4a89aa6..4e704f5 100644
--- a/docs/api/system_info.rst
+++ b/docs/api/system_info.rst
@@ -14,7 +14,10 @@ The SystemInfo domain defines methods and events for querying low-level system i
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: GPUDevice
:members:
@@ -59,9 +62,20 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands <getting-started-commands>`.
+
.. autofunction:: get_info
.. autofunction:: get_process_info
Events
------
+
+*There are no events in this module.*
diff --git a/docs/api/target.rst b/docs/api/target.rst
index 3f001a6..323cb37 100644
--- a/docs/api/target.rst
+++ b/docs/api/target.rst
@@ -12,7 +12,10 @@ Supports additional targets discovery and allows to attach to them.
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: TargetID
:members:
@@ -42,6 +45,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands <getting-started-commands>`.
+
.. autofunction:: activate_target
.. autofunction:: attach_to_browser_target
@@ -77,6 +89,10 @@ Commands
Events
------
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
.. autoclass:: AttachedToTarget
:members:
:undoc-members:
diff --git a/docs/api/tethering.rst b/docs/api/tethering.rst
index ec282fa..8d75b5f 100644
--- a/docs/api/tethering.rst
+++ b/docs/api/tethering.rst
@@ -14,11 +14,20 @@ The Tethering domain defines methods and events for browser port binding.
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+*There are no types in this module.*
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands <getting-started-commands>`.
+
.. autofunction:: bind
.. autofunction:: unbind
@@ -26,6 +35,10 @@ Commands
Events
------
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
.. autoclass:: Accepted
:members:
:undoc-members:
diff --git a/docs/api/tracing.rst b/docs/api/tracing.rst
index 0c5371a..4898f92 100644
--- a/docs/api/tracing.rst
+++ b/docs/api/tracing.rst
@@ -12,7 +12,10 @@ Tracing
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: MemoryDumpConfig
:members:
@@ -37,6 +40,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands <getting-started-commands>`.
+
.. autofunction:: end
.. autofunction:: get_categories
@@ -50,6 +62,10 @@ Commands
Events
------
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
.. autoclass:: BufferUsage
:members:
:undoc-members:
diff --git a/docs/api/web_audio.rst b/docs/api/web_audio.rst
index 514aa19..4e5233c 100644
--- a/docs/api/web_audio.rst
+++ b/docs/api/web_audio.rst
@@ -15,7 +15,10 @@ https://webaudio.github.io/web-audio-api/
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: ContextId
:members:
@@ -45,6 +48,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands <getting-started-commands>`.
+
.. autofunction:: disable
.. autofunction:: enable
@@ -54,6 +66,10 @@ Commands
Events
------
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
.. autoclass:: ContextCreated
:members:
:undoc-members:
diff --git a/docs/api/web_authn.rst b/docs/api/web_authn.rst
index d6e8f1b..8bf7d2a 100644
--- a/docs/api/web_authn.rst
+++ b/docs/api/web_authn.rst
@@ -15,7 +15,10 @@ API.
Types
-----
-Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
.. autoclass:: AuthenticatorId
:members:
@@ -45,6 +48,15 @@ Generally you do not need to instantiate CDP types yourself. Instead, the API cr
Commands
--------
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands <getting-started-commands>`.
+
.. autofunction:: add_credential
.. autofunction:: add_virtual_authenticator
@@ -63,3 +75,5 @@ Commands
Events
------
+
+*There are no events in this module.*
From f4525a7746704f5ec2dae16ae2dff66f57699324 Mon Sep 17 00:00:00 2001
From: "Mark E. Haase"
Date: Mon, 2 Mar 2020 12:39:13 -0500
Subject: [PATCH 13/81] Fix broken tests
---
generator/test_generate.py | 24 +++++++++++++++++++++++-
1 file changed, 23 insertions(+), 1 deletion(-)
diff --git a/generator/test_generate.py b/generator/test_generate.py
index 0d19459..b8e0875 100644
--- a/generator/test_generate.py
+++ b/generator/test_generate.py
@@ -928,19 +928,41 @@ def test_cdp_domain_sphinx():
Types
-----
- Generally you do not need to instantiate CDP types yourself. Instead, the API creates objects for you as return values from commands, and then you can use those objects as arguments to other commands.
+ Generally, you do not need to instantiate CDP types
+ yourself. Instead, the API creates objects for you as return
+ values from commands, and then you can use those objects as
+ arguments to other commands.
.. autoclass:: KeyframeStyle
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
Commands
--------
+ Each command is a generator function. The return
+ type ``Generator[x, y, z]`` indicates that the generator
+ *yields* arguments of type ``x``, it must be resumed with
+ an argument of type ``y``, and it returns type ``z``. In
+ this library, types ``x`` and ``y`` are the same for all
+ commands, and ``z`` is the return type you should pay attention
+ to. For more information, see
+ :ref:`Getting Started: Commands `.
+
.. autofunction:: get_current_time
Events
------
+ Generally, you do not need to instantiate CDP events
+ yourself. Instead, the API creates events for you and then
+ you use the event's attributes.
+
.. autoclass:: AnimationCanceled
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
""")
domain = CdpDomain.from_json(json_domain)
actual = domain.generate_sphinx()
From f6adff090b6fec2dfb15b3955bf511b01bcfec92 Mon Sep 17 00:00:00 2001
From: Ghxst <45855847+ghxstdev@users.noreply.github.com>
Date: Wed, 18 Mar 2020 15:36:50 +0100
Subject: [PATCH 14/81] Assumed typo fix in getting_started.rst
`target_info` can't be referenced before it's assigned, and judging from `target.py`, the expected parameter is `target_id`.
---
docs/getting_started.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/getting_started.rst b/docs/getting_started.rst
index 1891c6d..3e48fa4 100644
--- a/docs/getting_started.rst
+++ b/docs/getting_started.rst
@@ -286,7 +286,7 @@ multiple commands concurrently.
return response
target_id = target.TargetID('F86FCB9B3890EB413FAC5DD9DD150E6F')
- target_info = run_command(target.get_target_info(target_info))
+ target_info = run_command(target.get_target_info(target_id))
print(target_info)
The script above prints something like this:
From 44d58a6fb3b232d24a0236c9c77fb03d84898d29 Mon Sep 17 00:00:00 2001
From: "Mark E. Haase"
Date: Wed, 18 Mar 2020 15:49:31 -0400
Subject: [PATCH 15/81] Replace unicode dash with ASCII hyphen
This fixes some unicode issues reported in other PRs. I updated the
tests to match.
---
.gitignore | 1 +
generator/generate.py | 2 +-
generator/test_generate.py | 6 +++---
3 files changed, 5 insertions(+), 4 deletions(-)
diff --git a/.gitignore b/.gitignore
index 2a5279e..f043e9f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,6 +2,7 @@
.mypy_cache
.pytest_cache
__pycache__
+.vscode
dist
docs/_build
venv
diff --git a/generator/generate.py b/generator/generate.py
index 46477b3..6f2d9d3 100644
--- a/generator/generate.py
+++ b/generator/generate.py
@@ -622,7 +622,7 @@ def generate_code(self) -> str:
elif len(self.returns) > 1:
doc += '\n'
doc += ':returns: A tuple with the following items:\n\n'
- ret_docs = '\n'.join(f'{i}. **{r.name}** – {r.generate_doc()}' for i, r
+ ret_docs = '\n'.join(f'{i}. **{r.name}** - {r.generate_doc()}' for i, r
in enumerate(self.returns))
doc += indent(ret_docs, 4)
if doc:
diff --git a/generator/test_generate.py b/generator/test_generate.py
index b8e0875..e8b2344 100644
--- a/generator/test_generate.py
+++ b/generator/test_generate.py
@@ -550,9 +550,9 @@ def get_encoded_response(
:param size_only: *(Optional)* Whether to only return the size information (defaults to false).
:returns: A tuple with the following items:
- 0. **body** – *(Optional)* The encoded body as a base64 string. Omitted if sizeOnly is true.
- 1. **originalSize** – Size before re-encoding.
- 2. **encodedSize** – Size after re-encoding.
+ 0. **body** - *(Optional)* The encoded body as a base64 string. Omitted if sizeOnly is true.
+ 1. **originalSize** - Size before re-encoding.
+ 2. **encodedSize** - Size after re-encoding.
'''
params: T_JSON_DICT = dict()
params['requestId'] = request_id.to_json()
From ceef62ef07650f71a23a3fe72c9e87f96ff212a6 Mon Sep 17 00:00:00 2001
From: "Mark E. Haase"
Date: Wed, 18 Mar 2020 15:51:55 -0400
Subject: [PATCH 16/81] Regenerate code and docs
---
cdp/audits.py | 6 +++---
cdp/browser.py | 14 +++++++-------
cdp/cache_storage.py | 4 ++--
cdp/css.py | 22 +++++++++++-----------
cdp/database.py | 6 +++---
cdp/debugger.py | 28 ++++++++++++++--------------
cdp/dom.py | 12 ++++++------
cdp/dom_snapshot.py | 10 +++++-----
cdp/fetch.py | 4 ++--
cdp/headless_experimental.py | 4 ++--
cdp/indexed_db.py | 8 ++++----
cdp/io.py | 6 +++---
cdp/memory.py | 6 +++---
cdp/network.py | 8 ++++----
cdp/page.py | 30 +++++++++++++++---------------
cdp/runtime.py | 32 ++++++++++++++++----------------
cdp/storage.py | 6 +++---
cdp/system_info.py | 8 ++++----
cdp/tracing.py | 4 ++--
19 files changed, 109 insertions(+), 109 deletions(-)
diff --git a/cdp/audits.py b/cdp/audits.py
index 5bba206..67c4dde 100644
--- a/cdp/audits.py
+++ b/cdp/audits.py
@@ -30,9 +30,9 @@ def get_encoded_response(
:param size_only: *(Optional)* Whether to only return the size information (defaults to false).
:returns: A tuple with the following items:
- 0. **body** – *(Optional)* The encoded body as a base64 string. Omitted if sizeOnly is true.
- 1. **originalSize** – Size before re-encoding.
- 2. **encodedSize** – Size after re-encoding.
+ 0. **body** - *(Optional)* The encoded body as a base64 string. Omitted if sizeOnly is true.
+ 1. **originalSize** - Size before re-encoding.
+ 2. **encodedSize** - Size after re-encoding.
'''
params: T_JSON_DICT = dict()
params['requestId'] = request_id.to_json()
diff --git a/cdp/browser.py b/cdp/browser.py
index c357195..20102f7 100644
--- a/cdp/browser.py
+++ b/cdp/browser.py
@@ -269,11 +269,11 @@ def get_version() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[str,
:returns: A tuple with the following items:
- 0. **protocolVersion** – Protocol version.
- 1. **product** – Product name.
- 2. **revision** – Product revision.
- 3. **userAgent** – User-Agent.
- 4. **jsVersion** – V8 version.
+ 0. **protocolVersion** - Protocol version.
+ 1. **product** - Product name.
+ 2. **revision** - Product revision.
+ 3. **userAgent** - User-Agent.
+ 4. **jsVersion** - V8 version.
'''
cmd_dict: T_JSON_DICT = {
'method': 'Browser.getVersion',
@@ -387,8 +387,8 @@ def get_window_for_target(
:param target_id: *(Optional)* Devtools agent host id. If called as a part of the session, associated targetId is used.
:returns: A tuple with the following items:
- 0. **windowId** – Browser window id.
- 1. **bounds** – Bounds information of the window. When window state is 'minimized', the restored window position and size are returned.
+ 0. **windowId** - Browser window id.
+ 1. **bounds** - Bounds information of the window. When window state is 'minimized', the restored window position and size are returned.
'''
params: T_JSON_DICT = dict()
if target_id is not None:
diff --git a/cdp/cache_storage.py b/cdp/cache_storage.py
index a134642..62646c8 100644
--- a/cdp/cache_storage.py
+++ b/cdp/cache_storage.py
@@ -267,8 +267,8 @@ def request_entries(
:param path_filter: *(Optional)* If present, only return the entries containing this substring in the path
:returns: A tuple with the following items:
- 0. **cacheDataEntries** – Array of object store data entries.
- 1. **returnCount** – Count of returned entries from this storage. If pathFilter is empty, it is the count of all entries from this storage.
+ 0. **cacheDataEntries** - Array of object store data entries.
+ 1. **returnCount** - Count of returned entries from this storage. If pathFilter is empty, it is the count of all entries from this storage.
'''
params: T_JSON_DICT = dict()
params['cacheId'] = cache_id.to_json()
diff --git a/cdp/css.py b/cdp/css.py
index c751c39..bee93c6 100644
--- a/cdp/css.py
+++ b/cdp/css.py
@@ -937,9 +937,9 @@ def get_background_colors(
:param node_id: Id of the node to get background colors for.
:returns: A tuple with the following items:
- 0. **backgroundColors** – *(Optional)* The range of background colors behind this element, if it contains any visible text. If no visible text is present, this will be undefined. In the case of a flat background color, this will consist of simply that color. In the case of a gradient, this will consist of each of the color stops. For anything more complicated, this will be an empty array. Images will be ignored (as if the image had failed to load).
- 1. **computedFontSize** – *(Optional)* The computed font size for this node, as a CSS computed value string (e.g. '12px').
- 2. **computedFontWeight** – *(Optional)* The computed font weight for this node, as a CSS computed value string (e.g. 'normal' or '100').
+ 0. **backgroundColors** - *(Optional)* The range of background colors behind this element, if it contains any visible text. If no visible text is present, this will be undefined. In the case of a flat background color, this will consist of simply that color. In the case of a gradient, this will consist of each of the color stops. For anything more complicated, this will be an empty array. Images will be ignored (as if the image had failed to load).
+ 1. **computedFontSize** - *(Optional)* The computed font size for this node, as a CSS computed value string (e.g. '12px').
+ 2. **computedFontWeight** - *(Optional)* The computed font weight for this node, as a CSS computed value string (e.g. 'normal' or '100').
'''
params: T_JSON_DICT = dict()
params['nodeId'] = node_id.to_json()
@@ -984,8 +984,8 @@ def get_inline_styles_for_node(
:param node_id:
:returns: A tuple with the following items:
- 0. **inlineStyle** – *(Optional)* Inline style for the specified DOM node.
- 1. **attributesStyle** – *(Optional)* Attribute-defined element style (e.g. resulting from "width=20 height=100%").
+ 0. **inlineStyle** - *(Optional)* Inline style for the specified DOM node.
+ 1. **attributesStyle** - *(Optional)* Attribute-defined element style (e.g. resulting from "width=20 height=100%").
'''
params: T_JSON_DICT = dict()
params['nodeId'] = node_id.to_json()
@@ -1009,12 +1009,12 @@ def get_matched_styles_for_node(
:param node_id:
:returns: A tuple with the following items:
- 0. **inlineStyle** – *(Optional)* Inline style for the specified DOM node.
- 1. **attributesStyle** – *(Optional)* Attribute-defined element style (e.g. resulting from "width=20 height=100%").
- 2. **matchedCSSRules** – *(Optional)* CSS rules matching this node, from all applicable stylesheets.
- 3. **pseudoElements** – *(Optional)* Pseudo style matches for this node.
- 4. **inherited** – *(Optional)* A chain of inherited styles (from the immediate node parent up to the DOM tree root).
- 5. **cssKeyframesRules** – *(Optional)* A list of CSS keyframed animations matching this node.
+ 0. **inlineStyle** - *(Optional)* Inline style for the specified DOM node.
+ 1. **attributesStyle** - *(Optional)* Attribute-defined element style (e.g. resulting from "width=20 height=100%").
+ 2. **matchedCSSRules** - *(Optional)* CSS rules matching this node, from all applicable stylesheets.
+ 3. **pseudoElements** - *(Optional)* Pseudo style matches for this node.
+ 4. **inherited** - *(Optional)* A chain of inherited styles (from the immediate node parent up to the DOM tree root).
+ 5. **cssKeyframesRules** - *(Optional)* A list of CSS keyframed animations matching this node.
'''
params: T_JSON_DICT = dict()
params['nodeId'] = node_id.to_json()
diff --git a/cdp/database.py b/cdp/database.py
index ed01a01..71dac2c 100644
--- a/cdp/database.py
+++ b/cdp/database.py
@@ -116,9 +116,9 @@ def execute_sql(
:param query:
:returns: A tuple with the following items:
- 0. **columnNames** –
- 1. **values** –
- 2. **sqlError** –
+ 0. **columnNames** -
+ 1. **values** -
+ 2. **sqlError** -
'''
params: T_JSON_DICT = dict()
params['databaseId'] = database_id.to_json()
diff --git a/cdp/debugger.py b/cdp/debugger.py
index 9564d5c..873d783 100644
--- a/cdp/debugger.py
+++ b/cdp/debugger.py
@@ -334,8 +334,8 @@ def evaluate_on_call_frame(
:param timeout: **(EXPERIMENTAL)** *(Optional)* Terminate execution after timing out (number of milliseconds).
:returns: A tuple with the following items:
- 0. **result** – Object wrapper for the evaluation result.
- 1. **exceptionDetails** – *(Optional)* Exception details.
+ 0. **result** - Object wrapper for the evaluation result.
+ 1. **exceptionDetails** - *(Optional)* Exception details.
'''
params: T_JSON_DICT = dict()
params['callFrameId'] = call_frame_id.to_json()
@@ -488,9 +488,9 @@ def restart_frame(
:param call_frame_id: Call frame identifier to evaluate on.
:returns: A tuple with the following items:
- 0. **callFrames** – New stack trace.
- 1. **asyncStackTrace** – *(Optional)* Async stack trace, if any.
- 2. **asyncStackTraceId** – *(Optional)* Async stack trace, if any.
+ 0. **callFrames** - New stack trace.
+ 1. **asyncStackTrace** - *(Optional)* Async stack trace, if any.
+ 2. **asyncStackTraceId** - *(Optional)* Async stack trace, if any.
'''
params: T_JSON_DICT = dict()
params['callFrameId'] = call_frame_id.to_json()
@@ -620,8 +620,8 @@ def set_breakpoint(
:param condition: *(Optional)* Expression to use as a breakpoint condition. When specified, debugger will only stop on the breakpoint if this expression evaluates to true.
:returns: A tuple with the following items:
- 0. **breakpointId** – Id of the created breakpoint for further reference.
- 1. **actualLocation** – Location this breakpoint resolved into.
+ 0. **breakpointId** - Id of the created breakpoint for further reference.
+ 1. **actualLocation** - Location this breakpoint resolved into.
'''
params: T_JSON_DICT = dict()
params['location'] = location.to_json()
@@ -679,8 +679,8 @@ def set_breakpoint_by_url(
:param condition: *(Optional)* Expression to use as a breakpoint condition. When specified, debugger will only stop on the breakpoint if this expression evaluates to true.
:returns: A tuple with the following items:
- 0. **breakpointId** – Id of the created breakpoint for further reference.
- 1. **locations** – List of the locations this breakpoint resolved into upon addition.
+ 0. **breakpointId** - Id of the created breakpoint for further reference.
+ 1. **locations** - List of the locations this breakpoint resolved into upon addition.
'''
params: T_JSON_DICT = dict()
params['lineNumber'] = line_number
@@ -799,11 +799,11 @@ def set_script_source(
:param dry_run: *(Optional)* If true the change will not actually be applied. Dry run may be used to get result description without actually modifying the code.
:returns: A tuple with the following items:
- 0. **callFrames** – *(Optional)* New stack trace in case editing has happened while VM was stopped.
- 1. **stackChanged** – *(Optional)* Whether current call stack was modified after applying the changes.
- 2. **asyncStackTrace** – *(Optional)* Async stack trace, if any.
- 3. **asyncStackTraceId** – *(Optional)* Async stack trace, if any.
- 4. **exceptionDetails** – *(Optional)* Exception details if any.
+ 0. **callFrames** - *(Optional)* New stack trace in case editing has happened while VM was stopped.
+ 1. **stackChanged** - *(Optional)* Whether current call stack was modified after applying the changes.
+ 2. **asyncStackTrace** - *(Optional)* Async stack trace, if any.
+ 3. **asyncStackTraceId** - *(Optional)* Async stack trace, if any.
+ 4. **exceptionDetails** - *(Optional)* Exception details if any.
'''
params: T_JSON_DICT = dict()
params['scriptId'] = script_id.to_json()
diff --git a/cdp/dom.py b/cdp/dom.py
index e77d944..8e03c98 100644
--- a/cdp/dom.py
+++ b/cdp/dom.py
@@ -761,8 +761,8 @@ def get_node_for_location(
:param include_user_agent_shadow_dom: *(Optional)* False to skip to the nearest non-UA shadow root ancestor (default: false).
:returns: A tuple with the following items:
- 0. **backendNodeId** – Resulting node.
- 1. **nodeId** – *(Optional)* Id of the node at given coordinates, only when enabled and requested document.
+ 0. **backendNodeId** - Resulting node.
+ 1. **nodeId** - *(Optional)* Id of the node at given coordinates, only when enabled and requested document.
'''
params: T_JSON_DICT = dict()
params['x'] = x
@@ -939,8 +939,8 @@ def perform_search(
:param include_user_agent_shadow_dom: *(Optional)* True to search in user agent shadow DOM.
:returns: A tuple with the following items:
- 0. **searchId** – Unique search session identifier.
- 1. **resultCount** – Number of search results.
+ 0. **searchId** - Unique search session identifier.
+ 1. **resultCount** - Number of search results.
'''
params: T_JSON_DICT = dict()
params['query'] = query
@@ -1376,8 +1376,8 @@ def get_frame_owner(
:param frame_id:
:returns: A tuple with the following items:
- 0. **backendNodeId** – Resulting node.
- 1. **nodeId** – *(Optional)* Id of the node at given coordinates, only when enabled and requested document.
+ 0. **backendNodeId** - Resulting node.
+ 1. **nodeId** - *(Optional)* Id of the node at given coordinates, only when enabled and requested document.
'''
params: T_JSON_DICT = dict()
params['frameId'] = frame_id.to_json()
diff --git a/cdp/dom_snapshot.py b/cdp/dom_snapshot.py
index acaa411..fa63767 100644
--- a/cdp/dom_snapshot.py
+++ b/cdp/dom_snapshot.py
@@ -758,9 +758,9 @@ def get_snapshot(
:param include_user_agent_shadow_tree: *(Optional)* Whether to include UA shadow tree in the snapshot (default false).
:returns: A tuple with the following items:
- 0. **domNodes** – The nodes in the DOM tree. The DOMNode at index 0 corresponds to the root document.
- 1. **layoutTreeNodes** – The nodes in the layout tree.
- 2. **computedStyles** – Whitelisted ComputedStyle properties for each node in the layout tree.
+ 0. **domNodes** - The nodes in the DOM tree. The DOMNode at index 0 corresponds to the root document.
+ 1. **layoutTreeNodes** - The nodes in the layout tree.
+ 2. **computedStyles** - Whitelisted ComputedStyle properties for each node in the layout tree.
'''
params: T_JSON_DICT = dict()
params['computedStyleWhitelist'] = [i for i in computed_style_whitelist]
@@ -796,8 +796,8 @@ def capture_snapshot(
:param include_dom_rects: *(Optional)* Whether to include DOM rectangles (offsetRects, clientRects, scrollRects) into the snapshot
:returns: A tuple with the following items:
- 0. **documents** – The nodes in the DOM tree. The DOMNode at index 0 corresponds to the root document.
- 1. **strings** – Shared string table that all string properties refer to with indexes.
+ 0. **documents** - The nodes in the DOM tree. The DOMNode at index 0 corresponds to the root document.
+ 1. **strings** - Shared string table that all string properties refer to with indexes.
'''
params: T_JSON_DICT = dict()
params['computedStyles'] = [i for i in computed_styles]
diff --git a/cdp/fetch.py b/cdp/fetch.py
index cc22fea..1043e71 100644
--- a/cdp/fetch.py
+++ b/cdp/fetch.py
@@ -325,8 +325,8 @@ def get_response_body(
:param request_id: Identifier for the intercepted request to get body for.
:returns: A tuple with the following items:
- 0. **body** – Response body.
- 1. **base64Encoded** – True, if content was sent as base64.
+ 0. **body** - Response body.
+ 1. **base64Encoded** - True, if content was sent as base64.
'''
params: T_JSON_DICT = dict()
params['requestId'] = request_id.to_json()
diff --git a/cdp/headless_experimental.py b/cdp/headless_experimental.py
index ad57ee7..72f81be 100644
--- a/cdp/headless_experimental.py
+++ b/cdp/headless_experimental.py
@@ -57,8 +57,8 @@ def begin_frame(
:param screenshot: *(Optional)* If set, a screenshot of the frame will be captured and returned in the response. Otherwise, no screenshot will be captured. Note that capturing a screenshot can fail, for example, during renderer initialization. In such a case, no screenshot data will be returned.
:returns: A tuple with the following items:
- 0. **hasDamage** – Whether the BeginFrame resulted in damage and, thus, a new frame was committed to the display. Reported for diagnostic uses, may be removed in the future.
- 1. **screenshotData** – *(Optional)* Base64-encoded image data of the screenshot, if one was requested and successfully taken.
+ 0. **hasDamage** - Whether the BeginFrame resulted in damage and, thus, a new frame was committed to the display. Reported for diagnostic uses, may be removed in the future.
+ 1. **screenshotData** - *(Optional)* Base64-encoded image data of the screenshot, if one was requested and successfully taken.
'''
params: T_JSON_DICT = dict()
if frame_time_ticks is not None:
diff --git a/cdp/indexed_db.py b/cdp/indexed_db.py
index 305c111..56f1328 100644
--- a/cdp/indexed_db.py
+++ b/cdp/indexed_db.py
@@ -368,8 +368,8 @@ def request_data(
:param key_range: *(Optional)* Key range.
:returns: A tuple with the following items:
- 0. **objectStoreDataEntries** – Array of object store data entries.
- 1. **hasMore** – If true, there are more entries to fetch in the given range.
+ 0. **objectStoreDataEntries** - Array of object store data entries.
+ 1. **hasMore** - If true, there are more entries to fetch in the given range.
'''
params: T_JSON_DICT = dict()
params['securityOrigin'] = security_origin
@@ -404,8 +404,8 @@ def get_metadata(
:param object_store_name: Object store name.
:returns: A tuple with the following items:
- 0. **entriesCount** – the entries count
- 1. **keyGeneratorValue** – the current value of key generator, to become the next inserted key into the object store. Valid if objectStore.autoIncrement is true.
+ 0. **entriesCount** - the entries count
+ 1. **keyGeneratorValue** - the current value of key generator, to become the next inserted key into the object store. Valid if objectStore.autoIncrement is true.
'''
params: T_JSON_DICT = dict()
params['securityOrigin'] = security_origin
diff --git a/cdp/io.py b/cdp/io.py
index 6b145c6..91b1cb2 100644
--- a/cdp/io.py
+++ b/cdp/io.py
@@ -60,9 +60,9 @@ def read(
:param size: *(Optional)* Maximum number of bytes to read (left upon the agent discretion if not specified).
:returns: A tuple with the following items:
- 0. **base64Encoded** – *(Optional)* Set if the data is base64-encoded
- 1. **data** – Data that were read.
- 2. **eof** – Set if the end-of-file condition occured while reading.
+ 0. **base64Encoded** - *(Optional)* Set if the data is base64-encoded
+ 1. **data** - Data that were read.
+ 2. **eof** - Set if the end-of-file condition occured while reading.
'''
params: T_JSON_DICT = dict()
params['handle'] = handle.to_json()
diff --git a/cdp/memory.py b/cdp/memory.py
index 1f7f03c..6647a7f 100644
--- a/cdp/memory.py
+++ b/cdp/memory.py
@@ -122,9 +122,9 @@ def get_dom_counters() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[
:returns: A tuple with the following items:
- 0. **documents** –
- 1. **nodes** –
- 2. **jsEventListeners** –
+ 0. **documents** -
+ 1. **nodes** -
+ 2. **jsEventListeners** -
'''
cmd_dict: T_JSON_DICT = {
'method': 'Memory.getDOMCounters',
diff --git a/cdp/network.py b/cdp/network.py
index 687edb4..6e60c51 100644
--- a/cdp/network.py
+++ b/cdp/network.py
@@ -1704,8 +1704,8 @@ def get_response_body(
:param request_id: Identifier of the network request to get content for.
:returns: A tuple with the following items:
- 0. **body** – Response body.
- 1. **base64Encoded** – True, if content was sent as base64.
+ 0. **body** - Response body.
+ 1. **base64Encoded** - True, if content was sent as base64.
'''
params: T_JSON_DICT = dict()
params['requestId'] = request_id.to_json()
@@ -1750,8 +1750,8 @@ def get_response_body_for_interception(
:param interception_id: Identifier for the intercepted request to get body for.
:returns: A tuple with the following items:
- 0. **body** – Response body.
- 1. **base64Encoded** – True, if content was sent as base64.
+ 0. **body** - Response body.
+ 1. **base64Encoded** - True, if content was sent as base64.
'''
params: T_JSON_DICT = dict()
params['interceptionId'] = interception_id.to_json()
diff --git a/cdp/page.py b/cdp/page.py
index 06f6134..6eff865 100644
--- a/cdp/page.py
+++ b/cdp/page.py
@@ -859,9 +859,9 @@ def get_app_manifest() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[
:returns: A tuple with the following items:
- 0. **url** – Manifest location.
- 1. **errors** –
- 2. **data** – *(Optional)* Manifest content.
+ 0. **url** - Manifest location.
+ 1. **errors** -
+ 2. **data** - *(Optional)* Manifest content.
'''
cmd_dict: T_JSON_DICT = {
'method': 'Page.getAppManifest',
@@ -927,9 +927,9 @@ def get_layout_metrics() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tupl
:returns: A tuple with the following items:
- 0. **layoutViewport** – Metrics relating to the layout viewport.
- 1. **visualViewport** – Metrics relating to the visual viewport.
- 2. **contentSize** – Size of scrollable area.
+ 0. **layoutViewport** - Metrics relating to the layout viewport.
+ 1. **visualViewport** - Metrics relating to the visual viewport.
+ 2. **contentSize** - Size of scrollable area.
'''
cmd_dict: T_JSON_DICT = {
'method': 'Page.getLayoutMetrics',
@@ -948,8 +948,8 @@ def get_navigation_history() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.
:returns: A tuple with the following items:
- 0. **currentIndex** – Index of the current navigation history entry.
- 1. **entries** – Array of navigation history entries.
+ 0. **currentIndex** - Index of the current navigation history entry.
+ 1. **entries** - Array of navigation history entries.
'''
cmd_dict: T_JSON_DICT = {
'method': 'Page.getNavigationHistory',
@@ -984,8 +984,8 @@ def get_resource_content(
:param url: URL of the resource to get content for.
:returns: A tuple with the following items:
- 0. **content** – Resource content.
- 1. **base64Encoded** – True, if content was served as base64.
+ 0. **content** - Resource content.
+ 1. **base64Encoded** - True, if content was served as base64.
'''
params: T_JSON_DICT = dict()
params['frameId'] = frame_id.to_json()
@@ -1052,9 +1052,9 @@ def navigate(
:param frame_id: *(Optional)* Frame id to navigate, if not specified navigates the top frame.
:returns: A tuple with the following items:
- 0. **frameId** – Frame id that has navigated (or failed to navigate)
- 1. **loaderId** – *(Optional)* Loader identifier.
- 2. **errorText** – *(Optional)* User friendly error message, present if and only if navigation has failed.
+ 0. **frameId** - Frame id that has navigated (or failed to navigate)
+ 1. **loaderId** - *(Optional)* Loader identifier.
+ 2. **errorText** - *(Optional)* User friendly error message, present if and only if navigation has failed.
'''
params: T_JSON_DICT = dict()
params['url'] = url
@@ -1132,8 +1132,8 @@ def print_to_pdf(
:param transfer_mode: **(EXPERIMENTAL)** *(Optional)* return as stream
:returns: A tuple with the following items:
- 0. **data** – Base64-encoded pdf data. Empty if `` returnAsStream` is specified.
- 1. **stream** – *(Optional)* A handle of the stream that holds resulting PDF data.
+ 0. **data** - Base64-encoded pdf data. Empty if `` returnAsStream` is specified.
+ 1. **stream** - *(Optional)* A handle of the stream that holds resulting PDF data.
'''
params: T_JSON_DICT = dict()
if landscape is not None:
diff --git a/cdp/runtime.py b/cdp/runtime.py
index 02150a4..0a22db6 100644
--- a/cdp/runtime.py
+++ b/cdp/runtime.py
@@ -704,8 +704,8 @@ def await_promise(
:param generate_preview: *(Optional)* Whether preview should be generated for the result.
:returns: A tuple with the following items:
- 0. **result** – Promise result. Will contain rejected value if promise was rejected.
- 1. **exceptionDetails** – *(Optional)* Exception details if stack strace is available.
+ 0. **result** - Promise result. Will contain rejected value if promise was rejected.
+ 1. **exceptionDetails** - *(Optional)* Exception details if stack strace is available.
'''
params: T_JSON_DICT = dict()
params['promiseObjectId'] = promise_object_id.to_json()
@@ -752,8 +752,8 @@ def call_function_on(
:param object_group: *(Optional)* Symbolic group name that can be used to release multiple objects. If objectGroup is not specified and objectId is, objectGroup will be inherited from object.
:returns: A tuple with the following items:
- 0. **result** – Call result.
- 1. **exceptionDetails** – *(Optional)* Exception details.
+ 0. **result** - Call result.
+ 1. **exceptionDetails** - *(Optional)* Exception details.
'''
params: T_JSON_DICT = dict()
params['functionDeclaration'] = function_declaration
@@ -801,8 +801,8 @@ def compile_script(
:param execution_context_id: *(Optional)* Specifies in which execution context to perform script run. If the parameter is omitted the evaluation will be performed in the context of the inspected page.
:returns: A tuple with the following items:
- 0. **scriptId** – *(Optional)* Id of the script.
- 1. **exceptionDetails** – *(Optional)* Exception details.
+ 0. **scriptId** - *(Optional)* Id of the script.
+ 1. **exceptionDetails** - *(Optional)* Exception details.
'''
params: T_JSON_DICT = dict()
params['expression'] = expression
@@ -882,8 +882,8 @@ def evaluate(
:param timeout: **(EXPERIMENTAL)** *(Optional)* Terminate execution after timing out (number of milliseconds).
:returns: A tuple with the following items:
- 0. **result** – Evaluation result.
- 1. **exceptionDetails** – *(Optional)* Exception details.
+ 0. **result** - Evaluation result.
+ 1. **exceptionDetails** - *(Optional)* Exception details.
'''
params: T_JSON_DICT = dict()
params['expression'] = expression
@@ -942,8 +942,8 @@ def get_heap_usage() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[fl
:returns: A tuple with the following items:
- 0. **usedSize** – Used heap size in bytes.
- 1. **totalSize** – Allocated heap size in bytes.
+ 0. **usedSize** - Used heap size in bytes.
+ 1. **totalSize** - Allocated heap size in bytes.
'''
cmd_dict: T_JSON_DICT = {
'method': 'Runtime.getHeapUsage',
@@ -971,10 +971,10 @@ def get_properties(
:param generate_preview: **(EXPERIMENTAL)** *(Optional)* Whether preview should be generated for the results.
:returns: A tuple with the following items:
- 0. **result** – Object properties.
- 1. **internalProperties** – *(Optional)* Internal object properties (only of the element itself).
- 2. **privateProperties** – *(Optional)* Object private properties.
- 3. **exceptionDetails** – *(Optional)* Exception details.
+ 0. **result** - Object properties.
+ 1. **internalProperties** - *(Optional)* Internal object properties (only of the element itself).
+ 2. **privateProperties** - *(Optional)* Object private properties.
+ 3. **exceptionDetails** - *(Optional)* Exception details.
'''
params: T_JSON_DICT = dict()
params['objectId'] = object_id.to_json()
@@ -1105,8 +1105,8 @@ def run_script(
:param await_promise: *(Optional)* Whether execution should ````await``` for resulting value and return once awaited promise is resolved.
:returns: A tuple with the following items:
- 0. **result** – Run result.
- 1. **exceptionDetails** – *(Optional)* Exception details.
+ 0. **result** - Run result.
+ 1. **exceptionDetails** - *(Optional)* Exception details.
'''
params: T_JSON_DICT = dict()
params['scriptId'] = script_id.to_json()
diff --git a/cdp/storage.py b/cdp/storage.py
index 0fa0c7d..80f4b88 100644
--- a/cdp/storage.py
+++ b/cdp/storage.py
@@ -90,9 +90,9 @@ def get_usage_and_quota(
:param origin: Security origin.
:returns: A tuple with the following items:
- 0. **usage** – Storage usage (bytes).
- 1. **quota** – Storage quota (bytes).
- 2. **usageBreakdown** – Storage usage per type (bytes).
+ 0. **usage** - Storage usage (bytes).
+ 1. **quota** - Storage quota (bytes).
+ 2. **usageBreakdown** - Storage usage per type (bytes).
'''
params: T_JSON_DICT = dict()
params['origin'] = origin
diff --git a/cdp/system_info.py b/cdp/system_info.py
index 2f4a844..71cf4d7 100644
--- a/cdp/system_info.py
+++ b/cdp/system_info.py
@@ -291,10 +291,10 @@ def get_info() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[GPUInfo,
:returns: A tuple with the following items:
- 0. **gpu** – Information about the GPUs on the system.
- 1. **modelName** – A platform-dependent description of the model of the machine. On Mac OS, this is, for example, 'MacBookPro'. Will be the empty string if not supported.
- 2. **modelVersion** – A platform-dependent description of the version of the machine. On Mac OS, this is, for example, '10.1'. Will be the empty string if not supported.
- 3. **commandLine** – The command line string used to launch the browser. Will be the empty string if not supported.
+ 0. **gpu** - Information about the GPUs on the system.
+ 1. **modelName** - A platform-dependent description of the model of the machine. On Mac OS, this is, for example, 'MacBookPro'. Will be the empty string if not supported.
+ 2. **modelVersion** - A platform-dependent description of the version of the machine. On Mac OS, this is, for example, '10.1'. Will be the empty string if not supported.
+ 3. **commandLine** - The command line string used to launch the browser. Will be the empty string if not supported.
'''
cmd_dict: T_JSON_DICT = {
'method': 'SystemInfo.getInfo',
diff --git a/cdp/tracing.py b/cdp/tracing.py
index 4f5832b..475751b 100644
--- a/cdp/tracing.py
+++ b/cdp/tracing.py
@@ -166,8 +166,8 @@ def request_memory_dump() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tup
:returns: A tuple with the following items:
- 0. **dumpGuid** – GUID of the resulting global memory dump.
- 1. **success** – True iff the global memory dump succeeded.
+ 0. **dumpGuid** - GUID of the resulting global memory dump.
+ 1. **success** - True iff the global memory dump succeeded.
'''
cmd_dict: T_JSON_DICT = {
'method': 'Tracing.requestMemoryDump',
From c5dff78553d7c0855d287efdbc5694cb8f5b1b5e Mon Sep 17 00:00:00 2001
From: "Mark E. Haase"
Date: Tue, 14 Apr 2020 08:31:04 -0400
Subject: [PATCH 17/81] Switch from setup.py to Poetry.
---
.gitignore | 3 +-
.travis.yml | 8 +-
Makefile | 8 +-
docs/develop.rst | 9 +-
docs/overview.rst | 2 +-
poetry.lock | 729 ++++++++++++++++++++++++++++++++++++++++++++++
pyproject.toml | 30 ++
requirements.txt | 9 -
setup.py | 31 --
9 files changed, 777 insertions(+), 52 deletions(-)
create mode 100644 poetry.lock
create mode 100644 pyproject.toml
delete mode 100644 requirements.txt
delete mode 100644 setup.py
diff --git a/.gitignore b/.gitignore
index f043e9f..f8ddaa9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,9 +1,10 @@
*.egg-info
+.ipynb_checkpoints
.mypy_cache
.pytest_cache
__pycache__
.vscode
+build
dist
docs/_build
venv
-build
\ No newline at end of file
diff --git a/.travis.yml b/.travis.yml
index 987f5e1..69b90ef 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -8,9 +8,11 @@ matrix:
include:
- python: 3.7
+before_install:
+ - pip install poetry
+
install:
- - pip install -e .
- - pip install -r requirements.txt
+ - poetry install
script:
- - make
+ - poetry run make
diff --git a/Makefile b/Makefile
index ce2693f..a560d24 100644
--- a/Makefile
+++ b/Makefile
@@ -1,3 +1,6 @@
+# The targets in this makefile should be executed inside Poetry, i.e. `poetry run
+# make docs`.
+
.PHONY: docs
default: mypy-generate test-generate generate test-import mypy-cdp test-cdp
@@ -14,11 +17,6 @@ mypy-cdp:
mypy-generate:
mypy generator/
-publish:
- rm -fr dist chrome_devtools_protocol.egg-info
- $(PYTHON) setup.py sdist
- twine upload dist/*
-
test-cdp:
pytest test/
diff --git a/docs/develop.rst b/docs/develop.rst
index 5422d2b..3e6bd73 100644
--- a/docs/develop.rst
+++ b/docs/develop.rst
@@ -6,8 +6,13 @@ modify the code generation process. Note that code is generated and then checked
in, so that anybody who wants to use the library can use it immediately–no build
step is required in that scenario.
-You'll first need to install the extra dependencies specified in
-``requirements.txt``, ideally inside a virtual environment.
+The repository uses `Poetry <https://python-poetry.org/>`_ to manage dependencies. Once
+you have Poetry installed, use this command to create a new virtual environment and
+install PyCDP and its dependencies (including dev dependencies) in it.
+
+::
+
+ $ poetry install
Next, a ``Makefile`` is included that provides the following build targets:
diff --git a/docs/overview.rst b/docs/overview.rst
index 642c189..eeacb11 100644
--- a/docs/overview.rst
+++ b/docs/overview.rst
@@ -31,7 +31,7 @@ Chrome package:
* `Windows 32-bit `_
* `Windows 64-bit `_
-**To install (requires Python ≥3.7):**
+**Install from PyPI (requires Python ≥3.7):**
::
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 0000000..2b95fae
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,729 @@
+[[package]]
+category = "dev"
+description = "A configurable sidebar-enabled Sphinx theme"
+name = "alabaster"
+optional = false
+python-versions = "*"
+version = "0.7.12"
+
+[[package]]
+category = "dev"
+description = "Atomic file writes."
+marker = "sys_platform == \"win32\""
+name = "atomicwrites"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+version = "1.3.0"
+
+[[package]]
+category = "dev"
+description = "Classes Without Boilerplate"
+name = "attrs"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+version = "19.3.0"
+
+[package.extras]
+azure-pipelines = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "pytest-azurepipelines"]
+dev = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "pre-commit"]
+docs = ["sphinx", "zope.interface"]
+tests = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"]
+
+[[package]]
+category = "dev"
+description = "Internationalization utilities"
+name = "babel"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+version = "2.8.0"
+
+[package.dependencies]
+pytz = ">=2015.7"
+
+[[package]]
+category = "dev"
+description = "Python package for providing Mozilla's CA Bundle."
+name = "certifi"
+optional = false
+python-versions = "*"
+version = "2020.4.5.1"
+
+[[package]]
+category = "dev"
+description = "Universal encoding detector for Python 2 and 3"
+name = "chardet"
+optional = false
+python-versions = "*"
+version = "3.0.4"
+
+[[package]]
+category = "dev"
+description = "Cross-platform colored terminal text."
+marker = "sys_platform == \"win32\""
+name = "colorama"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+version = "0.4.3"
+
+[[package]]
+category = "main"
+description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
+name = "deprecated"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+version = "1.2.9"
+
+[package.dependencies]
+wrapt = ">=1.10,<2"
+
+[package.extras]
+dev = ["tox", "bumpversion (<1)", "sphinx (<2)", "PyTest (<5)", "PyTest-Cov (<2.6)", "pytest", "pytest-cov"]
+
+[[package]]
+category = "dev"
+description = "Docutils -- Python Documentation Utilities"
+name = "docutils"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+version = "0.16"
+
+[[package]]
+category = "dev"
+description = "Internationalized Domain Names in Applications (IDNA)"
+name = "idna"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+version = "2.9"
+
+[[package]]
+category = "dev"
+description = "Getting image size from png/jpeg/jpeg2000/gif file"
+name = "imagesize"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+version = "1.2.0"
+
+[[package]]
+category = "dev"
+description = "Read metadata from Python packages"
+marker = "python_version < \"3.8\""
+name = "importlib-metadata"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+version = "1.6.0"
+
+[package.dependencies]
+zipp = ">=0.5"
+
+[package.extras]
+docs = ["sphinx", "rst.linker"]
+testing = ["packaging", "importlib-resources"]
+
+[[package]]
+category = "dev"
+description = "A port of Ruby on Rails inflector to Python"
+name = "inflection"
+optional = false
+python-versions = ">=3.5"
+version = "0.4.0"
+
+[[package]]
+category = "dev"
+description = "A very fast and expressive template engine."
+name = "jinja2"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+version = "2.11.2"
+
+[package.dependencies]
+MarkupSafe = ">=0.23"
+
+[package.extras]
+i18n = ["Babel (>=0.8)"]
+
+[[package]]
+category = "dev"
+description = "Safely add untrusted strings to HTML/XML markup."
+name = "markupsafe"
+optional = false
+python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*"
+version = "1.1.1"
+
+[[package]]
+category = "dev"
+description = "More routines for operating on iterables, beyond itertools"
+name = "more-itertools"
+optional = false
+python-versions = ">=3.5"
+version = "8.2.0"
+
+[[package]]
+category = "dev"
+description = "Optional static typing for Python"
+name = "mypy"
+optional = false
+python-versions = ">=3.5"
+version = "0.770"
+
+[package.dependencies]
+mypy-extensions = ">=0.4.3,<0.5.0"
+typed-ast = ">=1.4.0,<1.5.0"
+typing-extensions = ">=3.7.4"
+
+[package.extras]
+dmypy = ["psutil (>=4.0)"]
+
+[[package]]
+category = "dev"
+description = "Experimental type system extensions for programs checked with the mypy typechecker."
+name = "mypy-extensions"
+optional = false
+python-versions = "*"
+version = "0.4.3"
+
+[[package]]
+category = "dev"
+description = "Core utilities for Python packages"
+name = "packaging"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+version = "20.3"
+
+[package.dependencies]
+pyparsing = ">=2.0.2"
+six = "*"
+
+[[package]]
+category = "dev"
+description = "plugin and hook calling mechanisms for python"
+name = "pluggy"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+version = "0.13.1"
+
+[package.dependencies]
+[package.dependencies.importlib-metadata]
+python = "<3.8"
+version = ">=0.12"
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+
+[[package]]
+category = "dev"
+description = "library with cross-python path, ini-parsing, io, code, log facilities"
+name = "py"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+version = "1.8.1"
+
+[[package]]
+category = "dev"
+description = "Pygments is a syntax highlighting package written in Python."
+name = "pygments"
+optional = false
+python-versions = ">=3.5"
+version = "2.6.1"
+
+[[package]]
+category = "dev"
+description = "Python parsing module"
+name = "pyparsing"
+optional = false
+python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+version = "2.4.7"
+
+[[package]]
+category = "dev"
+description = "pytest: simple powerful testing with Python"
+name = "pytest"
+optional = false
+python-versions = ">=3.5"
+version = "5.4.1"
+
+[package.dependencies]
+atomicwrites = ">=1.0"
+attrs = ">=17.4.0"
+colorama = "*"
+more-itertools = ">=4.0.0"
+packaging = "*"
+pluggy = ">=0.12,<1.0"
+py = ">=1.5.0"
+wcwidth = "*"
+
+[package.dependencies.importlib-metadata]
+python = "<3.8"
+version = ">=0.12"
+
+[package.extras]
+checkqa-mypy = ["mypy (v0.761)"]
+testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
+
+[[package]]
+category = "dev"
+description = "World timezone definitions, modern and historical"
+name = "pytz"
+optional = false
+python-versions = "*"
+version = "2019.3"
+
+[[package]]
+category = "dev"
+description = "Python HTTP for Humans."
+name = "requests"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+version = "2.23.0"
+
+[package.dependencies]
+certifi = ">=2017.4.17"
+chardet = ">=3.0.2,<4"
+idna = ">=2.5,<3"
+urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26"
+
+[package.extras]
+security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"]
+socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"]
+
+[[package]]
+category = "dev"
+description = "Python 2 and 3 compatibility utilities"
+name = "six"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+version = "1.14.0"
+
+[[package]]
+category = "dev"
+description = "This package provides 26 stemmers for 25 languages generated from Snowball algorithms."
+name = "snowballstemmer"
+optional = false
+python-versions = "*"
+version = "2.0.0"
+
+[[package]]
+category = "dev"
+description = "Python documentation generator"
+name = "sphinx"
+optional = false
+python-versions = ">=3.5"
+version = "3.0.1"
+
+[package.dependencies]
+Jinja2 = ">=2.3"
+Pygments = ">=2.0"
+alabaster = ">=0.7,<0.8"
+babel = ">=1.3"
+colorama = ">=0.3.5"
+docutils = ">=0.12"
+imagesize = "*"
+packaging = "*"
+requests = ">=2.5.0"
+setuptools = "*"
+snowballstemmer = ">=1.1"
+sphinxcontrib-applehelp = "*"
+sphinxcontrib-devhelp = "*"
+sphinxcontrib-htmlhelp = "*"
+sphinxcontrib-jsmath = "*"
+sphinxcontrib-qthelp = "*"
+sphinxcontrib-serializinghtml = "*"
+
+[package.extras]
+docs = ["sphinxcontrib-websupport"]
+lint = ["flake8 (>=3.5.0)", "flake8-import-order", "mypy (>=0.770)", "docutils-stubs"]
+test = ["pytest", "pytest-cov", "html5lib", "typed-ast", "cython"]
+
+[[package]]
+category = "dev"
+description = "Type hints (PEP 484) support for the Sphinx autodoc extension"
+name = "sphinx-autodoc-typehints"
+optional = false
+python-versions = ">=3.5.2"
+version = "1.10.3"
+
+[package.dependencies]
+Sphinx = ">=2.1"
+
+[package.extras]
+test = ["pytest (>=3.1.0)", "typing-extensions (>=3.5)", "sphobjinv (>=2.0)", "dataclasses"]
+type_comments = ["typed-ast (>=1.4.0)"]
+
+[[package]]
+category = "dev"
+description = "Read the Docs theme for Sphinx"
+name = "sphinx-rtd-theme"
+optional = false
+python-versions = "*"
+version = "0.4.3"
+
+[package.dependencies]
+sphinx = "*"
+
+[[package]]
+category = "dev"
+description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books"
+name = "sphinxcontrib-applehelp"
+optional = false
+python-versions = ">=3.5"
+version = "1.0.2"
+
+[package.extras]
+lint = ["flake8", "mypy", "docutils-stubs"]
+test = ["pytest"]
+
+[[package]]
+category = "dev"
+description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document."
+name = "sphinxcontrib-devhelp"
+optional = false
+python-versions = ">=3.5"
+version = "1.0.2"
+
+[package.extras]
+lint = ["flake8", "mypy", "docutils-stubs"]
+test = ["pytest"]
+
+[[package]]
+category = "dev"
+description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files"
+name = "sphinxcontrib-htmlhelp"
+optional = false
+python-versions = ">=3.5"
+version = "1.0.3"
+
+[package.extras]
+lint = ["flake8", "mypy", "docutils-stubs"]
+test = ["pytest", "html5lib"]
+
+[[package]]
+category = "dev"
+description = "A sphinx extension which renders display math in HTML via JavaScript"
+name = "sphinxcontrib-jsmath"
+optional = false
+python-versions = ">=3.5"
+version = "1.0.1"
+
+[package.extras]
+test = ["pytest", "flake8", "mypy"]
+
+[[package]]
+category = "dev"
+description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document."
+name = "sphinxcontrib-qthelp"
+optional = false
+python-versions = ">=3.5"
+version = "1.0.3"
+
+[package.extras]
+lint = ["flake8", "mypy", "docutils-stubs"]
+test = ["pytest"]
+
+[[package]]
+category = "dev"
+description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)."
+name = "sphinxcontrib-serializinghtml"
+optional = false
+python-versions = ">=3.5"
+version = "1.1.4"
+
+[package.extras]
+lint = ["flake8", "mypy", "docutils-stubs"]
+test = ["pytest"]
+
+[[package]]
+category = "dev"
+description = "a fork of Python 2 and 3 ast modules with type comment support"
+name = "typed-ast"
+optional = false
+python-versions = "*"
+version = "1.4.1"
+
+[[package]]
+category = "dev"
+description = "Backported and Experimental Type Hints for Python 3.5+"
+name = "typing-extensions"
+optional = false
+python-versions = "*"
+version = "3.7.4.2"
+
+[[package]]
+category = "dev"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+name = "urllib3"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
+version = "1.25.8"
+
+[package.extras]
+brotli = ["brotlipy (>=0.6.0)"]
+secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
+socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"]
+
+[[package]]
+category = "dev"
+description = "Measures number of Terminal column cells of wide-character codes"
+name = "wcwidth"
+optional = false
+python-versions = "*"
+version = "0.1.9"
+
+[[package]]
+category = "main"
+description = "Module for decorators, wrappers and monkey patching."
+name = "wrapt"
+optional = false
+python-versions = "*"
+version = "1.12.1"
+
+[[package]]
+category = "dev"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+marker = "python_version < \"3.8\""
+name = "zipp"
+optional = false
+python-versions = ">=3.6"
+version = "3.1.0"
+
+[package.extras]
+docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"]
+testing = ["jaraco.itertools", "func-timeout"]
+
+[metadata]
+content-hash = "fbfb43b8fe09bf1ef0e1bffc589aee6bb49cacfb0996a0a3cfc397e801b04b56"
+python-versions = "^3.7"
+
+[metadata.files]
+alabaster = [
+ {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"},
+ {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"},
+]
+atomicwrites = [
+ {file = "atomicwrites-1.3.0-py2.py3-none-any.whl", hash = "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4"},
+ {file = "atomicwrites-1.3.0.tar.gz", hash = "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"},
+]
+attrs = [
+ {file = "attrs-19.3.0-py2.py3-none-any.whl", hash = "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c"},
+ {file = "attrs-19.3.0.tar.gz", hash = "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"},
+]
+babel = [
+ {file = "Babel-2.8.0-py2.py3-none-any.whl", hash = "sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4"},
+ {file = "Babel-2.8.0.tar.gz", hash = "sha256:1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38"},
+]
+certifi = [
+ {file = "certifi-2020.4.5.1-py2.py3-none-any.whl", hash = "sha256:1d987a998c75633c40847cc966fcf5904906c920a7f17ef374f5aa4282abd304"},
+ {file = "certifi-2020.4.5.1.tar.gz", hash = "sha256:51fcb31174be6e6664c5f69e3e1691a2d72a1a12e90f872cbdb1567eb47b6519"},
+]
+chardet = [
+ {file = "chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"},
+ {file = "chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"},
+]
+colorama = [
+ {file = "colorama-0.4.3-py2.py3-none-any.whl", hash = "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff"},
+ {file = "colorama-0.4.3.tar.gz", hash = "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"},
+]
+deprecated = [
+ {file = "Deprecated-1.2.9-py2.py3-none-any.whl", hash = "sha256:55b41a15bda04c6a2c0d27dd4c2b7b81ffa6348c9cad8f077ac1978c59927ab9"},
+ {file = "Deprecated-1.2.9.tar.gz", hash = "sha256:0cf37d293a96805c6afd8b5fc525cb40f23a2cac9b2d066ac3bd4b04e72ceccc"},
+]
+docutils = [
+ {file = "docutils-0.16-py2.py3-none-any.whl", hash = "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af"},
+ {file = "docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"},
+]
+idna = [
+ {file = "idna-2.9-py2.py3-none-any.whl", hash = "sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa"},
+ {file = "idna-2.9.tar.gz", hash = "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb"},
+]
+imagesize = [
+ {file = "imagesize-1.2.0-py2.py3-none-any.whl", hash = "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1"},
+ {file = "imagesize-1.2.0.tar.gz", hash = "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"},
+]
+importlib-metadata = [
+ {file = "importlib_metadata-1.6.0-py2.py3-none-any.whl", hash = "sha256:2a688cbaa90e0cc587f1df48bdc97a6eadccdcd9c35fb3f976a09e3b5016d90f"},
+ {file = "importlib_metadata-1.6.0.tar.gz", hash = "sha256:34513a8a0c4962bc66d35b359558fd8a5e10cd472d37aec5f66858addef32c1e"},
+]
+inflection = [
+ {file = "inflection-0.4.0-py2.py3-none-any.whl", hash = "sha256:9a15d3598f01220e93f2207c432cfede50daff53137ce660fb8be838ef1ca6cc"},
+ {file = "inflection-0.4.0.tar.gz", hash = "sha256:32a5c3341d9583ec319548b9015b7fbdf8c429cbcb575d326c33ae3a0e90d52c"},
+]
+jinja2 = [
+ {file = "Jinja2-2.11.2-py2.py3-none-any.whl", hash = "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035"},
+ {file = "Jinja2-2.11.2.tar.gz", hash = "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0"},
+]
+markupsafe = [
+ {file = "MarkupSafe-1.1.1-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161"},
+ {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"},
+ {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183"},
+ {file = "MarkupSafe-1.1.1-cp27-cp27m-win32.whl", hash = "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b"},
+ {file = "MarkupSafe-1.1.1-cp27-cp27m-win_amd64.whl", hash = "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e"},
+ {file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f"},
+ {file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1"},
+ {file = "MarkupSafe-1.1.1-cp34-cp34m-macosx_10_6_intel.whl", hash = "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5"},
+ {file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1"},
+ {file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735"},
+ {file = "MarkupSafe-1.1.1-cp34-cp34m-win32.whl", hash = "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21"},
+ {file = "MarkupSafe-1.1.1-cp34-cp34m-win_amd64.whl", hash = "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235"},
+ {file = "MarkupSafe-1.1.1-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b"},
+ {file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f"},
+ {file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905"},
+ {file = "MarkupSafe-1.1.1-cp35-cp35m-win32.whl", hash = "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1"},
+ {file = "MarkupSafe-1.1.1-cp35-cp35m-win_amd64.whl", hash = "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d"},
+ {file = "MarkupSafe-1.1.1-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff"},
+ {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473"},
+ {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e"},
+ {file = "MarkupSafe-1.1.1-cp36-cp36m-win32.whl", hash = "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66"},
+ {file = "MarkupSafe-1.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5"},
+ {file = "MarkupSafe-1.1.1-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d"},
+ {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e"},
+ {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6"},
+ {file = "MarkupSafe-1.1.1-cp37-cp37m-win32.whl", hash = "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2"},
+ {file = "MarkupSafe-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c"},
+ {file = "MarkupSafe-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15"},
+ {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2"},
+ {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42"},
+ {file = "MarkupSafe-1.1.1-cp38-cp38-win32.whl", hash = "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b"},
+ {file = "MarkupSafe-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"},
+ {file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"},
+]
+more-itertools = [
+ {file = "more-itertools-8.2.0.tar.gz", hash = "sha256:b1ddb932186d8a6ac451e1d95844b382f55e12686d51ca0c68b6f61f2ab7a507"},
+ {file = "more_itertools-8.2.0-py3-none-any.whl", hash = "sha256:5dd8bcf33e5f9513ffa06d5ad33d78f31e1931ac9a18f33d37e77a180d393a7c"},
+]
+mypy = [
+ {file = "mypy-0.770-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:a34b577cdf6313bf24755f7a0e3f3c326d5c1f4fe7422d1d06498eb25ad0c600"},
+ {file = "mypy-0.770-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:86c857510a9b7c3104cf4cde1568f4921762c8f9842e987bc03ed4f160925754"},
+ {file = "mypy-0.770-cp35-cp35m-win_amd64.whl", hash = "sha256:a8ffcd53cb5dfc131850851cc09f1c44689c2812d0beb954d8138d4f5fc17f65"},
+ {file = "mypy-0.770-cp36-cp36m-macosx_10_6_x86_64.whl", hash = "sha256:7687f6455ec3ed7649d1ae574136835a4272b65b3ddcf01ab8704ac65616c5ce"},
+ {file = "mypy-0.770-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3beff56b453b6ef94ecb2996bea101a08f1f8a9771d3cbf4988a61e4d9973761"},
+ {file = "mypy-0.770-cp36-cp36m-win_amd64.whl", hash = "sha256:15b948e1302682e3682f11f50208b726a246ab4e6c1b39f9264a8796bb416aa2"},
+ {file = "mypy-0.770-cp37-cp37m-macosx_10_6_x86_64.whl", hash = "sha256:b90928f2d9eb2f33162405f32dde9f6dcead63a0971ca8a1b50eb4ca3e35ceb8"},
+ {file = "mypy-0.770-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:c56ffe22faa2e51054c5f7a3bc70a370939c2ed4de308c690e7949230c995913"},
+ {file = "mypy-0.770-cp37-cp37m-win_amd64.whl", hash = "sha256:8dfb69fbf9f3aeed18afffb15e319ca7f8da9642336348ddd6cab2713ddcf8f9"},
+ {file = "mypy-0.770-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:219a3116ecd015f8dca7b5d2c366c973509dfb9a8fc97ef044a36e3da66144a1"},
+ {file = "mypy-0.770-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7ec45a70d40ede1ec7ad7f95b3c94c9cf4c186a32f6bacb1795b60abd2f9ef27"},
+ {file = "mypy-0.770-cp38-cp38-win_amd64.whl", hash = "sha256:f91c7ae919bbc3f96cd5e5b2e786b2b108343d1d7972ea130f7de27fdd547cf3"},
+ {file = "mypy-0.770-py3-none-any.whl", hash = "sha256:3b1fc683fb204c6b4403a1ef23f0b1fac8e4477091585e0c8c54cbdf7d7bb164"},
+ {file = "mypy-0.770.tar.gz", hash = "sha256:8a627507ef9b307b46a1fea9513d5c98680ba09591253082b4c48697ba05a4ae"},
+]
+mypy-extensions = [
+ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
+ {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
+]
+packaging = [
+ {file = "packaging-20.3-py2.py3-none-any.whl", hash = "sha256:82f77b9bee21c1bafbf35a84905d604d5d1223801d639cf3ed140bd651c08752"},
+ {file = "packaging-20.3.tar.gz", hash = "sha256:3c292b474fda1671ec57d46d739d072bfd495a4f51ad01a055121d81e952b7a3"},
+]
+pluggy = [
+ {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"},
+ {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"},
+]
+py = [
+ {file = "py-1.8.1-py2.py3-none-any.whl", hash = "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0"},
+ {file = "py-1.8.1.tar.gz", hash = "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa"},
+]
+pygments = [
+ {file = "Pygments-2.6.1-py3-none-any.whl", hash = "sha256:ff7a40b4860b727ab48fad6360eb351cc1b33cbf9b15a0f689ca5353e9463324"},
+ {file = "Pygments-2.6.1.tar.gz", hash = "sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44"},
+]
+pyparsing = [
+ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"},
+ {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"},
+]
+pytest = [
+ {file = "pytest-5.4.1-py3-none-any.whl", hash = "sha256:0e5b30f5cb04e887b91b1ee519fa3d89049595f428c1db76e73bd7f17b09b172"},
+ {file = "pytest-5.4.1.tar.gz", hash = "sha256:84dde37075b8805f3d1f392cc47e38a0e59518fb46a431cfdaf7cf1ce805f970"},
+]
+pytz = [
+ {file = "pytz-2019.3-py2.py3-none-any.whl", hash = "sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d"},
+ {file = "pytz-2019.3.tar.gz", hash = "sha256:b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be"},
+]
+requests = [
+ {file = "requests-2.23.0-py2.py3-none-any.whl", hash = "sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee"},
+ {file = "requests-2.23.0.tar.gz", hash = "sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6"},
+]
+six = [
+ {file = "six-1.14.0-py2.py3-none-any.whl", hash = "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"},
+ {file = "six-1.14.0.tar.gz", hash = "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a"},
+]
+snowballstemmer = [
+ {file = "snowballstemmer-2.0.0-py2.py3-none-any.whl", hash = "sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0"},
+ {file = "snowballstemmer-2.0.0.tar.gz", hash = "sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52"},
+]
+sphinx = [
+ {file = "Sphinx-3.0.1-py3-none-any.whl", hash = "sha256:8411878f4768ec2a8896b844d68070204f9354a831b37937989c2e559d29dffc"},
+ {file = "Sphinx-3.0.1.tar.gz", hash = "sha256:50972d83b78990fd61d0d3fe8620814cae53db29443e92c13661bc43dff46ec8"},
+]
+sphinx-autodoc-typehints = [
+ {file = "sphinx-autodoc-typehints-1.10.3.tar.gz", hash = "sha256:a6b3180167479aca2c4d1ed3b5cb044a70a76cccd6b38662d39288ebd9f0dff0"},
+ {file = "sphinx_autodoc_typehints-1.10.3-py3-none-any.whl", hash = "sha256:27c9e6ef4f4451766ab8d08b2d8520933b97beb21c913f3df9ab2e59b56e6c6c"},
+]
+sphinx-rtd-theme = [
+ {file = "sphinx_rtd_theme-0.4.3-py2.py3-none-any.whl", hash = "sha256:00cf895504a7895ee433807c62094cf1e95f065843bf3acd17037c3e9a2becd4"},
+ {file = "sphinx_rtd_theme-0.4.3.tar.gz", hash = "sha256:728607e34d60456d736cc7991fd236afb828b21b82f956c5ea75f94c8414040a"},
+]
+sphinxcontrib-applehelp = [
+ {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"},
+ {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"},
+]
+sphinxcontrib-devhelp = [
+ {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"},
+ {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"},
+]
+sphinxcontrib-htmlhelp = [
+ {file = "sphinxcontrib-htmlhelp-1.0.3.tar.gz", hash = "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b"},
+ {file = "sphinxcontrib_htmlhelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f"},
+]
+sphinxcontrib-jsmath = [
+ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"},
+ {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"},
+]
+sphinxcontrib-qthelp = [
+ {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"},
+ {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"},
+]
+sphinxcontrib-serializinghtml = [
+ {file = "sphinxcontrib-serializinghtml-1.1.4.tar.gz", hash = "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc"},
+ {file = "sphinxcontrib_serializinghtml-1.1.4-py2.py3-none-any.whl", hash = "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a"},
+]
+typed-ast = [
+ {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"},
+ {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb"},
+ {file = "typed_ast-1.4.1-cp35-cp35m-win32.whl", hash = "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919"},
+ {file = "typed_ast-1.4.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01"},
+ {file = "typed_ast-1.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75"},
+ {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652"},
+ {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"},
+ {file = "typed_ast-1.4.1-cp36-cp36m-win32.whl", hash = "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1"},
+ {file = "typed_ast-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa"},
+ {file = "typed_ast-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614"},
+ {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41"},
+ {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b"},
+ {file = "typed_ast-1.4.1-cp37-cp37m-win32.whl", hash = "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe"},
+ {file = "typed_ast-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355"},
+ {file = "typed_ast-1.4.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6"},
+ {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907"},
+ {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d"},
+ {file = "typed_ast-1.4.1-cp38-cp38-win32.whl", hash = "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c"},
+ {file = "typed_ast-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4"},
+ {file = "typed_ast-1.4.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34"},
+ {file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"},
+]
+typing-extensions = [
+ {file = "typing_extensions-3.7.4.2-py2-none-any.whl", hash = "sha256:f8d2bd89d25bc39dabe7d23df520442fa1d8969b82544370e03d88b5a591c392"},
+ {file = "typing_extensions-3.7.4.2-py3-none-any.whl", hash = "sha256:6e95524d8a547a91e08f404ae485bbb71962de46967e1b71a0cb89af24e761c5"},
+ {file = "typing_extensions-3.7.4.2.tar.gz", hash = "sha256:79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae"},
+]
+urllib3 = [
+ {file = "urllib3-1.25.8-py2.py3-none-any.whl", hash = "sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc"},
+ {file = "urllib3-1.25.8.tar.gz", hash = "sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc"},
+]
+wcwidth = [
+ {file = "wcwidth-0.1.9-py2.py3-none-any.whl", hash = "sha256:cafe2186b3c009a04067022ce1dcd79cb38d8d65ee4f4791b8888d6599d1bbe1"},
+ {file = "wcwidth-0.1.9.tar.gz", hash = "sha256:ee73862862a156bf77ff92b09034fc4825dd3af9cf81bc5b360668d425f3c5f1"},
+]
+wrapt = [
+ {file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"},
+]
+zipp = [
+ {file = "zipp-3.1.0-py3-none-any.whl", hash = "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b"},
+ {file = "zipp-3.1.0.tar.gz", hash = "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"},
+]
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..1f72088
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,30 @@
+[tool.poetry]
+name = "python-chrome-devtools-protocol"
+packages = [{include = "cdp"}]
+version = "0.4.0"
+description = "Python type wrappers for Chrome DevTools Protocol (CDP)"
+authors = ["Mark E. Haase ", "Brian Mackintosh "]
+license = "MIT"
+readme = "README.md"
+homepage = "https://github.com/hyperiongray/python-chrome-devtools-protocol"
+classifiers = [
+ "Development Status :: 3 - Alpha",
+ "Intended Audience :: Developers",
+ "Topic :: Internet"
+]
+
+[tool.poetry.dependencies]
+python = "^3.7"
+deprecated = "^1.2.9"
+
+[tool.poetry.dev-dependencies]
+inflection = "^0.4.0"
+mypy = "^0.770"
+pytest = "^5.4.1"
+sphinx = "^3.0.1"
+sphinx-autodoc-typehints = "^1.10.3"
+sphinx-rtd-theme = "^0.4.3"
+
+[build-system]
+requires = ["poetry>=0.12"]
+build-backend = "poetry.masonry.api"
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index 2384fce..0000000
--- a/requirements.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-# These packages are required for generating the CDP sources. They are not
-# needed if you only want to install this library.
-inflection
-mypy
-pytest
-sphinx
-sphinx-autodoc-typehints
-sphinx-rtd-theme
-twine
diff --git a/setup.py b/setup.py
deleted file mode 100644
index 4dc6853..0000000
--- a/setup.py
+++ /dev/null
@@ -1,31 +0,0 @@
-from pathlib import Path
-from setuptools import setup, find_packages
-
-here = Path(__file__).parent
-
-with (here / 'README.md').open(encoding='utf8') as f:
- long_description = f.read()
-
-setup(
- name='chrome-devtools-protocol',
- version='0.3.0',
- description='Python type wrappers for Chrome DevTools Protocol (CDP)',
- long_description=long_description,
- long_description_content_type='text/markdown',
- url='https://github.com/HyperionGray/python-chrome-devtools-protocol',
- author='Mark E. Haase , Brian Mackintosh ',
- classifiers=[
- 'Development Status :: 3 - Alpha',
- 'Intended Audience :: Developers',
- 'Topic :: Software Development :: Libraries',
- 'License :: OSI Approved :: MIT License',
- 'Programming Language :: Python :: 3.7',
- ],
- python_requires='>=3.7',
- keywords='chrome devtools protocol cdp',
- package_data={'cdp': ['py.typed']},
- packages=find_packages(exclude=['build', 'docs', 'examples', 'generator']),
- install_requires=[
- 'deprecated'
- ]
-)
From 73cc62e734a0f11fa98fcd4a49fec723360b81c4 Mon Sep 17 00:00:00 2001
From: "Mark E. Haase"
Date: Tue, 14 Apr 2020 09:08:06 -0400
Subject: [PATCH 18/81] Attempt to fix RTD build
---
.readthedocs.yml | 11 +++++++++++
pyproject.toml | 5 +++++
2 files changed, 16 insertions(+)
create mode 100644 .readthedocs.yml
diff --git a/.readthedocs.yml b/.readthedocs.yml
new file mode 100644
index 0000000..95d8548
--- /dev/null
+++ b/.readthedocs.yml
@@ -0,0 +1,11 @@
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+version: 2
+
+sphinx:
+ configuration: docs/conf.py
+
+python:
+ version: 3.7
+ pip_install: true
+ extra_requirements:
+ - docs
diff --git a/pyproject.toml b/pyproject.toml
index 1f72088..f11e537 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -25,6 +25,11 @@ sphinx = "^3.0.1"
sphinx-autodoc-typehints = "^1.10.3"
sphinx-rtd-theme = "^0.4.3"
+# The docs extra is a temporary hack to build on RTD. See:
+# https://github.com/readthedocs/readthedocs.org/issues/4912#issuecomment-489818696
+[tool.poetry.extras]
+docs = ["sphinx", "sphinx-autodoc-typehints", "sphinx-rtd-theme"]
+
[build-system]
requires = ["poetry>=0.12"]
build-backend = "poetry.masonry.api"
From a387725acb7b22bf0fdc01cef68afe6dbdb574db Mon Sep 17 00:00:00 2001
From: "Mark E. Haase"
Date: Tue, 14 Apr 2020 09:11:53 -0400
Subject: [PATCH 19/81] Another tweak to fix RTD build
---
.readthedocs.yml | 8 +++++---
1 file changed, 5 insertions(+), 3 deletions(-)
diff --git a/.readthedocs.yml b/.readthedocs.yml
index 95d8548..8670966 100644
--- a/.readthedocs.yml
+++ b/.readthedocs.yml
@@ -6,6 +6,8 @@ sphinx:
python:
version: 3.7
- pip_install: true
- extra_requirements:
- - docs
+ install:
+ - method: pip
+ path: .
+ extra_requirements:
+ - docs
From d0004891772d210f43d5f04294f09c8f7fa1e32f Mon Sep 17 00:00:00 2001
From: "Mark E. Haase"
Date: Tue, 14 Apr 2020 09:24:03 -0400
Subject: [PATCH 20/81] Previous approach for fixing RTD build was not
working...
Trying something else in this commit.
---
.readthedocs.yml | 5 +----
docs/requirements.txt | 6 ++++++
pyproject.toml | 5 -----
3 files changed, 7 insertions(+), 9 deletions(-)
create mode 100644 docs/requirements.txt
diff --git a/.readthedocs.yml b/.readthedocs.yml
index 8670966..70a3958 100644
--- a/.readthedocs.yml
+++ b/.readthedocs.yml
@@ -7,7 +7,4 @@ sphinx:
python:
version: 3.7
install:
- - method: pip
- path: .
- extra_requirements:
- - docs
+ - requirements: docs/requirements.txt
diff --git a/docs/requirements.txt b/docs/requirements.txt
new file mode 100644
index 0000000..bd13c42
--- /dev/null
+++ b/docs/requirements.txt
@@ -0,0 +1,6 @@
+# RTD does not support Poetry natively, so this is a temporary workaround. See:
+# https://github.com/readthedocs/readthedocs.org/issues/4912
+sphinx
+sphinx-autodoc-typehints
+sphinx-rtd-theme
+.
diff --git a/pyproject.toml b/pyproject.toml
index f11e537..1f72088 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -25,11 +25,6 @@ sphinx = "^3.0.1"
sphinx-autodoc-typehints = "^1.10.3"
sphinx-rtd-theme = "^0.4.3"
-# The docs extra is a temporary hack to build on RTD. See:
-# https://github.com/readthedocs/readthedocs.org/issues/4912#issuecomment-489818696
-[tool.poetry.extras]
-docs = ["sphinx", "sphinx-autodoc-typehints", "sphinx-rtd-theme"]
-
[build-system]
requires = ["poetry>=0.12"]
build-backend = "poetry.masonry.api"
From 74b955ea944e9597a98179883e45578c71555110 Mon Sep 17 00:00:00 2001
From: "Mark E. Haase"
Date: Tue, 14 Apr 2020 09:31:25 -0400
Subject: [PATCH 21/81] Fix typo in makefile
---
Makefile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Makefile b/Makefile
index a560d24..8aac250 100644
--- a/Makefile
+++ b/Makefile
@@ -1,4 +1,4 @@
-# The targets in this makefile should be executed inside Poetry, i.e. `poetry make
+# The targets in this makefile should be executed inside Poetry, i.e. `poetry run make
# docs`.
.PHONY: docs
From 6bd5615593945f91180cb9cded9bee3afdc9ca72 Mon Sep 17 00:00:00 2001
From: "Mark E. Haase"
Date: Tue, 14 Apr 2020 09:36:41 -0400
Subject: [PATCH 22/81] Bump version to 0.5.0
---
pyproject.toml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
index 1f72088..cecf1ce 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,7 +1,7 @@
[tool.poetry]
name = "python-chrome-devtools-protocol"
packages = [{include = "cdp"}]
-version = "0.4.0"
+version = "0.5.0"
description = "Python type wrappers for Chrome DevTools Protocol (CDP)"
authors = ["Mark E. Haase ", "Brian Mackintosh "]
license = "MIT"
From eeffe313a0f5d1abb88d9f13b1487efcfc20c5d6 Mon Sep 17 00:00:00 2001
From: "Mark E. Haase"
Date: Tue, 14 Apr 2020 13:09:06 -0400
Subject: [PATCH 23/81] Whoops, poetry had the wrong package name
This differed from the previous PyPI name. Rolling back version and
correcting the name in order to republish.
---
pyproject.toml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index cecf1ce..f0446d3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,7 +1,7 @@
[tool.poetry]
-name = "python-chrome-devtools-protocol"
+name = "chrome-devtools-protocol"
packages = [{include = "cdp"}]
-version = "0.5.0"
+version = "0.4.0"
description = "Python type wrappers for Chrome DevTools Protocol (CDP)"
authors = ["Mark E. Haase ", "Brian Mackintosh "]
license = "MIT"
From 5463a5f3d20100255c932961b944e4b37dbb7e61 Mon Sep 17 00:00:00 2001
From: "Mark E. Haase"
Date: Tue, 14 Apr 2020 13:11:08 -0400
Subject: [PATCH 24/81] Bump version to 0.5.0 (again)
---
pyproject.toml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
index f0446d3..b60438e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,7 +1,7 @@
[tool.poetry]
name = "chrome-devtools-protocol"
packages = [{include = "cdp"}]
-version = "0.4.0"
+version = "0.5.0"
description = "Python type wrappers for Chrome DevTools Protocol (CDP)"
authors = ["Mark E. Haase ", "Brian Mackintosh "]
license = "MIT"
From a758f0992d155abf60a3abc57975cdd3f44064ea Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Tue, 1 Mar 2022 18:10:23 -0300
Subject: [PATCH 25/81] Fixed #26 -- Support CDP spec r970581
---
cdp/__init__.py | 4 +-
cdp/accessibility.py | 193 +-
cdp/application_cache.py | 207 -
cdp/audits.py | 1171 +-
cdp/background_service.py | 1 +
cdp/browser.py | 265 +-
cdp/cache_storage.py | 16 +-
cdp/cast.py | 17 +
cdp/css.py | 285 +-
cdp/debugger.py | 167 +-
cdp/dom.py | 241 +-
cdp/dom_debugger.py | 34 +
cdp/dom_snapshot.py | 57 +-
cdp/emulation.py | 287 +-
cdp/event_breakpoints.py | 46 +
cdp/fetch.py | 75 +-
cdp/headless_experimental.py | 8 +-
cdp/heap_profiler.py | 20 +-
cdp/input_.py | 246 +-
cdp/io.py | 4 +-
cdp/layer_tree.py | 14 +-
cdp/log.py | 5 +
cdp/media.py | 253 +
cdp/network.py | 1427 +-
cdp/overlay.py | 817 +-
cdp/page.py | 1112 +-
cdp/performance.py | 16 +-
cdp/performance_timeline.py | 200 +
cdp/profiler.py | 49 +-
cdp/runtime.py | 89 +-
cdp/security.py | 210 +-
cdp/service_worker.py | 21 +
cdp/storage.py | 328 +-
cdp/system_info.py | 34 +-
cdp/target.py | 105 +-
cdp/tracing.py | 63 +-
cdp/web_audio.py | 407 +-
cdp/web_authn.py | 161 +-
docs/api/accessibility.rst | 22 +-
docs/api/audits.rst | 215 +-
docs/api/browser.rst | 42 +-
docs/api/cast.rst | 2 +
docs/api/css.rst | 25 +
docs/api/debugger.rst | 17 +
docs/api/dom.rst | 22 +
docs/api/dom_debugger.rst | 7 +
docs/api/emulation.rst | 37 +
docs/api/event_breakpoints.rst | 41 +
docs/api/fetch.rst | 4 +-
docs/api/input_.rst | 30 +-
docs/api/media.rst | 98 +
docs/api/network.rst | 189 +-
docs/api/overlay.rst | 105 +
docs/api/page.rst | 169 +-
...ion_cache.rst => performance_timeline.rst} | 33 +-
docs/api/profiler.rst | 5 +
docs/api/security.rst | 25 +
docs/api/service_worker.rst | 2 +
docs/api/storage.rst | 41 +
docs/api/system_info.rst | 5 +
docs/api/target.rst | 7 +-
docs/api/tracing.rst | 10 +
docs/api/web_audio.rst | 94 +-
docs/api/web_authn.rst | 11 +
generator/browser_protocol.json | 19475 ++++++++++------
generator/generate.py | 37 +-
generator/js_protocol.json | 356 +-
generator/test_generate.py | 8 +-
68 files changed, 22449 insertions(+), 7340 deletions(-)
delete mode 100644 cdp/application_cache.py
create mode 100644 cdp/event_breakpoints.py
create mode 100644 cdp/media.py
create mode 100644 cdp/performance_timeline.py
create mode 100644 docs/api/event_breakpoints.rst
create mode 100644 docs/api/media.rst
rename docs/api/{application_cache.rst => performance_timeline.rst} (77%)
diff --git a/cdp/__init__.py b/cdp/__init__.py
index de3a091..5ae05c7 100644
--- a/cdp/__init__.py
+++ b/cdp/__init__.py
@@ -7,7 +7,6 @@
import cdp.accessibility
import cdp.animation
-import cdp.application_cache
import cdp.audits
import cdp.background_service
import cdp.browser
@@ -23,6 +22,7 @@
import cdp.debugger
import cdp.device_orientation
import cdp.emulation
+import cdp.event_breakpoints
import cdp.fetch
import cdp.headless_experimental
import cdp.heap_profiler
@@ -32,11 +32,13 @@
import cdp.inspector
import cdp.layer_tree
import cdp.log
+import cdp.media
import cdp.memory
import cdp.network
import cdp.overlay
import cdp.page
import cdp.performance
+import cdp.performance_timeline
import cdp.profiler
import cdp.runtime
import cdp.schema
diff --git a/cdp/accessibility.py b/cdp/accessibility.py
index f355f85..010cf03 100644
--- a/cdp/accessibility.py
+++ b/cdp/accessibility.py
@@ -12,6 +12,7 @@
import typing
from . import dom
+from . import page
from . import runtime
@@ -83,11 +84,13 @@ class AXValueNativeSourceType(enum.Enum):
'''
Enum of possible native property sources (as a subtype of a particular AXValueSourceType).
'''
+ DESCRIPTION = "description"
FIGCAPTION = "figcaption"
LABEL = "label"
LABELFOR = "labelfor"
LABELWRAPPED = "labelwrapped"
LEGEND = "legend"
+ RUBYANNOTATION = "rubyannotation"
TABLECAPTION = "tablecaption"
TITLE = "title"
OTHER = "other"
@@ -343,12 +346,18 @@ class AXNode:
#: All other properties
properties: typing.Optional[typing.List[AXProperty]] = None
+ #: ID for this node's parent.
+ parent_id: typing.Optional[AXNodeId] = None
+
#: IDs for each of this node's child nodes.
child_ids: typing.Optional[typing.List[AXNodeId]] = None
#: The backend ID for the associated DOM node, if any.
backend_dom_node_id: typing.Optional[dom.BackendNodeId] = None
+ #: The frame ID for the frame associated with this nodes document.
+ frame_id: typing.Optional[page.FrameId] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['nodeId'] = self.node_id.to_json()
@@ -365,10 +374,14 @@ def to_json(self) -> T_JSON_DICT:
json['value'] = self.value.to_json()
if self.properties is not None:
json['properties'] = [i.to_json() for i in self.properties]
+ if self.parent_id is not None:
+ json['parentId'] = self.parent_id.to_json()
if self.child_ids is not None:
json['childIds'] = [i.to_json() for i in self.child_ids]
if self.backend_dom_node_id is not None:
json['backendDOMNodeId'] = self.backend_dom_node_id.to_json()
+ if self.frame_id is not None:
+ json['frameId'] = self.frame_id.to_json()
return json
@classmethod
@@ -382,8 +395,10 @@ def from_json(cls, json: T_JSON_DICT) -> AXNode:
description=AXValue.from_json(json['description']) if 'description' in json else None,
value=AXValue.from_json(json['value']) if 'value' in json else None,
properties=[AXProperty.from_json(i) for i in json['properties']] if 'properties' in json else None,
+ parent_id=AXNodeId.from_json(json['parentId']) if 'parentId' in json else None,
child_ids=[AXNodeId.from_json(i) for i in json['childIds']] if 'childIds' in json else None,
backend_dom_node_id=dom.BackendNodeId.from_json(json['backendDOMNodeId']) if 'backendDOMNodeId' in json else None,
+ frame_id=page.FrameId.from_json(json['frameId']) if 'frameId' in json else None,
)
@@ -442,16 +457,190 @@ def get_partial_ax_tree(
return [AXNode.from_json(i) for i in json['nodes']]
-def get_full_ax_tree() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[AXNode]]:
+def get_full_ax_tree(
+ depth: typing.Optional[int] = None,
+ max_depth: typing.Optional[int] = None,
+ frame_id: typing.Optional[page.FrameId] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[AXNode]]:
'''
- Fetches the entire accessibility tree
+ Fetches the entire accessibility tree for the root Document
**EXPERIMENTAL**
+ :param depth: *(Optional)* The maximum depth at which descendants of the root node should be retrieved. If omitted, the full tree is returned.
+ :param max_depth: **(DEPRECATED)** *(Optional)* Deprecated. This parameter has been renamed to ```depth```. If depth is not provided, max_depth will be used.
+ :param frame_id: *(Optional)* The frame for whose document the AX tree should be retrieved. If omited, the root frame is used.
:returns:
'''
+ params: T_JSON_DICT = dict()
+ if depth is not None:
+ params['depth'] = depth
+ if max_depth is not None:
+ params['max_depth'] = max_depth
+ if frame_id is not None:
+ params['frameId'] = frame_id.to_json()
cmd_dict: T_JSON_DICT = {
'method': 'Accessibility.getFullAXTree',
+ 'params': params,
}
json = yield cmd_dict
return [AXNode.from_json(i) for i in json['nodes']]
+
+
+def get_root_ax_node(
+ frame_id: typing.Optional[page.FrameId] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,AXNode]:
+ '''
+ Fetches the root node.
+ Requires ``enable()`` to have been called previously.
+
+ **EXPERIMENTAL**
+
+ :param frame_id: *(Optional)* The frame in whose document the node resides. If omitted, the root frame is used.
+ :returns:
+ '''
+ params: T_JSON_DICT = dict()
+ if frame_id is not None:
+ params['frameId'] = frame_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Accessibility.getRootAXNode',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return AXNode.from_json(json['node'])
+
+
+def get_ax_node_and_ancestors(
+ node_id: typing.Optional[dom.NodeId] = None,
+ backend_node_id: typing.Optional[dom.BackendNodeId] = None,
+ object_id: typing.Optional[runtime.RemoteObjectId] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[AXNode]]:
+ '''
+ Fetches a node and all ancestors up to and including the root.
+ Requires ``enable()`` to have been called previously.
+
+ **EXPERIMENTAL**
+
+ :param node_id: *(Optional)* Identifier of the node to get.
+ :param backend_node_id: *(Optional)* Identifier of the backend node to get.
+ :param object_id: *(Optional)* JavaScript object id of the node wrapper to get.
+ :returns:
+ '''
+ params: T_JSON_DICT = dict()
+ if node_id is not None:
+ params['nodeId'] = node_id.to_json()
+ if backend_node_id is not None:
+ params['backendNodeId'] = backend_node_id.to_json()
+ if object_id is not None:
+ params['objectId'] = object_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Accessibility.getAXNodeAndAncestors',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return [AXNode.from_json(i) for i in json['nodes']]
+
+
+def get_child_ax_nodes(
+ id_: AXNodeId,
+ frame_id: typing.Optional[page.FrameId] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[AXNode]]:
+ '''
+ Fetches a particular accessibility node by AXNodeId.
+ Requires ``enable()`` to have been called previously.
+
+ **EXPERIMENTAL**
+
+ :param id_:
+ :param frame_id: *(Optional)* The frame in whose document the node resides. If omitted, the root frame is used.
+ :returns:
+ '''
+ params: T_JSON_DICT = dict()
+ params['id'] = id_.to_json()
+ if frame_id is not None:
+ params['frameId'] = frame_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Accessibility.getChildAXNodes',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return [AXNode.from_json(i) for i in json['nodes']]
+
+
+def query_ax_tree(
+ node_id: typing.Optional[dom.NodeId] = None,
+ backend_node_id: typing.Optional[dom.BackendNodeId] = None,
+ object_id: typing.Optional[runtime.RemoteObjectId] = None,
+ accessible_name: typing.Optional[str] = None,
+ role: typing.Optional[str] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[AXNode]]:
+ '''
+ Query a DOM node's accessibility subtree for accessible name and role.
+ This command computes the name and role for all nodes in the subtree, including those that are
+ ignored for accessibility, and returns those that mactch the specified name and role. If no DOM
+ node is specified, or the DOM node does not exist, the command returns an error. If neither
+ ``accessibleName`` or ``role`` is specified, it returns all the accessibility nodes in the subtree.
+
+ **EXPERIMENTAL**
+
+ :param node_id: *(Optional)* Identifier of the node for the root to query.
+ :param backend_node_id: *(Optional)* Identifier of the backend node for the root to query.
+ :param object_id: *(Optional)* JavaScript object id of the node wrapper for the root to query.
+ :param accessible_name: *(Optional)* Find nodes with this computed name.
+ :param role: *(Optional)* Find nodes with this computed role.
+ :returns: A list of ``Accessibility.AXNode`` matching the specified attributes, including nodes that are ignored for accessibility.
+ '''
+ params: T_JSON_DICT = dict()
+ if node_id is not None:
+ params['nodeId'] = node_id.to_json()
+ if backend_node_id is not None:
+ params['backendNodeId'] = backend_node_id.to_json()
+ if object_id is not None:
+ params['objectId'] = object_id.to_json()
+ if accessible_name is not None:
+ params['accessibleName'] = accessible_name
+ if role is not None:
+ params['role'] = role
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Accessibility.queryAXTree',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return [AXNode.from_json(i) for i in json['nodes']]
+
+
+@event_class('Accessibility.loadComplete')
+@dataclass
+class LoadComplete:
+ '''
+ **EXPERIMENTAL**
+
+ The loadComplete event mirrors the load complete event sent by the browser to assistive
+ technology when the web page has finished loading.
+ '''
+ #: New document root node.
+ root: AXNode
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> LoadComplete:
+ return cls(
+ root=AXNode.from_json(json['root'])
+ )
+
+
+@event_class('Accessibility.nodesUpdated')
+@dataclass
+class NodesUpdated:
+ '''
+ **EXPERIMENTAL**
+
+ The nodesUpdated event is sent every time a previously requested node has changed the in tree.
+ '''
+ #: Updated node data.
+ nodes: typing.List[AXNode]
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> NodesUpdated:
+ return cls(
+ nodes=[AXNode.from_json(i) for i in json['nodes']]
+ )
diff --git a/cdp/application_cache.py b/cdp/application_cache.py
deleted file mode 100644
index 9ae01b4..0000000
--- a/cdp/application_cache.py
+++ /dev/null
@@ -1,207 +0,0 @@
-# DO NOT EDIT THIS FILE!
-#
-# This file is generated from the CDP specification. If you need to make
-# changes, edit the generator and regenerate all of the modules.
-#
-# CDP domain: ApplicationCache (experimental)
-
-from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
-import enum
-import typing
-
-from . import page
-
-
-@dataclass
-class ApplicationCacheResource:
- '''
- Detailed application cache resource information.
- '''
- #: Resource url.
- url: str
-
- #: Resource size.
- size: int
-
- #: Resource type.
- type_: str
-
- def to_json(self) -> T_JSON_DICT:
- json: T_JSON_DICT = dict()
- json['url'] = self.url
- json['size'] = self.size
- json['type'] = self.type_
- return json
-
- @classmethod
- def from_json(cls, json: T_JSON_DICT) -> ApplicationCacheResource:
- return cls(
- url=str(json['url']),
- size=int(json['size']),
- type_=str(json['type']),
- )
-
-
-@dataclass
-class ApplicationCache:
- '''
- Detailed application cache information.
- '''
- #: Manifest URL.
- manifest_url: str
-
- #: Application cache size.
- size: float
-
- #: Application cache creation time.
- creation_time: float
-
- #: Application cache update time.
- update_time: float
-
- #: Application cache resources.
- resources: typing.List[ApplicationCacheResource]
-
- def to_json(self) -> T_JSON_DICT:
- json: T_JSON_DICT = dict()
- json['manifestURL'] = self.manifest_url
- json['size'] = self.size
- json['creationTime'] = self.creation_time
- json['updateTime'] = self.update_time
- json['resources'] = [i.to_json() for i in self.resources]
- return json
-
- @classmethod
- def from_json(cls, json: T_JSON_DICT) -> ApplicationCache:
- return cls(
- manifest_url=str(json['manifestURL']),
- size=float(json['size']),
- creation_time=float(json['creationTime']),
- update_time=float(json['updateTime']),
- resources=[ApplicationCacheResource.from_json(i) for i in json['resources']],
- )
-
-
-@dataclass
-class FrameWithManifest:
- '''
- Frame identifier - manifest URL pair.
- '''
- #: Frame identifier.
- frame_id: page.FrameId
-
- #: Manifest URL.
- manifest_url: str
-
- #: Application cache status.
- status: int
-
- def to_json(self) -> T_JSON_DICT:
- json: T_JSON_DICT = dict()
- json['frameId'] = self.frame_id.to_json()
- json['manifestURL'] = self.manifest_url
- json['status'] = self.status
- return json
-
- @classmethod
- def from_json(cls, json: T_JSON_DICT) -> FrameWithManifest:
- return cls(
- frame_id=page.FrameId.from_json(json['frameId']),
- manifest_url=str(json['manifestURL']),
- status=int(json['status']),
- )
-
-
-def enable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
- '''
- Enables application cache domain notifications.
- '''
- cmd_dict: T_JSON_DICT = {
- 'method': 'ApplicationCache.enable',
- }
- json = yield cmd_dict
-
-
-def get_application_cache_for_frame(
- frame_id: page.FrameId
- ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,ApplicationCache]:
- '''
- Returns relevant application cache data for the document in given frame.
-
- :param frame_id: Identifier of the frame containing document whose application cache is retrieved.
- :returns: Relevant application cache data for the document in given frame.
- '''
- params: T_JSON_DICT = dict()
- params['frameId'] = frame_id.to_json()
- cmd_dict: T_JSON_DICT = {
- 'method': 'ApplicationCache.getApplicationCacheForFrame',
- 'params': params,
- }
- json = yield cmd_dict
- return ApplicationCache.from_json(json['applicationCache'])
-
-
-def get_frames_with_manifests() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[FrameWithManifest]]:
- '''
- Returns array of frame identifiers with manifest urls for each frame containing a document
- associated with some application cache.
-
- :returns: Array of frame identifiers with manifest urls for each frame containing a document associated with some application cache.
- '''
- cmd_dict: T_JSON_DICT = {
- 'method': 'ApplicationCache.getFramesWithManifests',
- }
- json = yield cmd_dict
- return [FrameWithManifest.from_json(i) for i in json['frameIds']]
-
-
-def get_manifest_for_frame(
- frame_id: page.FrameId
- ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,str]:
- '''
- Returns manifest URL for document in the given frame.
-
- :param frame_id: Identifier of the frame containing document whose manifest is retrieved.
- :returns: Manifest URL for document in the given frame.
- '''
- params: T_JSON_DICT = dict()
- params['frameId'] = frame_id.to_json()
- cmd_dict: T_JSON_DICT = {
- 'method': 'ApplicationCache.getManifestForFrame',
- 'params': params,
- }
- json = yield cmd_dict
- return str(json['manifestURL'])
-
-
-@event_class('ApplicationCache.applicationCacheStatusUpdated')
-@dataclass
-class ApplicationCacheStatusUpdated:
- #: Identifier of the frame containing document whose application cache updated status.
- frame_id: page.FrameId
- #: Manifest URL.
- manifest_url: str
- #: Updated application cache status.
- status: int
-
- @classmethod
- def from_json(cls, json: T_JSON_DICT) -> ApplicationCacheStatusUpdated:
- return cls(
- frame_id=page.FrameId.from_json(json['frameId']),
- manifest_url=str(json['manifestURL']),
- status=int(json['status'])
- )
-
-
-@event_class('ApplicationCache.networkStateUpdated')
-@dataclass
-class NetworkStateUpdated:
- is_now_online: bool
-
- @classmethod
- def from_json(cls, json: T_JSON_DICT) -> NetworkStateUpdated:
- return cls(
- is_now_online=bool(json['isNowOnline'])
- )
diff --git a/cdp/audits.py b/cdp/audits.py
index 67c4dde..40e3487 100644
--- a/cdp/audits.py
+++ b/cdp/audits.py
@@ -11,7 +11,1124 @@
import enum
import typing
+from . import dom
from . import network
+from . import page
+from . import runtime
+
+
@dataclass
class AffectedCookie:
    '''
    Identifies a cookie affected by an inspector issue.
    '''
    #: Name, path, and domain together uniquely identify the cookie.
    name: str

    path: str

    domain: str

    def to_json(self) -> T_JSON_DICT:
        # Serialize to the CDP wire shape (camelCase keys).
        return {
            'name': self.name,
            'path': self.path,
            'domain': self.domain,
        }

    @classmethod
    def from_json(cls, json: T_JSON_DICT) -> AffectedCookie:
        # All three keys are required in the CDP payload.
        name, path, domain = (str(json[k]) for k in ('name', 'path', 'domain'))
        return cls(name, path, domain)
+
+
@dataclass
class AffectedRequest:
    '''
    Identifies a network request affected by an inspector issue.
    '''
    #: The unique request id.
    request_id: network.RequestId

    url: typing.Optional[str] = None

    def to_json(self) -> T_JSON_DICT:
        # 'url' is optional on the wire and omitted when unset.
        result: T_JSON_DICT = {'requestId': self.request_id.to_json()}
        if self.url is not None:
            result['url'] = self.url
        return result

    @classmethod
    def from_json(cls, json: T_JSON_DICT) -> AffectedRequest:
        init: dict = {'request_id': network.RequestId.from_json(json['requestId'])}
        if 'url' in json:
            init['url'] = str(json['url'])
        return cls(**init)
+
+
@dataclass
class AffectedFrame:
    '''
    Identifies the frame affected by an inspector issue.
    '''
    frame_id: page.FrameId

    def to_json(self) -> T_JSON_DICT:
        return {'frameId': self.frame_id.to_json()}

    @classmethod
    def from_json(cls, json: T_JSON_DICT) -> AffectedFrame:
        return cls(page.FrameId.from_json(json['frameId']))
+
+
class SameSiteCookieExclusionReason(enum.Enum):
    '''
    Reason a cookie was excluded under SameSite rules. Values are the raw
    CDP protocol strings.
    '''
    EXCLUDE_SAME_SITE_UNSPECIFIED_TREATED_AS_LAX = "ExcludeSameSiteUnspecifiedTreatedAsLax"
    EXCLUDE_SAME_SITE_NONE_INSECURE = "ExcludeSameSiteNoneInsecure"
    EXCLUDE_SAME_SITE_LAX = "ExcludeSameSiteLax"
    EXCLUDE_SAME_SITE_STRICT = "ExcludeSameSiteStrict"
    EXCLUDE_INVALID_SAME_PARTY = "ExcludeInvalidSameParty"
    EXCLUDE_SAME_PARTY_CROSS_PARTY_CONTEXT = "ExcludeSamePartyCrossPartyContext"

    def to_json(self) -> str:
        # The wire format is the raw protocol string.
        return self.value

    @classmethod
    def from_json(cls, json: str) -> SameSiteCookieExclusionReason:
        # Enum value lookup; raises ValueError for unknown strings.
        return cls(json)
+
+
class SameSiteCookieWarningReason(enum.Enum):
    '''
    Warning reason attached to a SameSite cookie issue. Values are the raw
    CDP protocol strings.
    '''
    WARN_SAME_SITE_UNSPECIFIED_CROSS_SITE_CONTEXT = "WarnSameSiteUnspecifiedCrossSiteContext"
    WARN_SAME_SITE_NONE_INSECURE = "WarnSameSiteNoneInsecure"
    WARN_SAME_SITE_UNSPECIFIED_LAX_ALLOW_UNSAFE = "WarnSameSiteUnspecifiedLaxAllowUnsafe"
    WARN_SAME_SITE_STRICT_LAX_DOWNGRADE_STRICT = "WarnSameSiteStrictLaxDowngradeStrict"
    WARN_SAME_SITE_STRICT_CROSS_DOWNGRADE_STRICT = "WarnSameSiteStrictCrossDowngradeStrict"
    WARN_SAME_SITE_STRICT_CROSS_DOWNGRADE_LAX = "WarnSameSiteStrictCrossDowngradeLax"
    WARN_SAME_SITE_LAX_CROSS_DOWNGRADE_STRICT = "WarnSameSiteLaxCrossDowngradeStrict"
    WARN_SAME_SITE_LAX_CROSS_DOWNGRADE_LAX = "WarnSameSiteLaxCrossDowngradeLax"

    def to_json(self) -> str:
        # The wire format is the raw protocol string.
        return self.value

    @classmethod
    def from_json(cls, json: str) -> SameSiteCookieWarningReason:
        # Enum value lookup; raises ValueError for unknown strings.
        return cls(json)
+
+
class SameSiteCookieOperation(enum.Enum):
    '''
    The cookie operation (set or read) associated with a SameSite cookie
    issue.
    '''
    SET_COOKIE = "SetCookie"
    READ_COOKIE = "ReadCookie"

    def to_json(self) -> str:
        # The wire format is the raw protocol string.
        return self.value

    @classmethod
    def from_json(cls, json: str) -> SameSiteCookieOperation:
        # Enum value lookup; raises ValueError for unknown strings.
        return cls(json)
+
+
@dataclass
class SameSiteCookieIssueDetails:
    '''
    This information is currently necessary, as the front-end has a difficult
    time finding a specific cookie. With this, we can convey specific error
    information without the cookie.
    '''
    cookie_warning_reasons: typing.List[SameSiteCookieWarningReason]

    cookie_exclusion_reasons: typing.List[SameSiteCookieExclusionReason]

    #: The cookie operation (set/read) that triggered this issue.
    operation: SameSiteCookieOperation

    cookie: typing.Optional[AffectedCookie] = None

    #: If AffectedCookie is not set then rawCookieLine contains the raw
    #: Set-Cookie header string. This hints at a problem where the
    #: cookie line is syntactically or semantically malformed in a way
    #: that no valid cookie could be created.
    raw_cookie_line: typing.Optional[str] = None

    #: Optionally identifies the site-for-cookies and the cookie url, which
    #: may be used by the front-end as additional context.
    site_for_cookies: typing.Optional[str] = None

    cookie_url: typing.Optional[str] = None

    request: typing.Optional[AffectedRequest] = None

    def to_json(self) -> T_JSON_DICT:
        # Optional fields are omitted from the payload when unset.
        json: T_JSON_DICT = dict()
        json['cookieWarningReasons'] = [i.to_json() for i in self.cookie_warning_reasons]
        json['cookieExclusionReasons'] = [i.to_json() for i in self.cookie_exclusion_reasons]
        json['operation'] = self.operation.to_json()
        if self.cookie is not None:
            json['cookie'] = self.cookie.to_json()
        if self.raw_cookie_line is not None:
            json['rawCookieLine'] = self.raw_cookie_line
        if self.site_for_cookies is not None:
            json['siteForCookies'] = self.site_for_cookies
        if self.cookie_url is not None:
            json['cookieUrl'] = self.cookie_url
        if self.request is not None:
            json['request'] = self.request.to_json()
        return json

    @classmethod
    def from_json(cls, json: T_JSON_DICT) -> SameSiteCookieIssueDetails:
        return cls(
            cookie_warning_reasons=[SameSiteCookieWarningReason.from_json(i) for i in json['cookieWarningReasons']],
            cookie_exclusion_reasons=[SameSiteCookieExclusionReason.from_json(i) for i in json['cookieExclusionReasons']],
            operation=SameSiteCookieOperation.from_json(json['operation']),
            cookie=AffectedCookie.from_json(json['cookie']) if 'cookie' in json else None,
            raw_cookie_line=str(json['rawCookieLine']) if 'rawCookieLine' in json else None,
            site_for_cookies=str(json['siteForCookies']) if 'siteForCookies' in json else None,
            cookie_url=str(json['cookieUrl']) if 'cookieUrl' in json else None,
            request=AffectedRequest.from_json(json['request']) if 'request' in json else None,
        )
+
+
class MixedContentResolutionStatus(enum.Enum):
    '''
    How a mixed content issue was resolved: blocked, automatically upgraded,
    or allowed with a warning.
    '''
    MIXED_CONTENT_BLOCKED = "MixedContentBlocked"
    MIXED_CONTENT_AUTOMATICALLY_UPGRADED = "MixedContentAutomaticallyUpgraded"
    MIXED_CONTENT_WARNING = "MixedContentWarning"

    def to_json(self) -> str:
        # The wire format is the raw protocol string.
        return self.value

    @classmethod
    def from_json(cls, json: str) -> MixedContentResolutionStatus:
        # Enum value lookup; raises ValueError for unknown strings.
        return cls(json)
+
+
class MixedContentResourceType(enum.Enum):
    '''
    The kind of resource involved in a mixed content issue. Values are the
    raw CDP protocol strings.
    '''
    AUDIO = "Audio"
    BEACON = "Beacon"
    CSP_REPORT = "CSPReport"
    DOWNLOAD = "Download"
    EVENT_SOURCE = "EventSource"
    FAVICON = "Favicon"
    FONT = "Font"
    FORM = "Form"
    FRAME = "Frame"
    IMAGE = "Image"
    IMPORT = "Import"
    MANIFEST = "Manifest"
    PING = "Ping"
    PLUGIN_DATA = "PluginData"
    PLUGIN_RESOURCE = "PluginResource"
    PREFETCH = "Prefetch"
    RESOURCE = "Resource"
    SCRIPT = "Script"
    SERVICE_WORKER = "ServiceWorker"
    SHARED_WORKER = "SharedWorker"
    STYLESHEET = "Stylesheet"
    TRACK = "Track"
    VIDEO = "Video"
    WORKER = "Worker"
    XML_HTTP_REQUEST = "XMLHttpRequest"
    XSLT = "XSLT"

    def to_json(self) -> str:
        # The wire format is the raw protocol string.
        return self.value

    @classmethod
    def from_json(cls, json: str) -> MixedContentResourceType:
        # Enum value lookup; raises ValueError for unknown strings.
        return cls(json)
+
+
@dataclass
class MixedContentIssueDetails:
    '''
    Details about a mixed content issue.
    '''
    #: How the mixed content issue is being resolved.
    resolution_status: MixedContentResolutionStatus

    #: The unsafe http url causing the mixed content issue.
    insecure_url: str

    #: The url responsible for the call to an unsafe url.
    main_resource_url: str

    #: The type of resource causing the mixed content issue (css, js, iframe,
    #: form,...). Optional because it is mapped from
    #: blink::mojom::RequestContextType, which will be replaced
    #: by network::mojom::RequestDestination.
    resource_type: typing.Optional[MixedContentResourceType] = None

    #: The mixed content request; does not always exist
    #: (e.g. for unsafe form submission urls).
    request: typing.Optional[AffectedRequest] = None

    #: Optional because not every mixed content issue is necessarily linked to a frame.
    frame: typing.Optional[AffectedFrame] = None

    def to_json(self) -> T_JSON_DICT:
        # Required fields first, then conditionally the optional ones.
        result: T_JSON_DICT = {
            'resolutionStatus': self.resolution_status.to_json(),
            'insecureURL': self.insecure_url,
            'mainResourceURL': self.main_resource_url,
        }
        if self.resource_type is not None:
            result['resourceType'] = self.resource_type.to_json()
        if self.request is not None:
            result['request'] = self.request.to_json()
        if self.frame is not None:
            result['frame'] = self.frame.to_json()
        return result

    @classmethod
    def from_json(cls, json: T_JSON_DICT) -> MixedContentIssueDetails:
        init: dict = {
            'resolution_status': MixedContentResolutionStatus.from_json(json['resolutionStatus']),
            'insecure_url': str(json['insecureURL']),
            'main_resource_url': str(json['mainResourceURL']),
        }
        if 'resourceType' in json:
            init['resource_type'] = MixedContentResourceType.from_json(json['resourceType'])
        if 'request' in json:
            init['request'] = AffectedRequest.from_json(json['request'])
        if 'frame' in json:
            init['frame'] = AffectedFrame.from_json(json['frame'])
        return cls(**init)
+
+
class BlockedByResponseReason(enum.Enum):
    '''
    Enum indicating the reason a response has been blocked. These reasons are
    refinements of the net error BLOCKED_BY_RESPONSE.
    '''
    COEP_FRAME_RESOURCE_NEEDS_COEP_HEADER = "CoepFrameResourceNeedsCoepHeader"
    COOP_SANDBOXED_I_FRAME_CANNOT_NAVIGATE_TO_COOP_PAGE = "CoopSandboxedIFrameCannotNavigateToCoopPage"
    CORP_NOT_SAME_ORIGIN = "CorpNotSameOrigin"
    CORP_NOT_SAME_ORIGIN_AFTER_DEFAULTED_TO_SAME_ORIGIN_BY_COEP = "CorpNotSameOriginAfterDefaultedToSameOriginByCoep"
    CORP_NOT_SAME_SITE = "CorpNotSameSite"

    def to_json(self) -> str:
        # The wire format is the raw protocol string.
        return self.value

    @classmethod
    def from_json(cls, json: str) -> BlockedByResponseReason:
        # Enum value lookup; raises ValueError for unknown strings.
        return cls(json)
+
+
@dataclass
class BlockedByResponseIssueDetails:
    '''
    Details for a request that has been blocked with the BLOCKED_BY_RESPONSE
    code. Currently only used for COEP/COOP, but may be extended to include
    some CSP errors in the future.
    '''
    request: AffectedRequest

    reason: BlockedByResponseReason

    parent_frame: typing.Optional[AffectedFrame] = None

    blocked_frame: typing.Optional[AffectedFrame] = None

    def to_json(self) -> T_JSON_DICT:
        # The two frame fields are optional and omitted when unset.
        result: T_JSON_DICT = {
            'request': self.request.to_json(),
            'reason': self.reason.to_json(),
        }
        if self.parent_frame is not None:
            result['parentFrame'] = self.parent_frame.to_json()
        if self.blocked_frame is not None:
            result['blockedFrame'] = self.blocked_frame.to_json()
        return result

    @classmethod
    def from_json(cls, json: T_JSON_DICT) -> BlockedByResponseIssueDetails:
        init: dict = {
            'request': AffectedRequest.from_json(json['request']),
            'reason': BlockedByResponseReason.from_json(json['reason']),
        }
        if 'parentFrame' in json:
            init['parent_frame'] = AffectedFrame.from_json(json['parentFrame'])
        if 'blockedFrame' in json:
            init['blocked_frame'] = AffectedFrame.from_json(json['blockedFrame'])
        return cls(**init)
+
+
class HeavyAdResolutionStatus(enum.Enum):
    '''
    Whether a heavy ad was blocked or only warned about.
    '''
    HEAVY_AD_BLOCKED = "HeavyAdBlocked"
    HEAVY_AD_WARNING = "HeavyAdWarning"

    def to_json(self) -> str:
        # The wire format is the raw protocol string.
        return self.value

    @classmethod
    def from_json(cls, json: str) -> HeavyAdResolutionStatus:
        # Enum value lookup; raises ValueError for unknown strings.
        return cls(json)
+
+
class HeavyAdReason(enum.Enum):
    '''
    The limit a heavy ad exceeded: total network, total CPU, or peak CPU.
    '''
    NETWORK_TOTAL_LIMIT = "NetworkTotalLimit"
    CPU_TOTAL_LIMIT = "CpuTotalLimit"
    CPU_PEAK_LIMIT = "CpuPeakLimit"

    def to_json(self) -> str:
        # The wire format is the raw protocol string.
        return self.value

    @classmethod
    def from_json(cls, json: str) -> HeavyAdReason:
        # Enum value lookup; raises ValueError for unknown strings.
        return cls(json)
+
+
@dataclass
class HeavyAdIssueDetails:
    '''
    Details of a heavy ad intervention.
    '''
    #: The resolution status, either blocking the content or warning.
    resolution: HeavyAdResolutionStatus

    #: The reason the ad was blocked, total network or cpu or peak cpu.
    reason: HeavyAdReason

    #: The frame that was blocked.
    frame: AffectedFrame

    def to_json(self) -> T_JSON_DICT:
        # All three fields are required on the wire.
        return {
            'resolution': self.resolution.to_json(),
            'reason': self.reason.to_json(),
            'frame': self.frame.to_json(),
        }

    @classmethod
    def from_json(cls, json: T_JSON_DICT) -> HeavyAdIssueDetails:
        resolution = HeavyAdResolutionStatus.from_json(json['resolution'])
        reason = HeavyAdReason.from_json(json['reason'])
        frame = AffectedFrame.from_json(json['frame'])
        return cls(resolution=resolution, reason=reason, frame=frame)
+
+
class ContentSecurityPolicyViolationType(enum.Enum):
    '''
    Kinds of Content-Security-Policy violations. Values are the raw CDP
    protocol strings.
    '''
    K_INLINE_VIOLATION = "kInlineViolation"
    K_EVAL_VIOLATION = "kEvalViolation"
    K_URL_VIOLATION = "kURLViolation"
    K_TRUSTED_TYPES_SINK_VIOLATION = "kTrustedTypesSinkViolation"
    K_TRUSTED_TYPES_POLICY_VIOLATION = "kTrustedTypesPolicyViolation"
    K_WASM_EVAL_VIOLATION = "kWasmEvalViolation"

    def to_json(self) -> str:
        # The wire format is the raw protocol string.
        return self.value

    @classmethod
    def from_json(cls, json: str) -> ContentSecurityPolicyViolationType:
        # Enum value lookup; raises ValueError for unknown strings.
        return cls(json)
+
+
@dataclass
class SourceCodeLocation:
    '''
    A source location: URL plus line and column numbers, optionally tied to
    a specific script.
    '''
    url: str

    line_number: int

    column_number: int

    script_id: typing.Optional[runtime.ScriptId] = None

    def to_json(self) -> T_JSON_DICT:
        # 'scriptId' is optional and omitted when unset.
        result: T_JSON_DICT = {
            'url': self.url,
            'lineNumber': self.line_number,
            'columnNumber': self.column_number,
        }
        if self.script_id is not None:
            result['scriptId'] = self.script_id.to_json()
        return result

    @classmethod
    def from_json(cls, json: T_JSON_DICT) -> SourceCodeLocation:
        init: dict = {
            'url': str(json['url']),
            'line_number': int(json['lineNumber']),
            'column_number': int(json['columnNumber']),
        }
        if 'scriptId' in json:
            init['script_id'] = runtime.ScriptId.from_json(json['scriptId'])
        return cls(**init)
+
+
@dataclass
class ContentSecurityPolicyIssueDetails:
    '''
    Details of a Content-Security-Policy violation issue.
    '''
    #: Specific directive that is violated, causing the CSP issue.
    violated_directive: str

    is_report_only: bool

    content_security_policy_violation_type: ContentSecurityPolicyViolationType

    #: The url not included in allowed sources.
    blocked_url: typing.Optional[str] = None

    frame_ancestor: typing.Optional[AffectedFrame] = None

    source_code_location: typing.Optional[SourceCodeLocation] = None

    violating_node_id: typing.Optional[dom.BackendNodeId] = None

    def to_json(self) -> T_JSON_DICT:
        # Required fields first, then conditionally the optional ones.
        result: T_JSON_DICT = {
            'violatedDirective': self.violated_directive,
            'isReportOnly': self.is_report_only,
            'contentSecurityPolicyViolationType': self.content_security_policy_violation_type.to_json(),
        }
        if self.blocked_url is not None:
            result['blockedURL'] = self.blocked_url
        if self.frame_ancestor is not None:
            result['frameAncestor'] = self.frame_ancestor.to_json()
        if self.source_code_location is not None:
            result['sourceCodeLocation'] = self.source_code_location.to_json()
        if self.violating_node_id is not None:
            result['violatingNodeId'] = self.violating_node_id.to_json()
        return result

    @classmethod
    def from_json(cls, json: T_JSON_DICT) -> ContentSecurityPolicyIssueDetails:
        init: dict = {
            'violated_directive': str(json['violatedDirective']),
            'is_report_only': bool(json['isReportOnly']),
            'content_security_policy_violation_type': ContentSecurityPolicyViolationType.from_json(json['contentSecurityPolicyViolationType']),
        }
        if 'blockedURL' in json:
            init['blocked_url'] = str(json['blockedURL'])
        if 'frameAncestor' in json:
            init['frame_ancestor'] = AffectedFrame.from_json(json['frameAncestor'])
        if 'sourceCodeLocation' in json:
            init['source_code_location'] = SourceCodeLocation.from_json(json['sourceCodeLocation'])
        if 'violatingNodeId' in json:
            init['violating_node_id'] = dom.BackendNodeId.from_json(json['violatingNodeId'])
        return cls(**init)
+
+
class SharedArrayBufferIssueType(enum.Enum):
    '''
    Whether the SharedArrayBuffer issue was raised on transfer or on
    creation.
    '''
    TRANSFER_ISSUE = "TransferIssue"
    CREATION_ISSUE = "CreationIssue"

    def to_json(self) -> str:
        # The wire format is the raw protocol string.
        return self.value

    @classmethod
    def from_json(cls, json: str) -> SharedArrayBufferIssueType:
        # Enum value lookup; raises ValueError for unknown strings.
        return cls(json)
+
+
@dataclass
class SharedArrayBufferIssueDetails:
    '''
    Details for an issue arising from an SAB being instantiated in, or
    transferred to a context that is not cross-origin isolated.
    '''
    #: Where the SAB was created or transferred.
    source_code_location: SourceCodeLocation

    is_warning: bool

    #: Serialized under the JSON key ``type``; the trailing underscore
    #: avoids shadowing the ``type`` builtin.
    type_: SharedArrayBufferIssueType

    def to_json(self) -> T_JSON_DICT:
        json: T_JSON_DICT = dict()
        json['sourceCodeLocation'] = self.source_code_location.to_json()
        json['isWarning'] = self.is_warning
        json['type'] = self.type_.to_json()
        return json

    @classmethod
    def from_json(cls, json: T_JSON_DICT) -> SharedArrayBufferIssueDetails:
        return cls(
            source_code_location=SourceCodeLocation.from_json(json['sourceCodeLocation']),
            is_warning=bool(json['isWarning']),
            type_=SharedArrayBufferIssueType.from_json(json['type']),
        )
+
+
class TwaQualityEnforcementViolationType(enum.Enum):
    '''
    Trusted Web Activity quality-enforcement violation kinds. Values are
    the raw CDP protocol strings.
    '''
    K_HTTP_ERROR = "kHttpError"
    K_UNAVAILABLE_OFFLINE = "kUnavailableOffline"
    K_DIGITAL_ASSET_LINKS = "kDigitalAssetLinks"

    def to_json(self) -> str:
        # The wire format is the raw protocol string.
        return self.value

    @classmethod
    def from_json(cls, json: str) -> TwaQualityEnforcementViolationType:
        # Enum value lookup; raises ValueError for unknown strings.
        return cls(json)
+
+
@dataclass
class TrustedWebActivityIssueDetails:
    '''
    Details of a Trusted Web Activity quality-enforcement violation.
    '''
    #: The url that triggers the violation.
    url: str

    violation_type: TwaQualityEnforcementViolationType

    http_status_code: typing.Optional[int] = None

    #: The package name of the Trusted Web Activity client app. This field is
    #: only used when violation type is kDigitalAssetLinks.
    package_name: typing.Optional[str] = None

    #: The signature of the Trusted Web Activity client app. This field is only
    #: used when violation type is kDigitalAssetLinks.
    signature: typing.Optional[str] = None

    def to_json(self) -> T_JSON_DICT:
        # Required fields first, then conditionally the optional ones.
        result: T_JSON_DICT = {
            'url': self.url,
            'violationType': self.violation_type.to_json(),
        }
        if self.http_status_code is not None:
            result['httpStatusCode'] = self.http_status_code
        if self.package_name is not None:
            result['packageName'] = self.package_name
        if self.signature is not None:
            result['signature'] = self.signature
        return result

    @classmethod
    def from_json(cls, json: T_JSON_DICT) -> TrustedWebActivityIssueDetails:
        init: dict = {
            'url': str(json['url']),
            'violation_type': TwaQualityEnforcementViolationType.from_json(json['violationType']),
        }
        if 'httpStatusCode' in json:
            init['http_status_code'] = int(json['httpStatusCode'])
        if 'packageName' in json:
            init['package_name'] = str(json['packageName'])
        if 'signature' in json:
            init['signature'] = str(json['signature'])
        return cls(**init)
+
+
@dataclass
class LowTextContrastIssueDetails:
    '''
    Details of a low-text-contrast accessibility issue, including the
    offending node and the measured/required contrast ratios.
    '''
    violating_node_id: dom.BackendNodeId

    violating_node_selector: str

    contrast_ratio: float

    threshold_aa: float

    threshold_aaa: float

    font_size: str

    font_weight: str

    def to_json(self) -> T_JSON_DICT:
        # Every field is required on the wire.
        return {
            'violatingNodeId': self.violating_node_id.to_json(),
            'violatingNodeSelector': self.violating_node_selector,
            'contrastRatio': self.contrast_ratio,
            'thresholdAA': self.threshold_aa,
            'thresholdAAA': self.threshold_aaa,
            'fontSize': self.font_size,
            'fontWeight': self.font_weight,
        }

    @classmethod
    def from_json(cls, json: T_JSON_DICT) -> LowTextContrastIssueDetails:
        return cls(
            dom.BackendNodeId.from_json(json['violatingNodeId']),
            str(json['violatingNodeSelector']),
            float(json['contrastRatio']),
            float(json['thresholdAA']),
            float(json['thresholdAAA']),
            str(json['fontSize']),
            str(json['fontWeight']),
        )
+
+
@dataclass
class CorsIssueDetails:
    '''
    Details for a CORS related issue, e.g. a warning or error related to
    CORS RFC1918 enforcement.
    '''
    cors_error_status: network.CorsErrorStatus

    is_warning: bool

    request: AffectedRequest

    location: typing.Optional[SourceCodeLocation] = None

    initiator_origin: typing.Optional[str] = None

    resource_ip_address_space: typing.Optional[network.IPAddressSpace] = None

    client_security_state: typing.Optional[network.ClientSecurityState] = None

    def to_json(self) -> T_JSON_DICT:
        # Required fields first, then conditionally the optional ones.
        result: T_JSON_DICT = {
            'corsErrorStatus': self.cors_error_status.to_json(),
            'isWarning': self.is_warning,
            'request': self.request.to_json(),
        }
        if self.location is not None:
            result['location'] = self.location.to_json()
        if self.initiator_origin is not None:
            result['initiatorOrigin'] = self.initiator_origin
        if self.resource_ip_address_space is not None:
            result['resourceIPAddressSpace'] = self.resource_ip_address_space.to_json()
        if self.client_security_state is not None:
            result['clientSecurityState'] = self.client_security_state.to_json()
        return result

    @classmethod
    def from_json(cls, json: T_JSON_DICT) -> CorsIssueDetails:
        init: dict = {
            'cors_error_status': network.CorsErrorStatus.from_json(json['corsErrorStatus']),
            'is_warning': bool(json['isWarning']),
            'request': AffectedRequest.from_json(json['request']),
        }
        if 'location' in json:
            init['location'] = SourceCodeLocation.from_json(json['location'])
        if 'initiatorOrigin' in json:
            init['initiator_origin'] = str(json['initiatorOrigin'])
        if 'resourceIPAddressSpace' in json:
            init['resource_ip_address_space'] = network.IPAddressSpace.from_json(json['resourceIPAddressSpace'])
        if 'clientSecurityState' in json:
            init['client_security_state'] = network.ClientSecurityState.from_json(json['clientSecurityState'])
        return cls(**init)
+
+
class AttributionReportingIssueType(enum.Enum):
    '''
    Kinds of Attribution Reporting API issues. Values are the raw CDP
    protocol strings.
    '''
    PERMISSION_POLICY_DISABLED = "PermissionPolicyDisabled"
    INVALID_ATTRIBUTION_SOURCE_EVENT_ID = "InvalidAttributionSourceEventId"
    INVALID_ATTRIBUTION_DATA = "InvalidAttributionData"
    ATTRIBUTION_SOURCE_UNTRUSTWORTHY_ORIGIN = "AttributionSourceUntrustworthyOrigin"
    ATTRIBUTION_UNTRUSTWORTHY_ORIGIN = "AttributionUntrustworthyOrigin"
    ATTRIBUTION_TRIGGER_DATA_TOO_LARGE = "AttributionTriggerDataTooLarge"
    ATTRIBUTION_EVENT_SOURCE_TRIGGER_DATA_TOO_LARGE = "AttributionEventSourceTriggerDataTooLarge"
    INVALID_ATTRIBUTION_SOURCE_EXPIRY = "InvalidAttributionSourceExpiry"
    INVALID_ATTRIBUTION_SOURCE_PRIORITY = "InvalidAttributionSourcePriority"
    INVALID_EVENT_SOURCE_TRIGGER_DATA = "InvalidEventSourceTriggerData"
    INVALID_TRIGGER_PRIORITY = "InvalidTriggerPriority"
    INVALID_TRIGGER_DEDUP_KEY = "InvalidTriggerDedupKey"

    def to_json(self) -> str:
        # The wire format is the raw protocol string.
        return self.value

    @classmethod
    def from_json(cls, json: str) -> AttributionReportingIssueType:
        # Enum value lookup; raises ValueError for unknown strings.
        return cls(json)
+
+
@dataclass
class AttributionReportingIssueDetails:
    '''
    Details for issues around "Attribution Reporting API" usage.
    Explainer: https://github.com/WICG/conversion-measurement-api
    '''
    violation_type: AttributionReportingIssueType

    frame: typing.Optional[AffectedFrame] = None

    request: typing.Optional[AffectedRequest] = None

    violating_node_id: typing.Optional[dom.BackendNodeId] = None

    invalid_parameter: typing.Optional[str] = None

    def to_json(self) -> T_JSON_DICT:
        # Only 'violationType' is required; everything else is omitted when unset.
        result: T_JSON_DICT = {'violationType': self.violation_type.to_json()}
        if self.frame is not None:
            result['frame'] = self.frame.to_json()
        if self.request is not None:
            result['request'] = self.request.to_json()
        if self.violating_node_id is not None:
            result['violatingNodeId'] = self.violating_node_id.to_json()
        if self.invalid_parameter is not None:
            result['invalidParameter'] = self.invalid_parameter
        return result

    @classmethod
    def from_json(cls, json: T_JSON_DICT) -> AttributionReportingIssueDetails:
        init: dict = {'violation_type': AttributionReportingIssueType.from_json(json['violationType'])}
        if 'frame' in json:
            init['frame'] = AffectedFrame.from_json(json['frame'])
        if 'request' in json:
            init['request'] = AffectedRequest.from_json(json['request'])
        if 'violatingNodeId' in json:
            init['violating_node_id'] = dom.BackendNodeId.from_json(json['violatingNodeId'])
        if 'invalidParameter' in json:
            init['invalid_parameter'] = str(json['invalidParameter'])
        return cls(**init)
+
+
@dataclass
class QuirksModeIssueDetails:
    '''
    Details for issues about documents in Quirks Mode
    or Limited Quirks Mode that affects page layouting.
    '''
    #: If false, it means the document's mode is "quirks"
    #: instead of "limited-quirks".
    is_limited_quirks_mode: bool

    document_node_id: dom.BackendNodeId

    url: str

    frame_id: page.FrameId

    loader_id: network.LoaderId

    def to_json(self) -> T_JSON_DICT:
        # Every field is required on the wire.
        return {
            'isLimitedQuirksMode': self.is_limited_quirks_mode,
            'documentNodeId': self.document_node_id.to_json(),
            'url': self.url,
            'frameId': self.frame_id.to_json(),
            'loaderId': self.loader_id.to_json(),
        }

    @classmethod
    def from_json(cls, json: T_JSON_DICT) -> QuirksModeIssueDetails:
        return cls(
            bool(json['isLimitedQuirksMode']),
            dom.BackendNodeId.from_json(json['documentNodeId']),
            str(json['url']),
            page.FrameId.from_json(json['frameId']),
            network.LoaderId.from_json(json['loaderId']),
        )
+
+
@dataclass
class NavigatorUserAgentIssueDetails:
    '''
    Details of a navigator.userAgent related issue, with the URL and an
    optional source location.
    '''
    url: str

    location: typing.Optional[SourceCodeLocation] = None

    def to_json(self) -> T_JSON_DICT:
        # 'location' is optional and omitted when unset.
        result: T_JSON_DICT = {'url': self.url}
        if self.location is not None:
            result['location'] = self.location.to_json()
        return result

    @classmethod
    def from_json(cls, json: T_JSON_DICT) -> NavigatorUserAgentIssueDetails:
        init: dict = {'url': str(json['url'])}
        if 'location' in json:
            init['location'] = SourceCodeLocation.from_json(json['location'])
        return cls(**init)
+
+
class GenericIssueErrorType(enum.Enum):
    '''
    Error types for generic issues. Values are the raw CDP protocol strings.
    '''
    CROSS_ORIGIN_PORTAL_POST_MESSAGE_ERROR = "CrossOriginPortalPostMessageError"

    def to_json(self) -> str:
        # The wire format is the raw protocol string.
        return self.value

    @classmethod
    def from_json(cls, json: str) -> GenericIssueErrorType:
        # Enum value lookup; raises ValueError for unknown strings.
        return cls(json)
+
+
@dataclass
class GenericIssueDetails:
    '''
    Depending on the concrete errorType, different properties are set.
    '''
    #: Issues with the same errorType are aggregated in the frontend.
    error_type: GenericIssueErrorType

    frame_id: typing.Optional[page.FrameId] = None

    def to_json(self) -> T_JSON_DICT:
        # 'frameId' is optional and omitted when unset.
        result: T_JSON_DICT = {'errorType': self.error_type.to_json()}
        if self.frame_id is not None:
            result['frameId'] = self.frame_id.to_json()
        return result

    @classmethod
    def from_json(cls, json: T_JSON_DICT) -> GenericIssueDetails:
        init: dict = {'error_type': GenericIssueErrorType.from_json(json['errorType'])}
        if 'frameId' in json:
            init['frame_id'] = page.FrameId.from_json(json['frameId'])
        return cls(**init)
+
+
@dataclass
class DeprecationIssueDetails:
    '''
    This issue tracks information needed to print a deprecation message.
    The formatting is inherited from the old console.log version, see more at:
    https://source.chromium.org/chromium/chromium/src/+/main:third_party/blink/renderer/core/frame/deprecation.cc
    TODO(crbug.com/1264960): Re-work format to add i18n support per:
    https://source.chromium.org/chromium/chromium/src/+/main:third_party/blink/public/devtools_protocol/README.md
    '''
    source_code_location: SourceCodeLocation

    deprecation_type: str

    affected_frame: typing.Optional[AffectedFrame] = None

    #: The content of the deprecation issue (this won't be translated),
    #: e.g. "window.inefficientLegacyStorageMethod will be removed in M97,
    #: around January 2022. Please use Web Storage or Indexed Database
    #: instead. This standard was abandoned in January, 1970. See
    #: https://www.chromestatus.com/feature/5684870116278272 for more details."
    message: typing.Optional[str] = None

    def to_json(self) -> T_JSON_DICT:
        # Required fields first, then conditionally the optional ones.
        result: T_JSON_DICT = {
            'sourceCodeLocation': self.source_code_location.to_json(),
            'deprecationType': self.deprecation_type,
        }
        if self.affected_frame is not None:
            result['affectedFrame'] = self.affected_frame.to_json()
        if self.message is not None:
            result['message'] = self.message
        return result

    @classmethod
    def from_json(cls, json: T_JSON_DICT) -> DeprecationIssueDetails:
        init: dict = {
            'source_code_location': SourceCodeLocation.from_json(json['sourceCodeLocation']),
            'deprecation_type': str(json['deprecationType']),
        }
        if 'affectedFrame' in json:
            init['affected_frame'] = AffectedFrame.from_json(json['affectedFrame'])
        if 'message' in json:
            init['message'] = str(json['message'])
        return cls(**init)
+
+
class ClientHintIssueReason(enum.Enum):
    '''
    Reasons for client hint issues raised for meta tag usage. Values are the
    raw CDP protocol strings.
    '''
    META_TAG_ALLOW_LIST_INVALID_ORIGIN = "MetaTagAllowListInvalidOrigin"
    META_TAG_MODIFIED_HTML = "MetaTagModifiedHTML"

    def to_json(self) -> str:
        # The wire format is the raw protocol string.
        return self.value

    @classmethod
    def from_json(cls, json: str) -> ClientHintIssueReason:
        # Enum value lookup; raises ValueError for unknown strings.
        return cls(json)
+
+
@dataclass
class FederatedAuthRequestIssueDetails:
    '''
    Details of a federated authentication request issue, carrying the
    failure reason.
    '''
    # NOTE: FederatedAuthRequestIssueReason is defined later in this module;
    # the annotation is lazy (``from __future__ import annotations``) and the
    # runtime lookup in from_json only happens when it is called.
    federated_auth_request_issue_reason: FederatedAuthRequestIssueReason

    def to_json(self) -> T_JSON_DICT:
        return {
            'federatedAuthRequestIssueReason': self.federated_auth_request_issue_reason.to_json(),
        }

    @classmethod
    def from_json(cls, json: T_JSON_DICT) -> FederatedAuthRequestIssueDetails:
        reason = FederatedAuthRequestIssueReason.from_json(json['federatedAuthRequestIssueReason'])
        return cls(federated_auth_request_issue_reason=reason)
+
+
class FederatedAuthRequestIssueReason(enum.Enum):
    '''
    Represents the failure reason when a federated authentication reason fails.
    Should be updated alongside RequestIdTokenStatus in
    third_party/blink/public/mojom/webid/federated_auth_request.mojom to include
    all cases except for success.
    '''
    APPROVAL_DECLINED = "ApprovalDeclined"
    TOO_MANY_REQUESTS = "TooManyRequests"
    MANIFEST_HTTP_NOT_FOUND = "ManifestHttpNotFound"
    MANIFEST_NO_RESPONSE = "ManifestNoResponse"
    MANIFEST_INVALID_RESPONSE = "ManifestInvalidResponse"
    CLIENT_METADATA_HTTP_NOT_FOUND = "ClientMetadataHttpNotFound"
    CLIENT_METADATA_NO_RESPONSE = "ClientMetadataNoResponse"
    CLIENT_METADATA_INVALID_RESPONSE = "ClientMetadataInvalidResponse"
    ERROR_FETCHING_SIGNIN = "ErrorFetchingSignin"
    INVALID_SIGNIN_RESPONSE = "InvalidSigninResponse"
    ACCOUNTS_HTTP_NOT_FOUND = "AccountsHttpNotFound"
    ACCOUNTS_NO_RESPONSE = "AccountsNoResponse"
    ACCOUNTS_INVALID_RESPONSE = "AccountsInvalidResponse"
    ID_TOKEN_HTTP_NOT_FOUND = "IdTokenHttpNotFound"
    ID_TOKEN_NO_RESPONSE = "IdTokenNoResponse"
    ID_TOKEN_INVALID_RESPONSE = "IdTokenInvalidResponse"
    ID_TOKEN_INVALID_REQUEST = "IdTokenInvalidRequest"
    ERROR_ID_TOKEN = "ErrorIdToken"
    CANCELED = "Canceled"

    def to_json(self) -> str:
        # The wire format is the raw protocol string.
        return self.value

    @classmethod
    def from_json(cls, json: str) -> FederatedAuthRequestIssueReason:
        # Enum value lookup; raises ValueError for unknown strings.
        return cls(json)
+
+
@dataclass
class ClientHintIssueDetails:
    '''
    This issue tracks client hints related issues. It's used to deprecate old
    features, encourage the use of new ones, and provide general guidance.
    '''
    source_code_location: SourceCodeLocation

    client_hint_issue_reason: ClientHintIssueReason

    def to_json(self) -> T_JSON_DICT:
        # Both fields are required on the wire.
        return {
            'sourceCodeLocation': self.source_code_location.to_json(),
            'clientHintIssueReason': self.client_hint_issue_reason.to_json(),
        }

    @classmethod
    def from_json(cls, json: T_JSON_DICT) -> ClientHintIssueDetails:
        return cls(
            SourceCodeLocation.from_json(json['sourceCodeLocation']),
            ClientHintIssueReason.from_json(json['clientHintIssueReason']),
        )
+
+
class InspectorIssueCode(enum.Enum):
    '''
    A unique identifier for the type of issue. Each type may use one of the
    optional fields in InspectorIssueDetails to convey more specific
    information about the kind of issue.
    '''
    SAME_SITE_COOKIE_ISSUE = "SameSiteCookieIssue"
    MIXED_CONTENT_ISSUE = "MixedContentIssue"
    BLOCKED_BY_RESPONSE_ISSUE = "BlockedByResponseIssue"
    HEAVY_AD_ISSUE = "HeavyAdIssue"
    CONTENT_SECURITY_POLICY_ISSUE = "ContentSecurityPolicyIssue"
    SHARED_ARRAY_BUFFER_ISSUE = "SharedArrayBufferIssue"
    TRUSTED_WEB_ACTIVITY_ISSUE = "TrustedWebActivityIssue"
    LOW_TEXT_CONTRAST_ISSUE = "LowTextContrastIssue"
    CORS_ISSUE = "CorsIssue"
    ATTRIBUTION_REPORTING_ISSUE = "AttributionReportingIssue"
    QUIRKS_MODE_ISSUE = "QuirksModeIssue"
    NAVIGATOR_USER_AGENT_ISSUE = "NavigatorUserAgentIssue"
    GENERIC_ISSUE = "GenericIssue"
    DEPRECATION_ISSUE = "DeprecationIssue"
    CLIENT_HINT_ISSUE = "ClientHintIssue"
    FEDERATED_AUTH_REQUEST_ISSUE = "FederatedAuthRequestIssue"

    def to_json(self) -> str:
        # The wire format is the raw protocol string.
        return self.value

    @classmethod
    def from_json(cls, json: str) -> InspectorIssueCode:
        # Enum value lookup; raises ValueError for unknown strings.
        return cls(json)
+
+
+@dataclass
+class InspectorIssueDetails:
+ '''
+ This struct holds a list of optional fields with additional information
+ specific to the kind of issue. When adding a new issue code, please also
+ add a new optional field to this type.
+ '''
+ same_site_cookie_issue_details: typing.Optional[SameSiteCookieIssueDetails] = None
+
+ mixed_content_issue_details: typing.Optional[MixedContentIssueDetails] = None
+
+ blocked_by_response_issue_details: typing.Optional[BlockedByResponseIssueDetails] = None
+
+ heavy_ad_issue_details: typing.Optional[HeavyAdIssueDetails] = None
+
+ content_security_policy_issue_details: typing.Optional[ContentSecurityPolicyIssueDetails] = None
+
+ shared_array_buffer_issue_details: typing.Optional[SharedArrayBufferIssueDetails] = None
+
+ twa_quality_enforcement_details: typing.Optional[TrustedWebActivityIssueDetails] = None
+
+ low_text_contrast_issue_details: typing.Optional[LowTextContrastIssueDetails] = None
+
+ cors_issue_details: typing.Optional[CorsIssueDetails] = None
+
+ attribution_reporting_issue_details: typing.Optional[AttributionReportingIssueDetails] = None
+
+ quirks_mode_issue_details: typing.Optional[QuirksModeIssueDetails] = None
+
+ navigator_user_agent_issue_details: typing.Optional[NavigatorUserAgentIssueDetails] = None
+
+ generic_issue_details: typing.Optional[GenericIssueDetails] = None
+
+ deprecation_issue_details: typing.Optional[DeprecationIssueDetails] = None
+
+ client_hint_issue_details: typing.Optional[ClientHintIssueDetails] = None
+
+ federated_auth_request_issue_details: typing.Optional[FederatedAuthRequestIssueDetails] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ if self.same_site_cookie_issue_details is not None:
+ json['sameSiteCookieIssueDetails'] = self.same_site_cookie_issue_details.to_json()
+ if self.mixed_content_issue_details is not None:
+ json['mixedContentIssueDetails'] = self.mixed_content_issue_details.to_json()
+ if self.blocked_by_response_issue_details is not None:
+ json['blockedByResponseIssueDetails'] = self.blocked_by_response_issue_details.to_json()
+ if self.heavy_ad_issue_details is not None:
+ json['heavyAdIssueDetails'] = self.heavy_ad_issue_details.to_json()
+ if self.content_security_policy_issue_details is not None:
+ json['contentSecurityPolicyIssueDetails'] = self.content_security_policy_issue_details.to_json()
+ if self.shared_array_buffer_issue_details is not None:
+ json['sharedArrayBufferIssueDetails'] = self.shared_array_buffer_issue_details.to_json()
+ if self.twa_quality_enforcement_details is not None:
+ json['twaQualityEnforcementDetails'] = self.twa_quality_enforcement_details.to_json()
+ if self.low_text_contrast_issue_details is not None:
+ json['lowTextContrastIssueDetails'] = self.low_text_contrast_issue_details.to_json()
+ if self.cors_issue_details is not None:
+ json['corsIssueDetails'] = self.cors_issue_details.to_json()
+ if self.attribution_reporting_issue_details is not None:
+ json['attributionReportingIssueDetails'] = self.attribution_reporting_issue_details.to_json()
+ if self.quirks_mode_issue_details is not None:
+ json['quirksModeIssueDetails'] = self.quirks_mode_issue_details.to_json()
+ if self.navigator_user_agent_issue_details is not None:
+ json['navigatorUserAgentIssueDetails'] = self.navigator_user_agent_issue_details.to_json()
+ if self.generic_issue_details is not None:
+ json['genericIssueDetails'] = self.generic_issue_details.to_json()
+ if self.deprecation_issue_details is not None:
+ json['deprecationIssueDetails'] = self.deprecation_issue_details.to_json()
+ if self.client_hint_issue_details is not None:
+ json['clientHintIssueDetails'] = self.client_hint_issue_details.to_json()
+ if self.federated_auth_request_issue_details is not None:
+ json['federatedAuthRequestIssueDetails'] = self.federated_auth_request_issue_details.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> InspectorIssueDetails:
+ return cls(
+ same_site_cookie_issue_details=SameSiteCookieIssueDetails.from_json(json['sameSiteCookieIssueDetails']) if 'sameSiteCookieIssueDetails' in json else None,
+ mixed_content_issue_details=MixedContentIssueDetails.from_json(json['mixedContentIssueDetails']) if 'mixedContentIssueDetails' in json else None,
+ blocked_by_response_issue_details=BlockedByResponseIssueDetails.from_json(json['blockedByResponseIssueDetails']) if 'blockedByResponseIssueDetails' in json else None,
+ heavy_ad_issue_details=HeavyAdIssueDetails.from_json(json['heavyAdIssueDetails']) if 'heavyAdIssueDetails' in json else None,
+ content_security_policy_issue_details=ContentSecurityPolicyIssueDetails.from_json(json['contentSecurityPolicyIssueDetails']) if 'contentSecurityPolicyIssueDetails' in json else None,
+ shared_array_buffer_issue_details=SharedArrayBufferIssueDetails.from_json(json['sharedArrayBufferIssueDetails']) if 'sharedArrayBufferIssueDetails' in json else None,
+ twa_quality_enforcement_details=TrustedWebActivityIssueDetails.from_json(json['twaQualityEnforcementDetails']) if 'twaQualityEnforcementDetails' in json else None,
+ low_text_contrast_issue_details=LowTextContrastIssueDetails.from_json(json['lowTextContrastIssueDetails']) if 'lowTextContrastIssueDetails' in json else None,
+ cors_issue_details=CorsIssueDetails.from_json(json['corsIssueDetails']) if 'corsIssueDetails' in json else None,
+ attribution_reporting_issue_details=AttributionReportingIssueDetails.from_json(json['attributionReportingIssueDetails']) if 'attributionReportingIssueDetails' in json else None,
+ quirks_mode_issue_details=QuirksModeIssueDetails.from_json(json['quirksModeIssueDetails']) if 'quirksModeIssueDetails' in json else None,
+ navigator_user_agent_issue_details=NavigatorUserAgentIssueDetails.from_json(json['navigatorUserAgentIssueDetails']) if 'navigatorUserAgentIssueDetails' in json else None,
+ generic_issue_details=GenericIssueDetails.from_json(json['genericIssueDetails']) if 'genericIssueDetails' in json else None,
+ deprecation_issue_details=DeprecationIssueDetails.from_json(json['deprecationIssueDetails']) if 'deprecationIssueDetails' in json else None,
+ client_hint_issue_details=ClientHintIssueDetails.from_json(json['clientHintIssueDetails']) if 'clientHintIssueDetails' in json else None,
+ federated_auth_request_issue_details=FederatedAuthRequestIssueDetails.from_json(json['federatedAuthRequestIssueDetails']) if 'federatedAuthRequestIssueDetails' in json else None,
+ )
+
+
+class IssueId(str):
+ '''
+ A unique id for a DevTools inspector issue. Allows other entities (e.g.
+ exceptions, CDP message, console messages, etc.) to reference an issue.
+ '''
+ def to_json(self) -> str:
+ return self
+
+ @classmethod
+ def from_json(cls, json: str) -> IssueId:
+ return cls(json)
+
+ def __repr__(self):
+ return 'IssueId({})'.format(super().__repr__())
+
+
+@dataclass
+class InspectorIssue:
+ '''
+ An inspector issue reported from the back-end.
+ '''
+ code: InspectorIssueCode
+
+ details: InspectorIssueDetails
+
+ #: A unique id for this issue. May be omitted if no other entity (e.g.
+ #: exception, CDP message, etc.) is referencing this issue.
+ issue_id: typing.Optional[IssueId] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['code'] = self.code.to_json()
+ json['details'] = self.details.to_json()
+ if self.issue_id is not None:
+ json['issueId'] = self.issue_id.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> InspectorIssue:
+ return cls(
+ code=InspectorIssueCode.from_json(json['code']),
+ details=InspectorIssueDetails.from_json(json['details']),
+ issue_id=IssueId.from_json(json['issueId']) if 'issueId' in json else None,
+ )
def get_encoded_response(
@@ -30,7 +1147,7 @@ def get_encoded_response(
:param size_only: *(Optional)* Whether to only return the size information (defaults to false).
:returns: A tuple with the following items:
- 0. **body** - *(Optional)* The encoded body as a base64 string. Omitted if sizeOnly is true.
+ 0. **body** - *(Optional)* The encoded body as a base64 string. Omitted if sizeOnly is true. (Encoded as a base64 string when passed over JSON)
1. **originalSize** - Size before re-encoding.
2. **encodedSize** - Size after re-encoding.
'''
@@ -51,3 +1168,55 @@ def get_encoded_response(
int(json['originalSize']),
int(json['encodedSize'])
)
+
+
+def disable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Disables issues domain, prevents further issues from being reported to the client.
+ '''
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Audits.disable',
+ }
+ json = yield cmd_dict
+
+
+def enable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Enables issues domain, sends the issues collected so far to the client by means of the
+ ``issueAdded`` event.
+ '''
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Audits.enable',
+ }
+ json = yield cmd_dict
+
+
+def check_contrast(
+ report_aaa: typing.Optional[bool] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Runs the contrast check for the target page. Found issues are reported
+ using Audits.issueAdded event.
+
+ :param report_aaa: *(Optional)* Whether to report WCAG AAA level issues. Default is false.
+ '''
+ params: T_JSON_DICT = dict()
+ if report_aaa is not None:
+ params['reportAAA'] = report_aaa
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Audits.checkContrast',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+@event_class('Audits.issueAdded')
+@dataclass
+class IssueAdded:
+ issue: InspectorIssue
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> IssueAdded:
+ return cls(
+ issue=InspectorIssue.from_json(json['issue'])
+ )
diff --git a/cdp/background_service.py b/cdp/background_service.py
index 703747c..49e0696 100644
--- a/cdp/background_service.py
+++ b/cdp/background_service.py
@@ -26,6 +26,7 @@ class ServiceName(enum.Enum):
PUSH_MESSAGING = "pushMessaging"
NOTIFICATIONS = "notifications"
PAYMENT_HANDLER = "paymentHandler"
+ PERIODIC_BACKGROUND_SYNC = "periodicBackgroundSync"
def to_json(self) -> str:
return self.value
diff --git a/cdp/browser.py b/cdp/browser.py
index 20102f7..b9e7805 100644
--- a/cdp/browser.py
+++ b/cdp/browser.py
@@ -11,9 +11,22 @@
import enum
import typing
+from . import page
from . import target
+class BrowserContextID(str):
+ def to_json(self) -> str:
+ return self
+
+ @classmethod
+ def from_json(cls, json: str) -> BrowserContextID:
+ return cls(json)
+
+ def __repr__(self):
+ return 'BrowserContextID({})'.format(super().__repr__())
+
+
class WindowID(int):
def to_json(self) -> int:
return self
@@ -93,19 +106,22 @@ class PermissionType(enum.Enum):
AUDIO_CAPTURE = "audioCapture"
BACKGROUND_SYNC = "backgroundSync"
BACKGROUND_FETCH = "backgroundFetch"
- CLIPBOARD_READ = "clipboardRead"
- CLIPBOARD_WRITE = "clipboardWrite"
+ CLIPBOARD_READ_WRITE = "clipboardReadWrite"
+ CLIPBOARD_SANITIZED_WRITE = "clipboardSanitizedWrite"
+ DISPLAY_CAPTURE = "displayCapture"
DURABLE_STORAGE = "durableStorage"
FLASH = "flash"
GEOLOCATION = "geolocation"
MIDI = "midi"
MIDI_SYSEX = "midiSysex"
+ NFC = "nfc"
NOTIFICATIONS = "notifications"
PAYMENT_HANDLER = "paymentHandler"
PERIODIC_BACKGROUND_SYNC = "periodicBackgroundSync"
PROTECTED_MEDIA_IDENTIFIER = "protectedMediaIdentifier"
SENSORS = "sensors"
VIDEO_CAPTURE = "videoCapture"
+ VIDEO_CAPTURE_PAN_TILT_ZOOM = "videoCapturePanTiltZoom"
IDLE_DETECTION = "idleDetection"
WAKE_LOCK_SCREEN = "wakeLockScreen"
WAKE_LOCK_SYSTEM = "wakeLockSystem"
@@ -118,6 +134,81 @@ def from_json(cls, json: str) -> PermissionType:
return cls(json)
+class PermissionSetting(enum.Enum):
+ GRANTED = "granted"
+ DENIED = "denied"
+ PROMPT = "prompt"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> PermissionSetting:
+ return cls(json)
+
+
+@dataclass
+class PermissionDescriptor:
+ '''
+ Definition of PermissionDescriptor defined in the Permissions API:
+ https://w3c.github.io/permissions/#dictdef-permissiondescriptor.
+ '''
+ #: Name of permission.
+ #: See https://cs.chromium.org/chromium/src/third_party/blink/renderer/modules/permissions/permission_descriptor.idl for valid permission names.
+ name: str
+
+ #: For "midi" permission, may also specify sysex control.
+ sysex: typing.Optional[bool] = None
+
+ #: For "push" permission, may specify userVisibleOnly.
+ #: Note that userVisibleOnly = true is the only currently supported type.
+ user_visible_only: typing.Optional[bool] = None
+
+ #: For "clipboard" permission, may specify allowWithoutSanitization.
+ allow_without_sanitization: typing.Optional[bool] = None
+
+ #: For "camera" permission, may specify panTiltZoom.
+ pan_tilt_zoom: typing.Optional[bool] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['name'] = self.name
+ if self.sysex is not None:
+ json['sysex'] = self.sysex
+ if self.user_visible_only is not None:
+ json['userVisibleOnly'] = self.user_visible_only
+ if self.allow_without_sanitization is not None:
+ json['allowWithoutSanitization'] = self.allow_without_sanitization
+ if self.pan_tilt_zoom is not None:
+ json['panTiltZoom'] = self.pan_tilt_zoom
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> PermissionDescriptor:
+ return cls(
+ name=str(json['name']),
+ sysex=bool(json['sysex']) if 'sysex' in json else None,
+ user_visible_only=bool(json['userVisibleOnly']) if 'userVisibleOnly' in json else None,
+ allow_without_sanitization=bool(json['allowWithoutSanitization']) if 'allowWithoutSanitization' in json else None,
+ pan_tilt_zoom=bool(json['panTiltZoom']) if 'panTiltZoom' in json else None,
+ )
+
+
+class BrowserCommandId(enum.Enum):
+ '''
+ Browser command ids used by executeBrowserCommand.
+ '''
+ OPEN_TAB_SEARCH = "openTabSearch"
+ CLOSE_TAB_SEARCH = "closeTabSearch"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> BrowserCommandId:
+ return cls(json)
+
+
@dataclass
class Bucket:
'''
@@ -183,23 +274,54 @@ def from_json(cls, json: T_JSON_DICT) -> Histogram:
)
+def set_permission(
+ permission: PermissionDescriptor,
+ setting: PermissionSetting,
+ origin: typing.Optional[str] = None,
+ browser_context_id: typing.Optional[BrowserContextID] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Set permission settings for given origin.
+
+ **EXPERIMENTAL**
+
+ :param permission: Descriptor of permission to override.
+ :param setting: Setting of the permission.
+ :param origin: *(Optional)* Origin the permission applies to, all origins if not specified.
+ :param browser_context_id: *(Optional)* Context to override. When omitted, default browser context is used.
+ '''
+ params: T_JSON_DICT = dict()
+ params['permission'] = permission.to_json()
+ params['setting'] = setting.to_json()
+ if origin is not None:
+ params['origin'] = origin
+ if browser_context_id is not None:
+ params['browserContextId'] = browser_context_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Browser.setPermission',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def grant_permissions(
- origin: str,
permissions: typing.List[PermissionType],
- browser_context_id: typing.Optional[target.BrowserContextID] = None
+ origin: typing.Optional[str] = None,
+ browser_context_id: typing.Optional[BrowserContextID] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Grant specific permissions to the given origin and reject all others.
**EXPERIMENTAL**
- :param origin:
:param permissions:
+ :param origin: *(Optional)* Origin the permission applies to, all origins if not specified.
:param browser_context_id: *(Optional)* BrowserContext to override permissions. When omitted, default browser context is used.
'''
params: T_JSON_DICT = dict()
- params['origin'] = origin
params['permissions'] = [i.to_json() for i in permissions]
+ if origin is not None:
+ params['origin'] = origin
if browser_context_id is not None:
params['browserContextId'] = browser_context_id.to_json()
cmd_dict: T_JSON_DICT = {
@@ -210,7 +332,7 @@ def grant_permissions(
def reset_permissions(
- browser_context_id: typing.Optional[target.BrowserContextID] = None
+ browser_context_id: typing.Optional[BrowserContextID] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Reset all permission management for all origins.
@@ -229,6 +351,60 @@ def reset_permissions(
json = yield cmd_dict
+def set_download_behavior(
+ behavior: str,
+ browser_context_id: typing.Optional[BrowserContextID] = None,
+ download_path: typing.Optional[str] = None,
+ events_enabled: typing.Optional[bool] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Set the behavior when downloading a file.
+
+ **EXPERIMENTAL**
+
+    :param behavior: Whether to allow all or deny all download requests, or use default Chrome behavior if available (otherwise deny). ``allowAndName`` allows download and names files according to their download guids.
+ :param browser_context_id: *(Optional)* BrowserContext to set download behavior. When omitted, default browser context is used.
+ :param download_path: *(Optional)* The default path to save downloaded files to. This is required if behavior is set to 'allow' or 'allowAndName'.
+ :param events_enabled: *(Optional)* Whether to emit download events (defaults to false).
+ '''
+ params: T_JSON_DICT = dict()
+ params['behavior'] = behavior
+ if browser_context_id is not None:
+ params['browserContextId'] = browser_context_id.to_json()
+ if download_path is not None:
+ params['downloadPath'] = download_path
+ if events_enabled is not None:
+ params['eventsEnabled'] = events_enabled
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Browser.setDownloadBehavior',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def cancel_download(
+ guid: str,
+ browser_context_id: typing.Optional[BrowserContextID] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Cancel a download if in progress
+
+ **EXPERIMENTAL**
+
+ :param guid: Global unique identifier of the download.
+ :param browser_context_id: *(Optional)* BrowserContext to perform the action in. When omitted, default browser context is used.
+ '''
+ params: T_JSON_DICT = dict()
+ params['guid'] = guid
+ if browser_context_id is not None:
+ params['browserContextId'] = browser_context_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Browser.cancelDownload',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def close() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Close browser gracefully.
@@ -436,7 +612,7 @@ def set_dock_tile(
**EXPERIMENTAL**
:param badge_label: *(Optional)*
- :param image: *(Optional)* Png encoded image.
+ :param image: *(Optional)* Png encoded image. (Encoded as a base64 string when passed over JSON)
'''
params: T_JSON_DICT = dict()
if badge_label is not None:
@@ -448,3 +624,76 @@ def set_dock_tile(
'params': params,
}
json = yield cmd_dict
+
+
+def execute_browser_command(
+ command_id: BrowserCommandId
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Invoke custom browser commands used by telemetry.
+
+ **EXPERIMENTAL**
+
+ :param command_id:
+ '''
+ params: T_JSON_DICT = dict()
+ params['commandId'] = command_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Browser.executeBrowserCommand',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+@event_class('Browser.downloadWillBegin')
+@dataclass
+class DownloadWillBegin:
+ '''
+ **EXPERIMENTAL**
+
+ Fired when page is about to start a download.
+ '''
+ #: Id of the frame that caused the download to begin.
+ frame_id: page.FrameId
+ #: Global unique identifier of the download.
+ guid: str
+ #: URL of the resource being downloaded.
+ url: str
+ #: Suggested file name of the resource (the actual name of the file saved on disk may differ).
+ suggested_filename: str
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> DownloadWillBegin:
+ return cls(
+ frame_id=page.FrameId.from_json(json['frameId']),
+ guid=str(json['guid']),
+ url=str(json['url']),
+ suggested_filename=str(json['suggestedFilename'])
+ )
+
+
+@event_class('Browser.downloadProgress')
+@dataclass
+class DownloadProgress:
+ '''
+ **EXPERIMENTAL**
+
+ Fired when download makes progress. Last call has ``done`` == true.
+ '''
+ #: Global unique identifier of the download.
+ guid: str
+ #: Total expected bytes to download.
+ total_bytes: float
+ #: Total bytes received.
+ received_bytes: float
+ #: Download status.
+ state: str
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> DownloadProgress:
+ return cls(
+ guid=str(json['guid']),
+ total_bytes=float(json['totalBytes']),
+ received_bytes=float(json['receivedBytes']),
+ state=str(json['state'])
+ )
diff --git a/cdp/cache_storage.py b/cdp/cache_storage.py
index 62646c8..f0d40a6 100644
--- a/cdp/cache_storage.py
+++ b/cdp/cache_storage.py
@@ -156,7 +156,7 @@ class CachedResponse:
'''
Cached response
'''
- #: Entry content, base64-encoded.
+ #: Entry content, base64-encoded. (Encoded as a base64 string when passed over JSON)
body: str
def to_json(self) -> T_JSON_DICT:
@@ -254,16 +254,16 @@ def request_cached_response(
def request_entries(
cache_id: CacheId,
- skip_count: int,
- page_size: int,
+ skip_count: typing.Optional[int] = None,
+ page_size: typing.Optional[int] = None,
path_filter: typing.Optional[str] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.List[DataEntry], float]]:
'''
Requests data from cache.
:param cache_id: ID of cache to get entries from.
- :param skip_count: Number of records to skip.
- :param page_size: Number of records to fetch.
+ :param skip_count: *(Optional)* Number of records to skip.
+ :param page_size: *(Optional)* Number of records to fetch.
:param path_filter: *(Optional)* If present, only return the entries containing this substring in the path
:returns: A tuple with the following items:
@@ -272,8 +272,10 @@ def request_entries(
'''
params: T_JSON_DICT = dict()
params['cacheId'] = cache_id.to_json()
- params['skipCount'] = skip_count
- params['pageSize'] = page_size
+ if skip_count is not None:
+ params['skipCount'] = skip_count
+ if page_size is not None:
+ params['pageSize'] = page_size
if path_filter is not None:
params['pathFilter'] = path_filter
cmd_dict: T_JSON_DICT = {
diff --git a/cdp/cast.py b/cdp/cast.py
index 054071c..4ee4045 100644
--- a/cdp/cast.py
+++ b/cdp/cast.py
@@ -89,6 +89,23 @@ def set_sink_to_use(
json = yield cmd_dict
+def start_desktop_mirroring(
+ sink_name: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Starts mirroring the desktop to the sink.
+
+ :param sink_name:
+ '''
+ params: T_JSON_DICT = dict()
+ params['sinkName'] = sink_name
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Cast.startDesktopMirroring',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def start_tab_mirroring(
sink_name: str
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
diff --git a/cdp/css.py b/cdp/css.py
index bee93c6..668e8ab 100644
--- a/cdp/css.py
+++ b/cdp/css.py
@@ -184,7 +184,9 @@ class CSSStyleSheetHeader:
#: Owner frame identifier.
frame_id: page.FrameId
- #: Stylesheet resource URL.
+ #: Stylesheet resource URL. Empty if this is a constructed stylesheet created using
+    #: new CSSStyleSheet() (but non-empty if this is a constructed stylesheet imported
+ #: as a CSS module script).
source_url: str
#: Stylesheet origin.
@@ -200,6 +202,16 @@ class CSSStyleSheetHeader:
#: document.written STYLE tags.
is_inline: bool
+ #: Whether this stylesheet is mutable. Inline stylesheets become mutable
+ #: after they have been modified via CSSOM API.
+    #: ``<link>`` element's stylesheets become mutable only if DevTools modifies them.
+ #: Constructed stylesheets (new CSSStyleSheet()) are mutable immediately after creation.
+ is_mutable: bool
+
+ #: True if this stylesheet is created through new CSSStyleSheet() or imported as a
+ #: CSS module script.
+ is_constructed: bool
+
#: Line offset of the stylesheet within the resource (zero based).
start_line: float
@@ -209,6 +221,12 @@ class CSSStyleSheetHeader:
#: Size of the content (in characters).
length: float
+ #: Line offset of the end of the stylesheet within the resource (zero based).
+ end_line: float
+
+ #: Column offset of the end of the stylesheet within the resource (zero based).
+ end_column: float
+
#: URL of source map associated with the stylesheet (if any).
source_map_url: typing.Optional[str] = None
@@ -227,9 +245,13 @@ def to_json(self) -> T_JSON_DICT:
json['title'] = self.title
json['disabled'] = self.disabled
json['isInline'] = self.is_inline
+ json['isMutable'] = self.is_mutable
+ json['isConstructed'] = self.is_constructed
json['startLine'] = self.start_line
json['startColumn'] = self.start_column
json['length'] = self.length
+ json['endLine'] = self.end_line
+ json['endColumn'] = self.end_column
if self.source_map_url is not None:
json['sourceMapURL'] = self.source_map_url
if self.owner_node is not None:
@@ -248,9 +270,13 @@ def from_json(cls, json: T_JSON_DICT) -> CSSStyleSheetHeader:
title=str(json['title']),
disabled=bool(json['disabled']),
is_inline=bool(json['isInline']),
+ is_mutable=bool(json['isMutable']),
+ is_constructed=bool(json['isConstructed']),
start_line=float(json['startLine']),
start_column=float(json['startColumn']),
length=float(json['length']),
+ end_line=float(json['endLine']),
+ end_column=float(json['endColumn']),
source_map_url=str(json['sourceMapURL']) if 'sourceMapURL' in json else None,
owner_node=dom.BackendNodeId.from_json(json['ownerNode']) if 'ownerNode' in json else None,
has_source_url=bool(json['hasSourceURL']) if 'hasSourceURL' in json else None,
@@ -279,6 +305,14 @@ class CSSRule:
#: starting with the innermost one, going outwards.
media: typing.Optional[typing.List[CSSMedia]] = None
+ #: Container query list array (for rules involving container queries).
+ #: The array enumerates container queries starting with the innermost one, going outwards.
+ container_queries: typing.Optional[typing.List[CSSContainerQuery]] = None
+
+ #: @supports CSS at-rule array.
+ #: The array enumerates @supports at-rules starting with the innermost one, going outwards.
+ supports: typing.Optional[typing.List[CSSSupports]] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['selectorList'] = self.selector_list.to_json()
@@ -288,6 +322,10 @@ def to_json(self) -> T_JSON_DICT:
json['styleSheetId'] = self.style_sheet_id.to_json()
if self.media is not None:
json['media'] = [i.to_json() for i in self.media]
+ if self.container_queries is not None:
+ json['containerQueries'] = [i.to_json() for i in self.container_queries]
+ if self.supports is not None:
+ json['supports'] = [i.to_json() for i in self.supports]
return json
@classmethod
@@ -298,6 +336,8 @@ def from_json(cls, json: T_JSON_DICT) -> CSSRule:
style=CSSStyle.from_json(json['style']),
style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if 'styleSheetId' in json else None,
media=[CSSMedia.from_json(i) for i in json['media']] if 'media' in json else None,
+ container_queries=[CSSContainerQuery.from_json(i) for i in json['containerQueries']] if 'containerQueries' in json else None,
+ supports=[CSSSupports.from_json(i) for i in json['supports']] if 'supports' in json else None,
)
@@ -647,6 +687,78 @@ def from_json(cls, json: T_JSON_DICT) -> MediaQueryExpression:
)
+@dataclass
+class CSSContainerQuery:
+ '''
+ CSS container query rule descriptor.
+ '''
+ #: Container query text.
+ text: str
+
+ #: The associated rule header range in the enclosing stylesheet (if
+ #: available).
+ range_: typing.Optional[SourceRange] = None
+
+ #: Identifier of the stylesheet containing this object (if exists).
+ style_sheet_id: typing.Optional[StyleSheetId] = None
+
+ #: Optional name for the container.
+ name: typing.Optional[str] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['text'] = self.text
+ if self.range_ is not None:
+ json['range'] = self.range_.to_json()
+ if self.style_sheet_id is not None:
+ json['styleSheetId'] = self.style_sheet_id.to_json()
+ if self.name is not None:
+ json['name'] = self.name
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> CSSContainerQuery:
+ return cls(
+ text=str(json['text']),
+ range_=SourceRange.from_json(json['range']) if 'range' in json else None,
+ style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if 'styleSheetId' in json else None,
+ name=str(json['name']) if 'name' in json else None,
+ )
+
+
+@dataclass
+class CSSSupports:
+ '''
+ CSS Supports at-rule descriptor.
+ '''
+ #: Supports rule text.
+ text: str
+
+ #: The associated rule header range in the enclosing stylesheet (if
+ #: available).
+ range_: typing.Optional[SourceRange] = None
+
+ #: Identifier of the stylesheet containing this object (if exists).
+ style_sheet_id: typing.Optional[StyleSheetId] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['text'] = self.text
+ if self.range_ is not None:
+ json['range'] = self.range_.to_json()
+ if self.style_sheet_id is not None:
+ json['styleSheetId'] = self.style_sheet_id.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> CSSSupports:
+ return cls(
+ text=str(json['text']),
+ range_=SourceRange.from_json(json['range']) if 'range' in json else None,
+ style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if 'styleSheetId' in json else None,
+ )
+
+
@dataclass
class PlatformFontUsage:
'''
@@ -677,10 +789,51 @@ def from_json(cls, json: T_JSON_DICT) -> PlatformFontUsage:
)
+@dataclass
+class FontVariationAxis:
+ '''
+ Information about font variation axes for variable fonts
+ '''
+ #: The font-variation-setting tag (a.k.a. "axis tag").
+ tag: str
+
+ #: Human-readable variation name in the default language (normally, "en").
+ name: str
+
+ #: The minimum value (inclusive) the font supports for this tag.
+ min_value: float
+
+ #: The maximum value (inclusive) the font supports for this tag.
+ max_value: float
+
+ #: The default value.
+ default_value: float
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['tag'] = self.tag
+ json['name'] = self.name
+ json['minValue'] = self.min_value
+ json['maxValue'] = self.max_value
+ json['defaultValue'] = self.default_value
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> FontVariationAxis:
+ return cls(
+ tag=str(json['tag']),
+ name=str(json['name']),
+ min_value=float(json['minValue']),
+ max_value=float(json['maxValue']),
+ default_value=float(json['defaultValue']),
+ )
+
+
@dataclass
class FontFace:
'''
Properties of a web font: https://www.w3.org/TR/2008/REC-CSS2-20080411/fonts.html#font-descriptions
+ and additional information such as platformFontFamily and fontVariationAxes.
'''
#: The font-family.
font_family: str
@@ -706,6 +859,9 @@ class FontFace:
#: The resolved platform font family
platform_font_family: str
+ #: Available variation settings (a.k.a. "axes").
+ font_variation_axes: typing.Optional[typing.List[FontVariationAxis]] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['fontFamily'] = self.font_family
@@ -716,6 +872,8 @@ def to_json(self) -> T_JSON_DICT:
json['unicodeRange'] = self.unicode_range
json['src'] = self.src
json['platformFontFamily'] = self.platform_font_family
+ if self.font_variation_axes is not None:
+ json['fontVariationAxes'] = [i.to_json() for i in self.font_variation_axes]
return json
@classmethod
@@ -729,6 +887,7 @@ def from_json(cls, json: T_JSON_DICT) -> FontFace:
unicode_range=str(json['unicodeRange']),
src=str(json['src']),
platform_font_family=str(json['platformFontFamily']),
+ font_variation_axes=[FontVariationAxis.from_json(i) for i in json['fontVariationAxes']] if 'fontVariationAxes' in json else None,
)
@@ -1085,6 +1244,45 @@ def get_style_sheet_text(
return str(json['text'])
+def track_computed_style_updates(
+ properties_to_track: typing.List[CSSComputedStyleProperty]
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Starts tracking the given computed styles for updates. The specified array of properties
+ replaces the one previously specified. Pass empty array to disable tracking.
+ Use takeComputedStyleUpdates to retrieve the list of nodes that had properties modified.
+ The changes to computed style properties are only tracked for nodes pushed to the front-end
+ by the DOM agent. If no changes to the tracked properties occur after the node has been pushed
+ to the front-end, no updates will be issued for the node.
+
+ **EXPERIMENTAL**
+
+ :param properties_to_track:
+ '''
+ params: T_JSON_DICT = dict()
+ params['propertiesToTrack'] = [i.to_json() for i in properties_to_track]
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'CSS.trackComputedStyleUpdates',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def take_computed_style_updates() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[dom.NodeId]]:
+ '''
+ Polls the next batch of computed style updates.
+
+ **EXPERIMENTAL**
+
+ :returns: The list of node Ids that have their tracked computed styles updated
+ '''
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'CSS.takeComputedStyleUpdates',
+ }
+ json = yield cmd_dict
+ return [dom.NodeId.from_json(i) for i in json['nodeIds']]
+
+
def set_effective_property_value_for_node(
node_id: dom.NodeId,
property_name: str,
@@ -1159,6 +1357,60 @@ def set_media_text(
return CSSMedia.from_json(json['media'])
+def set_container_query_text(
+ style_sheet_id: StyleSheetId,
+ range_: SourceRange,
+ text: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,CSSContainerQuery]:
+ '''
+ Modifies the expression of a container query.
+
+ **EXPERIMENTAL**
+
+ :param style_sheet_id:
+ :param range_:
+ :param text:
+ :returns: The resulting CSS container query rule after modification.
+ '''
+ params: T_JSON_DICT = dict()
+ params['styleSheetId'] = style_sheet_id.to_json()
+ params['range'] = range_.to_json()
+ params['text'] = text
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'CSS.setContainerQueryText',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return CSSContainerQuery.from_json(json['containerQuery'])
+
+
+def set_supports_text(
+ style_sheet_id: StyleSheetId,
+ range_: SourceRange,
+ text: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,CSSSupports]:
+ '''
+ Modifies the expression of a supports at-rule.
+
+ **EXPERIMENTAL**
+
+ :param style_sheet_id:
+ :param range_:
+ :param text:
+ :returns: The resulting CSS Supports rule after modification.
+ '''
+ params: T_JSON_DICT = dict()
+ params['styleSheetId'] = style_sheet_id.to_json()
+ params['range'] = range_.to_json()
+ params['text'] = text
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'CSS.setSupportsText',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return CSSSupports.from_json(json['supports'])
+
+
def set_rule_selector(
style_sheet_id: StyleSheetId,
range_: SourceRange,
@@ -1249,18 +1501,43 @@ def stop_rule_usage_tracking() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typin
return [RuleUsage.from_json(i) for i in json['ruleUsage']]
-def take_coverage_delta() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[RuleUsage]]:
+def take_coverage_delta() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.List[RuleUsage], float]]:
'''
Obtain list of rules that became used since last call to this method (or since start of coverage
instrumentation)
- :returns:
+ :returns: A tuple with the following items:
+
+ 0. **coverage** -
+ 1. **timestamp** - Monotonically increasing time, in seconds.
'''
cmd_dict: T_JSON_DICT = {
'method': 'CSS.takeCoverageDelta',
}
json = yield cmd_dict
- return [RuleUsage.from_json(i) for i in json['coverage']]
+ return (
+ [RuleUsage.from_json(i) for i in json['coverage']],
+ float(json['timestamp'])
+ )
+
+
+def set_local_fonts_enabled(
+ enabled: bool
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Enables/disables rendering of local CSS fonts (enabled by default).
+
+ **EXPERIMENTAL**
+
+ :param enabled: Whether rendering of local fonts is enabled.
+ '''
+ params: T_JSON_DICT = dict()
+ params['enabled'] = enabled
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'CSS.setLocalFontsEnabled',
+ 'params': params,
+ }
+ json = yield cmd_dict
@event_class('CSS.fontsUpdated')
diff --git a/cdp/debugger.py b/cdp/debugger.py
index 873d783..adc0cb4 100644
--- a/cdp/debugger.py
+++ b/cdp/debugger.py
@@ -12,6 +12,7 @@
import typing
from . import runtime
+from deprecated.sphinx import deprecated # type: ignore
class BreakpointId(str):
@@ -98,6 +99,33 @@ def from_json(cls, json: T_JSON_DICT) -> ScriptPosition:
)
+@dataclass
+class LocationRange:
+ '''
+ Location range within one script.
+ '''
+ script_id: runtime.ScriptId
+
+ start: ScriptPosition
+
+ end: ScriptPosition
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['scriptId'] = self.script_id.to_json()
+ json['start'] = self.start.to_json()
+ json['end'] = self.end.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> LocationRange:
+ return cls(
+ script_id=runtime.ScriptId.from_json(json['scriptId']),
+ start=ScriptPosition.from_json(json['start']),
+ end=ScriptPosition.from_json(json['end']),
+ )
+
+
@dataclass
class CallFrame:
'''
@@ -257,6 +285,47 @@ def from_json(cls, json: T_JSON_DICT) -> BreakLocation:
)
+class ScriptLanguage(enum.Enum):
+ '''
+ Enum of possible script languages.
+ '''
+ JAVA_SCRIPT = "JavaScript"
+ WEB_ASSEMBLY = "WebAssembly"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> ScriptLanguage:
+ return cls(json)
+
+
+@dataclass
+class DebugSymbols:
+ '''
+ Debug symbols available for a wasm script.
+ '''
+ #: Type of the debug symbols.
+ type_: str
+
+ #: URL of the external symbol source.
+ external_url: typing.Optional[str] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['type'] = self.type_
+ if self.external_url is not None:
+ json['externalURL'] = self.external_url
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> DebugSymbols:
+ return cls(
+ type_=str(json['type']),
+ external_url=str(json['externalURL']) if 'externalURL' in json else None,
+ )
+
+
def continue_to_location(
location: Location,
target_call_frames: typing.Optional[str] = None
@@ -295,7 +364,7 @@ def enable(
Enables debugger for the given page. Clients should not assume that the debugging has been
enabled until the result for this command is received.
- :param max_scripts_cache_size: **(EXPERIMENTAL)** *(Optional)* The maximum size in bytes of collected scripts (not referenced by other heap objects) the debugger can hold. Puts no limit if paramter is omitted.
+ :param max_scripts_cache_size: **(EXPERIMENTAL)** *(Optional)* The maximum size in bytes of collected scripts (not referenced by other heap objects) the debugger can hold. Puts no limit if parameter is omitted.
:returns: Unique identifier of the debugger.
'''
params: T_JSON_DICT = dict()
@@ -395,12 +464,15 @@ def get_possible_breakpoints(
def get_script_source(
script_id: runtime.ScriptId
- ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,str]:
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[str, typing.Optional[str]]]:
'''
Returns source for the script with given id.
:param script_id: Id of the script to get source for.
- :returns: Script source.
+ :returns: A tuple with the following items:
+
+ 0. **scriptSource** - Script source (empty in case of Wasm bytecode).
+ 1. **bytecode** - *(Optional)* Wasm bytecode. (Encoded as a base64 string when passed over JSON)
'''
params: T_JSON_DICT = dict()
params['scriptId'] = script_id.to_json()
@@ -409,7 +481,32 @@ def get_script_source(
'params': params,
}
json = yield cmd_dict
- return str(json['scriptSource'])
+ return (
+ str(json['scriptSource']),
+ str(json['bytecode']) if 'bytecode' in json else None
+ )
+
+
+@deprecated(version="1.3")
+def get_wasm_bytecode(
+ script_id: runtime.ScriptId
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,str]:
+ '''
+ This command is deprecated. Use getScriptSource instead.
+
+ .. deprecated:: 1.3
+
+ :param script_id: Id of the Wasm script to get source for.
+ :returns: Script source. (Encoded as a base64 string when passed over JSON)
+ '''
+ params: T_JSON_DICT = dict()
+ params['scriptId'] = script_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Debugger.getWasmBytecode',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return str(json['bytecode'])
def get_stack_trace(
@@ -443,12 +540,15 @@ def pause() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
json = yield cmd_dict
+@deprecated(version="1.3")
def pause_on_async_call(
parent_stack_trace_id: runtime.StackTraceId
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
+ .. deprecated:: 1.3
+
**EXPERIMENTAL**
:param parent_stack_trace_id: Debugger will pause when async call with given stack trace is started.
@@ -479,12 +579,15 @@ def remove_breakpoint(
json = yield cmd_dict
+@deprecated(version="1.3")
def restart_frame(
call_frame_id: CallFrameId
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.List[CallFrame], typing.Optional[runtime.StackTrace], typing.Optional[runtime.StackTraceId]]]:
'''
Restarts particular call frame from the beginning.
+ .. deprecated:: 1.3
+
:param call_frame_id: Call frame identifier to evaluate on.
:returns: A tuple with the following items:
@@ -506,12 +609,20 @@ def restart_frame(
)
-def resume() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+def resume(
+ terminate_on_resume: typing.Optional[bool] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Resumes JavaScript execution.
+
+ :param terminate_on_resume: *(Optional)* Set to true to terminate execution upon resuming execution. In contrast to Runtime.terminateExecution, this will allow executing further JavaScript (i.e. via evaluation) until execution of the paused code is actually resumed, at which point termination is triggered. If execution is currently not paused, this parameter has no effect.
'''
+ params: T_JSON_DICT = dict()
+ if terminate_on_resume is not None:
+ params['terminateOnResume'] = terminate_on_resume
cmd_dict: T_JSON_DICT = {
'method': 'Debugger.resume',
+ 'params': params,
}
json = yield cmd_dict
@@ -869,16 +980,20 @@ def set_variable_value(
def step_into(
- break_on_async_call: typing.Optional[bool] = None
+ break_on_async_call: typing.Optional[bool] = None,
+ skip_list: typing.Optional[typing.List[LocationRange]] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Steps into the function call.
- :param break_on_async_call: **(EXPERIMENTAL)** *(Optional)* Debugger will issue additional Debugger.paused notification if any async task is scheduled before next pause.
+ :param break_on_async_call: **(EXPERIMENTAL)** *(Optional)* Debugger will pause on the execution of the first async task which was scheduled before next pause.
+ :param skip_list: **(EXPERIMENTAL)** *(Optional)* The skipList specifies location ranges that should be skipped on step into.
'''
params: T_JSON_DICT = dict()
if break_on_async_call is not None:
params['breakOnAsyncCall'] = break_on_async_call
+ if skip_list is not None:
+ params['skipList'] = [i.to_json() for i in skip_list]
cmd_dict: T_JSON_DICT = {
'method': 'Debugger.stepInto',
'params': params,
@@ -896,12 +1011,20 @@ def step_out() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
json = yield cmd_dict
-def step_over() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+def step_over(
+ skip_list: typing.Optional[typing.List[LocationRange]] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Steps over the statement.
+
+ :param skip_list: **(EXPERIMENTAL)** *(Optional)* The skipList specifies location ranges that should be skipped on step over.
'''
+ params: T_JSON_DICT = dict()
+ if skip_list is not None:
+ params['skipList'] = [i.to_json() for i in skip_list]
cmd_dict: T_JSON_DICT = {
'method': 'Debugger.stepOver',
+ 'params': params,
}
json = yield cmd_dict
@@ -943,8 +1066,7 @@ class Paused:
async_stack_trace: typing.Optional[runtime.StackTrace]
#: Async stack trace, if any.
async_stack_trace_id: typing.Optional[runtime.StackTraceId]
- #: Just scheduled async call will have this stack trace as parent stack during async execution.
- #: This field is available only after ``Debugger.stepInto`` call with ``breakOnAsynCall`` flag.
+ #: Never present, will be removed.
async_call_stack_trace_id: typing.Optional[runtime.StackTraceId]
@classmethod
@@ -1009,6 +1131,12 @@ class ScriptFailedToParse:
length: typing.Optional[int]
#: JavaScript top stack frame of where the script parsed event was triggered if available.
stack_trace: typing.Optional[runtime.StackTrace]
+ #: If the scriptLanguage is WebAssembly, the code section offset in the module.
+ code_offset: typing.Optional[int]
+ #: The language of the script.
+ script_language: typing.Optional[ScriptLanguage]
+ #: The name the embedder supplied for this script.
+ embedder_name: typing.Optional[str]
@classmethod
def from_json(cls, json: T_JSON_DICT) -> ScriptFailedToParse:
@@ -1026,7 +1154,10 @@ def from_json(cls, json: T_JSON_DICT) -> ScriptFailedToParse:
has_source_url=bool(json['hasSourceURL']) if 'hasSourceURL' in json else None,
is_module=bool(json['isModule']) if 'isModule' in json else None,
length=int(json['length']) if 'length' in json else None,
- stack_trace=runtime.StackTrace.from_json(json['stackTrace']) if 'stackTrace' in json else None
+ stack_trace=runtime.StackTrace.from_json(json['stackTrace']) if 'stackTrace' in json else None,
+ code_offset=int(json['codeOffset']) if 'codeOffset' in json else None,
+ script_language=ScriptLanguage.from_json(json['scriptLanguage']) if 'scriptLanguage' in json else None,
+ embedder_name=str(json['embedderName']) if 'embedderName' in json else None
)
@@ -1067,6 +1198,14 @@ class ScriptParsed:
length: typing.Optional[int]
#: JavaScript top stack frame of where the script parsed event was triggered if available.
stack_trace: typing.Optional[runtime.StackTrace]
+ #: If the scriptLanguage is WebAssembly, the code section offset in the module.
+ code_offset: typing.Optional[int]
+ #: The language of the script.
+ script_language: typing.Optional[ScriptLanguage]
+ #: If the scriptLanguage is WebAssembly, the source of debug symbols for the module.
+ debug_symbols: typing.Optional[DebugSymbols]
+ #: The name the embedder supplied for this script.
+ embedder_name: typing.Optional[str]
@classmethod
def from_json(cls, json: T_JSON_DICT) -> ScriptParsed:
@@ -1085,5 +1224,9 @@ def from_json(cls, json: T_JSON_DICT) -> ScriptParsed:
has_source_url=bool(json['hasSourceURL']) if 'hasSourceURL' in json else None,
is_module=bool(json['isModule']) if 'isModule' in json else None,
length=int(json['length']) if 'length' in json else None,
- stack_trace=runtime.StackTrace.from_json(json['stackTrace']) if 'stackTrace' in json else None
+ stack_trace=runtime.StackTrace.from_json(json['stackTrace']) if 'stackTrace' in json else None,
+ code_offset=int(json['codeOffset']) if 'codeOffset' in json else None,
+ script_language=ScriptLanguage.from_json(json['scriptLanguage']) if 'scriptLanguage' in json else None,
+ debug_symbols=DebugSymbols.from_json(json['debugSymbols']) if 'debugSymbols' in json else None,
+ embedder_name=str(json['embedderName']) if 'embedderName' in json else None
)
diff --git a/cdp/dom.py b/cdp/dom.py
index 8e03c98..1faf4d1 100644
--- a/cdp/dom.py
+++ b/cdp/dom.py
@@ -13,6 +13,7 @@
from . import page
from . import runtime
+from deprecated.sphinx import deprecated # type: ignore
class NodeId(int):
@@ -83,8 +84,13 @@ class PseudoType(enum.Enum):
FIRST_LETTER = "first-letter"
BEFORE = "before"
AFTER = "after"
+ MARKER = "marker"
BACKDROP = "backdrop"
SELECTION = "selection"
+ TARGET_TEXT = "target-text"
+ SPELLING_ERROR = "spelling-error"
+ GRAMMAR_ERROR = "grammar-error"
+ HIGHLIGHT = "highlight"
FIRST_LINE_INHERITED = "first-line-inherited"
SCROLLBAR = "scrollbar"
SCROLLBAR_THUMB = "scrollbar-thumb"
@@ -94,6 +100,10 @@ class PseudoType(enum.Enum):
SCROLLBAR_CORNER = "scrollbar-corner"
RESIZER = "resizer"
INPUT_LIST_BUTTON = "input-list-button"
+ TRANSITION = "transition"
+ TRANSITION_CONTAINER = "transition-container"
+ TRANSITION_OLD_CONTENT = "transition-old-content"
+ TRANSITION_NEW_CONTENT = "transition-new-content"
def to_json(self) -> str:
return self.value
@@ -119,6 +129,22 @@ def from_json(cls, json: str) -> ShadowRootType:
return cls(json)
+class CompatibilityMode(enum.Enum):
+ '''
+ Document compatibility mode.
+ '''
+ QUIRKS_MODE = "QuirksMode"
+ LIMITED_QUIRKS_MODE = "LimitedQuirksMode"
+ NO_QUIRKS_MODE = "NoQuirksMode"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> CompatibilityMode:
+ return cls(json)
+
+
@dataclass
class Node:
'''
@@ -202,7 +228,9 @@ class Node:
#: Pseudo elements associated with this node.
pseudo_elements: typing.Optional[typing.List[Node]] = None
- #: Import document for the HTMLImport links.
+ #: Deprecated, as the HTML Imports API has been removed (crbug.com/937746).
+ #: This property used to return the imported document for the HTMLImport links.
+ #: The property is always undefined now.
imported_document: typing.Optional[Node] = None
#: Distributed nodes for given insertion point.
@@ -211,6 +239,8 @@ class Node:
#: Whether the node is SVG.
is_svg: typing.Optional[bool] = None
+ compatibility_mode: typing.Optional[CompatibilityMode] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['nodeId'] = self.node_id.to_json()
@@ -263,6 +293,8 @@ def to_json(self) -> T_JSON_DICT:
json['distributedNodes'] = [i.to_json() for i in self.distributed_nodes]
if self.is_svg is not None:
json['isSVG'] = self.is_svg
+ if self.compatibility_mode is not None:
+ json['compatibilityMode'] = self.compatibility_mode.to_json()
return json
@classmethod
@@ -296,6 +328,7 @@ def from_json(cls, json: T_JSON_DICT) -> Node:
imported_document=Node.from_json(json['importedDocument']) if 'importedDocument' in json else None,
distributed_nodes=[BackendNode.from_json(i) for i in json['distributedNodes']] if 'distributedNodes' in json else None,
is_svg=bool(json['isSVG']) if 'isSVG' in json else None,
+ compatibility_mode=CompatibilityMode.from_json(json['compatibilityMode']) if 'compatibilityMode' in json else None,
)
@@ -466,6 +499,28 @@ def from_json(cls, json: T_JSON_DICT) -> Rect:
)
+@dataclass
+class CSSComputedStyleProperty:
+ #: Computed style property name.
+ name: str
+
+ #: Computed style property value.
+ value: str
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['name'] = self.name
+ json['value'] = self.value
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> CSSComputedStyleProperty:
+ return cls(
+ name=str(json['name']),
+ value=str(json['value']),
+ )
+
+
def collect_class_names_from_subtree(
node_id: NodeId
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[str]]:
@@ -553,6 +608,40 @@ def describe_node(
return Node.from_json(json['node'])
+def scroll_into_view_if_needed(
+ node_id: typing.Optional[NodeId] = None,
+ backend_node_id: typing.Optional[BackendNodeId] = None,
+ object_id: typing.Optional[runtime.RemoteObjectId] = None,
+ rect: typing.Optional[Rect] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Scrolls the specified rect of the given node into view if not already visible.
+ Note: exactly one between nodeId, backendNodeId and objectId should be passed
+ to identify the node.
+
+ **EXPERIMENTAL**
+
+ :param node_id: *(Optional)* Identifier of the node.
+ :param backend_node_id: *(Optional)* Identifier of the backend node.
+ :param object_id: *(Optional)* JavaScript object id of the node wrapper.
+ :param rect: *(Optional)* The rect to be scrolled into view, relative to the node's border box, in CSS pixels. When omitted, center of the node will be used, similar to Element.scrollIntoView.
+ '''
+ params: T_JSON_DICT = dict()
+ if node_id is not None:
+ params['nodeId'] = node_id.to_json()
+ if backend_node_id is not None:
+ params['backendNodeId'] = backend_node_id.to_json()
+ if object_id is not None:
+ params['objectId'] = object_id.to_json()
+ if rect is not None:
+ params['rect'] = rect.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'DOM.scrollIntoViewIfNeeded',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def disable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Disables DOM agent for the given page.
@@ -583,12 +672,20 @@ def discard_search_results(
json = yield cmd_dict
-def enable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+def enable(
+ include_whitespace: typing.Optional[str] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Enables DOM agent for the given page.
+
+ :param include_whitespace: **(EXPERIMENTAL)** *(Optional)* Whether to include whitespaces in the children array of returned Nodes.
'''
+ params: T_JSON_DICT = dict()
+ if include_whitespace is not None:
+ params['includeWhitespace'] = include_whitespace
cmd_dict: T_JSON_DICT = {
'method': 'DOM.enable',
+ 'params': params,
}
json = yield cmd_dict
@@ -721,12 +818,17 @@ def get_document(
return Node.from_json(json['root'])
+@deprecated(version="1.3")
def get_flattened_document(
depth: typing.Optional[int] = None,
pierce: typing.Optional[bool] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[Node]]:
'''
Returns the root DOM node (and optionally the subtree) to the caller.
+ Deprecated, as it is not designed to work well with the rest of the DOM agent.
+ Use DOMSnapshot.captureSnapshot instead.
+
+ .. deprecated:: 1.3
:param depth: *(Optional)* The maximum depth at which children should be retrieved, defaults to 1. Use -1 for the entire subtree or provide an integer larger than 0.
:param pierce: *(Optional)* Whether or not iframes and shadow roots should be traversed when returning the subtree (default is false).
@@ -745,30 +847,61 @@ def get_flattened_document(
return [Node.from_json(i) for i in json['nodes']]
+def get_nodes_for_subtree_by_style(
+ node_id: NodeId,
+ computed_styles: typing.List[CSSComputedStyleProperty],
+ pierce: typing.Optional[bool] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[NodeId]]:
+ '''
+ Finds nodes with a given computed style in a subtree.
+
+ **EXPERIMENTAL**
+
+ :param node_id: Node ID pointing to the root of a subtree.
+ :param computed_styles: The style to filter nodes by (includes nodes if any of properties matches).
+ :param pierce: *(Optional)* Whether or not iframes and shadow roots in the same target should be traversed when returning the results (default is false).
+ :returns: Resulting nodes.
+ '''
+ params: T_JSON_DICT = dict()
+ params['nodeId'] = node_id.to_json()
+ params['computedStyles'] = [i.to_json() for i in computed_styles]
+ if pierce is not None:
+ params['pierce'] = pierce
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'DOM.getNodesForSubtreeByStyle',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return [NodeId.from_json(i) for i in json['nodeIds']]
+
+
def get_node_for_location(
x: int,
y: int,
- include_user_agent_shadow_dom: typing.Optional[bool] = None
- ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[BackendNodeId, typing.Optional[NodeId]]]:
+ include_user_agent_shadow_dom: typing.Optional[bool] = None,
+ ignore_pointer_events_none: typing.Optional[bool] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[BackendNodeId, page.FrameId, typing.Optional[NodeId]]]:
'''
Returns node id at given location. Depending on whether DOM domain is enabled, nodeId is
either returned or not.
- **EXPERIMENTAL**
-
:param x: X coordinate.
:param y: Y coordinate.
:param include_user_agent_shadow_dom: *(Optional)* False to skip to the nearest non-UA shadow root ancestor (default: false).
+ :param ignore_pointer_events_none: *(Optional)* Whether to ignore pointer-events: none on elements and hit test them.
:returns: A tuple with the following items:
0. **backendNodeId** - Resulting node.
- 1. **nodeId** - *(Optional)* Id of the node at given coordinates, only when enabled and requested document.
+ 1. **frameId** - Frame this node belongs to.
+ 2. **nodeId** - *(Optional)* Id of the node at given coordinates, only when enabled and requested document.
'''
params: T_JSON_DICT = dict()
params['x'] = x
params['y'] = y
if include_user_agent_shadow_dom is not None:
params['includeUserAgentShadowDOM'] = include_user_agent_shadow_dom
+ if ignore_pointer_events_none is not None:
+ params['ignorePointerEventsNone'] = ignore_pointer_events_none
cmd_dict: T_JSON_DICT = {
'method': 'DOM.getNodeForLocation',
'params': params,
@@ -776,6 +909,7 @@ def get_node_for_location(
json = yield cmd_dict
return (
BackendNodeId.from_json(json['backendNodeId']),
+ page.FrameId.from_json(json['frameId']),
NodeId.from_json(json['nodeId']) if 'nodeId' in json else None
)
@@ -1249,6 +1383,46 @@ def set_file_input_files(
json = yield cmd_dict
+def set_node_stack_traces_enabled(
+ enable: bool
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Sets if stack traces should be captured for Nodes. See ``Node.getNodeStackTraces``. Default is disabled.
+
+ **EXPERIMENTAL**
+
+ :param enable: Enable or disable.
+ '''
+ params: T_JSON_DICT = dict()
+ params['enable'] = enable
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'DOM.setNodeStackTracesEnabled',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def get_node_stack_traces(
+ node_id: NodeId
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Optional[runtime.StackTrace]]:
+ '''
+ Gets stack traces associated with a Node. As of now, only provides stack trace for Node creation.
+
+ **EXPERIMENTAL**
+
+ :param node_id: Id of the node to get stack traces for.
+ :returns: *(Optional)* Creation stack trace, if available.
+ '''
+ params: T_JSON_DICT = dict()
+ params['nodeId'] = node_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'DOM.getNodeStackTraces',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return runtime.StackTrace.from_json(json['creation']) if 'creation' in json else None
+
+
def get_file_info(
object_id: runtime.RemoteObjectId
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,str]:
@@ -1392,6 +1566,55 @@ def get_frame_owner(
)
+def get_container_for_node(
+ node_id: NodeId,
+ container_name: typing.Optional[str] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Optional[NodeId]]:
+ '''
+ Returns the container of the given node based on container query conditions.
+ If containerName is given, it will find the nearest container with a matching name;
+ otherwise it will find the nearest container regardless of its container name.
+
+ **EXPERIMENTAL**
+
+ :param node_id:
+ :param container_name: *(Optional)*
+ :returns: *(Optional)* The container node for the given node, or null if not found.
+ '''
+ params: T_JSON_DICT = dict()
+ params['nodeId'] = node_id.to_json()
+ if container_name is not None:
+ params['containerName'] = container_name
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'DOM.getContainerForNode',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return NodeId.from_json(json['nodeId']) if 'nodeId' in json else None
+
+
+def get_querying_descendants_for_container(
+ node_id: NodeId
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[NodeId]]:
+ '''
+ Returns the descendants of a container query container that have
+ container queries against this container.
+
+ **EXPERIMENTAL**
+
+ :param node_id: Id of the container node to find querying descendants from.
+ :returns: Descendant nodes with container queries against the given container.
+ '''
+ params: T_JSON_DICT = dict()
+ params['nodeId'] = node_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'DOM.getQueryingDescendantsForContainer',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return [NodeId.from_json(i) for i in json['nodeIds']]
+
+
@event_class('DOM.attributeModified')
@dataclass
class AttributeModified:
@@ -1518,9 +1741,9 @@ class DistributedNodesUpdated:
'''
**EXPERIMENTAL**
- Called when distrubution is changed.
+ Called when distribution is changed.
'''
- #: Insertion point where distrubuted nodes were updated.
+ #: Insertion point where distributed nodes were updated.
insertion_point_id: NodeId
#: Distributed nodes for given insertion point.
distributed_nodes: typing.List[BackendNode]
diff --git a/cdp/dom_debugger.py b/cdp/dom_debugger.py
index 0b09a58..d1589ac 100644
--- a/cdp/dom_debugger.py
+++ b/cdp/dom_debugger.py
@@ -31,6 +31,21 @@ def from_json(cls, json: str) -> DOMBreakpointType:
return cls(json)
+class CSPViolationType(enum.Enum):
+ '''
+ CSP Violation type.
+ '''
+ TRUSTEDTYPE_SINK_VIOLATION = "trustedtype-sink-violation"
+ TRUSTEDTYPE_POLICY_VIOLATION = "trustedtype-policy-violation"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> CSPViolationType:
+ return cls(json)
+
+
@dataclass
class EventListener:
'''
@@ -203,6 +218,25 @@ def remove_xhr_breakpoint(
json = yield cmd_dict
+def set_break_on_csp_violation(
+ violation_types: typing.List[CSPViolationType]
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Sets breakpoint on particular CSP violations.
+
+ **EXPERIMENTAL**
+
+ :param violation_types: CSP Violations to stop upon.
+ '''
+ params: T_JSON_DICT = dict()
+ params['violationTypes'] = [i.to_json() for i in violation_types]
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'DOMDebugger.setBreakOnCSPViolation',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def set_dom_breakpoint(
node_id: dom.NodeId,
type_: DOMBreakpointType
diff --git a/cdp/dom_snapshot.py b/cdp/dom_snapshot.py
index fa63767..4a93c19 100644
--- a/cdp/dom_snapshot.py
+++ b/cdp/dom_snapshot.py
@@ -445,6 +445,9 @@ class DocumentSnapshot:
#: Document URL that ``Document`` or ``FrameOwner`` node points to.
document_url: StringIndex
+ #: Document title.
+ title: StringIndex
+
#: Base URL that ``Document`` or ``FrameOwner`` node uses for URL completion.
base_url: StringIndex
@@ -478,9 +481,16 @@ class DocumentSnapshot:
#: Vertical scroll offset.
scroll_offset_y: typing.Optional[float] = None
+ #: Document content width.
+ content_width: typing.Optional[float] = None
+
+ #: Document content height.
+ content_height: typing.Optional[float] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['documentURL'] = self.document_url.to_json()
+ json['title'] = self.title.to_json()
json['baseURL'] = self.base_url.to_json()
json['contentLanguage'] = self.content_language.to_json()
json['encodingName'] = self.encoding_name.to_json()
@@ -494,12 +504,17 @@ def to_json(self) -> T_JSON_DICT:
json['scrollOffsetX'] = self.scroll_offset_x
if self.scroll_offset_y is not None:
json['scrollOffsetY'] = self.scroll_offset_y
+ if self.content_width is not None:
+ json['contentWidth'] = self.content_width
+ if self.content_height is not None:
+ json['contentHeight'] = self.content_height
return json
@classmethod
def from_json(cls, json: T_JSON_DICT) -> DocumentSnapshot:
return cls(
document_url=StringIndex.from_json(json['documentURL']),
+ title=StringIndex.from_json(json['title']),
base_url=StringIndex.from_json(json['baseURL']),
content_language=StringIndex.from_json(json['contentLanguage']),
encoding_name=StringIndex.from_json(json['encodingName']),
@@ -511,6 +526,8 @@ def from_json(cls, json: T_JSON_DICT) -> DocumentSnapshot:
text_boxes=TextBoxSnapshot.from_json(json['textBoxes']),
scroll_offset_x=float(json['scrollOffsetX']) if 'scrollOffsetX' in json else None,
scroll_offset_y=float(json['scrollOffsetY']) if 'scrollOffsetY' in json else None,
+ content_width=float(json['contentWidth']) if 'contentWidth' in json else None,
+ content_height=float(json['contentHeight']) if 'contentHeight' in json else None,
)
@@ -525,6 +542,9 @@ class NodeTreeSnapshot:
#: ``Node``'s nodeType.
node_type: typing.Optional[typing.List[int]] = None
+ #: Type of the shadow root the ``Node`` is in. String values are equal to the ``ShadowRootType`` enum.
+ shadow_root_type: typing.Optional[RareStringData] = None
+
#: ``Node``'s nodeName.
node_name: typing.Optional[typing.List[StringIndex]] = None
@@ -572,6 +592,8 @@ def to_json(self) -> T_JSON_DICT:
json['parentIndex'] = [i for i in self.parent_index]
if self.node_type is not None:
json['nodeType'] = [i for i in self.node_type]
+ if self.shadow_root_type is not None:
+ json['shadowRootType'] = self.shadow_root_type.to_json()
if self.node_name is not None:
json['nodeName'] = [i.to_json() for i in self.node_name]
if self.node_value is not None:
@@ -605,6 +627,7 @@ def from_json(cls, json: T_JSON_DICT) -> NodeTreeSnapshot:
return cls(
parent_index=[int(i) for i in json['parentIndex']] if 'parentIndex' in json else None,
node_type=[int(i) for i in json['nodeType']] if 'nodeType' in json else None,
+ shadow_root_type=RareStringData.from_json(json['shadowRootType']) if 'shadowRootType' in json else None,
node_name=[StringIndex.from_json(i) for i in json['nodeName']] if 'nodeName' in json else None,
node_value=[StringIndex.from_json(i) for i in json['nodeValue']] if 'nodeValue' in json else None,
backend_node_id=[dom.BackendNodeId.from_json(i) for i in json['backendNodeId']] if 'backendNodeId' in json else None,
@@ -641,6 +664,11 @@ class LayoutTreeSnapshot:
#: Stacking context information.
stacking_contexts: RareBooleanData
+ #: Global paint order index, which is determined by the stacking order of the nodes. Nodes
+ #: that are painted together will have the same index. Only provided if includePaintOrder in
+ #: captureSnapshot was true.
+ paint_orders: typing.Optional[typing.List[int]] = None
+
#: The offset rect of nodes. Only available when includeDOMRects is set to true
offset_rects: typing.Optional[typing.List[Rectangle]] = None
@@ -650,6 +678,12 @@ class LayoutTreeSnapshot:
#: The client rect of nodes. Only available when includeDOMRects is set to true
client_rects: typing.Optional[typing.List[Rectangle]] = None
+ #: The list of background colors that are blended with colors of overlapping elements.
+ blended_background_colors: typing.Optional[typing.List[StringIndex]] = None
+
+ #: The list of computed text opacities.
+ text_color_opacities: typing.Optional[typing.List[float]] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['nodeIndex'] = [i for i in self.node_index]
@@ -657,12 +691,18 @@ def to_json(self) -> T_JSON_DICT:
json['bounds'] = [i.to_json() for i in self.bounds]
json['text'] = [i.to_json() for i in self.text]
json['stackingContexts'] = self.stacking_contexts.to_json()
+ if self.paint_orders is not None:
+ json['paintOrders'] = [i for i in self.paint_orders]
if self.offset_rects is not None:
json['offsetRects'] = [i.to_json() for i in self.offset_rects]
if self.scroll_rects is not None:
json['scrollRects'] = [i.to_json() for i in self.scroll_rects]
if self.client_rects is not None:
json['clientRects'] = [i.to_json() for i in self.client_rects]
+ if self.blended_background_colors is not None:
+ json['blendedBackgroundColors'] = [i.to_json() for i in self.blended_background_colors]
+ if self.text_color_opacities is not None:
+ json['textColorOpacities'] = [i for i in self.text_color_opacities]
return json
@classmethod
@@ -673,9 +713,12 @@ def from_json(cls, json: T_JSON_DICT) -> LayoutTreeSnapshot:
bounds=[Rectangle.from_json(i) for i in json['bounds']],
text=[StringIndex.from_json(i) for i in json['text']],
stacking_contexts=RareBooleanData.from_json(json['stackingContexts']),
+ paint_orders=[int(i) for i in json['paintOrders']] if 'paintOrders' in json else None,
offset_rects=[Rectangle.from_json(i) for i in json['offsetRects']] if 'offsetRects' in json else None,
scroll_rects=[Rectangle.from_json(i) for i in json['scrollRects']] if 'scrollRects' in json else None,
client_rects=[Rectangle.from_json(i) for i in json['clientRects']] if 'clientRects' in json else None,
+ blended_background_colors=[StringIndex.from_json(i) for i in json['blendedBackgroundColors']] if 'blendedBackgroundColors' in json else None,
+ text_color_opacities=[float(i) for i in json['textColorOpacities']] if 'textColorOpacities' in json else None,
)
@@ -784,7 +827,10 @@ def get_snapshot(
def capture_snapshot(
computed_styles: typing.List[str],
- include_dom_rects: typing.Optional[bool] = None
+ include_paint_order: typing.Optional[bool] = None,
+ include_dom_rects: typing.Optional[bool] = None,
+ include_blended_background_colors: typing.Optional[bool] = None,
+ include_text_color_opacities: typing.Optional[bool] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.List[DocumentSnapshot], typing.List[str]]]:
'''
Returns a document snapshot, including the full DOM tree of the root node (including iframes,
@@ -793,7 +839,10 @@ def capture_snapshot(
flattened.
:param computed_styles: Whitelist of computed styles to return.
+ :param include_paint_order: *(Optional)* Whether to include layout object paint orders into the snapshot.
:param include_dom_rects: *(Optional)* Whether to include DOM rectangles (offsetRects, clientRects, scrollRects) into the snapshot
+ :param include_blended_background_colors: **(EXPERIMENTAL)** *(Optional)* Whether to include blended background colors in the snapshot (default: false). Blended background color is achieved by blending background colors of all elements that overlap with the current element.
+ :param include_text_color_opacities: **(EXPERIMENTAL)** *(Optional)* Whether to include text color opacity in the snapshot (default: false). An element might have the opacity property set that affects the text color of the element. The final text color opacity is computed based on the opacity of all overlapping elements.
:returns: A tuple with the following items:
0. **documents** - The nodes in the DOM tree. The DOMNode at index 0 corresponds to the root document.
@@ -801,8 +850,14 @@ def capture_snapshot(
'''
params: T_JSON_DICT = dict()
params['computedStyles'] = [i for i in computed_styles]
+ if include_paint_order is not None:
+ params['includePaintOrder'] = include_paint_order
if include_dom_rects is not None:
params['includeDOMRects'] = include_dom_rects
+ if include_blended_background_colors is not None:
+ params['includeBlendedBackgroundColors'] = include_blended_background_colors
+ if include_text_color_opacities is not None:
+ params['includeTextColorOpacities'] = include_text_color_opacities
cmd_dict: T_JSON_DICT = {
'method': 'DOMSnapshot.captureSnapshot',
'params': params,
diff --git a/cdp/emulation.py b/cdp/emulation.py
index cdee287..40e10e8 100644
--- a/cdp/emulation.py
+++ b/cdp/emulation.py
@@ -42,6 +42,56 @@ def from_json(cls, json: T_JSON_DICT) -> ScreenOrientation:
)
+@dataclass
+class DisplayFeature:
+ #: Orientation of a display feature in relation to screen
+ orientation: str
+
+ #: The offset from the screen origin in either the x (for vertical
+ #: orientation) or y (for horizontal orientation) direction.
+ offset: int
+
+ #: A display feature may mask content such that it is not physically
+ #: displayed - this length along with the offset describes this area.
+ #: A display feature that only splits content will have a 0 mask_length.
+ mask_length: int
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['orientation'] = self.orientation
+ json['offset'] = self.offset
+ json['maskLength'] = self.mask_length
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> DisplayFeature:
+ return cls(
+ orientation=str(json['orientation']),
+ offset=int(json['offset']),
+ mask_length=int(json['maskLength']),
+ )
+
+
+@dataclass
+class MediaFeature:
+ name: str
+
+ value: str
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['name'] = self.name
+ json['value'] = self.value
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> MediaFeature:
+ return cls(
+ name=str(json['name']),
+ value=str(json['value']),
+ )
+
+
class VirtualTimePolicy(enum.Enum):
'''
advance: If the scheduler runs out of immediate work, the virtual time base may fast forward to
@@ -61,6 +111,96 @@ def from_json(cls, json: str) -> VirtualTimePolicy:
return cls(json)
+@dataclass
+class UserAgentBrandVersion:
+ '''
+ Used to specify User Agent Client Hints to emulate. See https://wicg.github.io/ua-client-hints
+ '''
+ brand: str
+
+ version: str
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['brand'] = self.brand
+ json['version'] = self.version
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> UserAgentBrandVersion:
+ return cls(
+ brand=str(json['brand']),
+ version=str(json['version']),
+ )
+
+
+@dataclass
+class UserAgentMetadata:
+ '''
+ Used to specify User Agent Client Hints to emulate. See https://wicg.github.io/ua-client-hints
+ Missing optional values will be filled in by the target with what it would normally use.
+ '''
+ platform: str
+
+ platform_version: str
+
+ architecture: str
+
+ model: str
+
+ mobile: bool
+
+ brands: typing.Optional[typing.List[UserAgentBrandVersion]] = None
+
+ full_version_list: typing.Optional[typing.List[UserAgentBrandVersion]] = None
+
+ full_version: typing.Optional[str] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['platform'] = self.platform
+ json['platformVersion'] = self.platform_version
+ json['architecture'] = self.architecture
+ json['model'] = self.model
+ json['mobile'] = self.mobile
+ if self.brands is not None:
+ json['brands'] = [i.to_json() for i in self.brands]
+ if self.full_version_list is not None:
+ json['fullVersionList'] = [i.to_json() for i in self.full_version_list]
+ if self.full_version is not None:
+ json['fullVersion'] = self.full_version
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> UserAgentMetadata:
+ return cls(
+ platform=str(json['platform']),
+ platform_version=str(json['platformVersion']),
+ architecture=str(json['architecture']),
+ model=str(json['model']),
+ mobile=bool(json['mobile']),
+ brands=[UserAgentBrandVersion.from_json(i) for i in json['brands']] if 'brands' in json else None,
+ full_version_list=[UserAgentBrandVersion.from_json(i) for i in json['fullVersionList']] if 'fullVersionList' in json else None,
+ full_version=str(json['fullVersion']) if 'fullVersion' in json else None,
+ )
+
+
+class DisabledImageType(enum.Enum):
+ '''
+ Enum of image types that can be disabled.
+ '''
+ AVIF = "avif"
+ JXL = "jxl"
+ WEBP = "webp"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> DisabledImageType:
+ return cls(json)
+
+
def can_emulate() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,bool]:
'''
Tells whether emulation is supported.
@@ -76,7 +216,7 @@ def can_emulate() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,bool]:
def clear_device_metrics_override() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
- Clears the overriden device metrics.
+ Clears the overridden device metrics.
'''
cmd_dict: T_JSON_DICT = {
'method': 'Emulation.clearDeviceMetricsOverride',
@@ -86,7 +226,7 @@ def clear_device_metrics_override() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,
def clear_geolocation_override() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
- Clears the overriden Geolocation Position and Error.
+ Clears the overridden Geolocation Position and Error.
'''
cmd_dict: T_JSON_DICT = {
'method': 'Emulation.clearGeolocationOverride',
@@ -125,6 +265,26 @@ def set_focus_emulation_enabled(
json = yield cmd_dict
+def set_auto_dark_mode_override(
+ enabled: typing.Optional[bool] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Automatically render all web contents using a dark theme.
+
+ **EXPERIMENTAL**
+
+ :param enabled: *(Optional)* Whether to enable or disable automatic dark mode. If not specified, any existing override will be cleared.
+ '''
+ params: T_JSON_DICT = dict()
+ if enabled is not None:
+ params['enabled'] = enabled
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Emulation.setAutoDarkModeOverride',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def set_cpu_throttling_rate(
rate: float
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
@@ -175,7 +335,8 @@ def set_device_metrics_override(
position_y: typing.Optional[int] = None,
dont_set_visible_size: typing.Optional[bool] = None,
screen_orientation: typing.Optional[ScreenOrientation] = None,
- viewport: typing.Optional[page.Viewport] = None
+ viewport: typing.Optional[page.Viewport] = None,
+ display_feature: typing.Optional[DisplayFeature] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Overrides the values of device screen dimensions (window.screen.width, window.screen.height,
@@ -194,6 +355,7 @@ def set_device_metrics_override(
:param dont_set_visible_size: **(EXPERIMENTAL)** *(Optional)* Do not set visible view size, rely upon explicit setVisibleSize call.
:param screen_orientation: *(Optional)* Screen orientation override.
:param viewport: **(EXPERIMENTAL)** *(Optional)* If set, the visible area of the page will be overridden to this viewport. This viewport change is not observed by the page, e.g. viewport-relative elements do not change positions.
+ :param display_feature: **(EXPERIMENTAL)** *(Optional)* If set, the display feature of a multi-segment screen. If not set, multi-segment support is turned-off.
'''
params: T_JSON_DICT = dict()
params['width'] = width
@@ -216,6 +378,8 @@ def set_device_metrics_override(
params['screenOrientation'] = screen_orientation.to_json()
if viewport is not None:
params['viewport'] = viewport.to_json()
+ if display_feature is not None:
+ params['displayFeature'] = display_feature.to_json()
cmd_dict: T_JSON_DICT = {
'method': 'Emulation.setDeviceMetricsOverride',
'params': params,
@@ -285,15 +449,20 @@ def set_emit_touch_events_for_mouse(
def set_emulated_media(
- media: str
+ media: typing.Optional[str] = None,
+ features: typing.Optional[typing.List[MediaFeature]] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
- Emulates the given media for CSS media queries.
+ Emulates the given media type or media feature for CSS media queries.
- :param media: Media type to emulate. Empty string disables the override.
+ :param media: *(Optional)* Media type to emulate. Empty string disables the override.
+ :param features: *(Optional)* Media features to emulate.
'''
params: T_JSON_DICT = dict()
- params['media'] = media
+ if media is not None:
+ params['media'] = media
+ if features is not None:
+ params['features'] = [i.to_json() for i in features]
cmd_dict: T_JSON_DICT = {
'method': 'Emulation.setEmulatedMedia',
'params': params,
@@ -301,6 +470,25 @@ def set_emulated_media(
json = yield cmd_dict
+def set_emulated_vision_deficiency(
+ type_: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Emulates the given vision deficiency.
+
+ **EXPERIMENTAL**
+
+ :param type_: Vision deficiency to emulate.
+ '''
+ params: T_JSON_DICT = dict()
+ params['type'] = type_
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Emulation.setEmulatedVisionDeficiency',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def set_geolocation_override(
latitude: typing.Optional[float] = None,
longitude: typing.Optional[float] = None,
@@ -328,6 +516,40 @@ def set_geolocation_override(
json = yield cmd_dict
+def set_idle_override(
+ is_user_active: bool,
+ is_screen_unlocked: bool
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Overrides the Idle state.
+
+ **EXPERIMENTAL**
+
+ :param is_user_active: Mock isUserActive
+ :param is_screen_unlocked: Mock isScreenUnlocked
+ '''
+ params: T_JSON_DICT = dict()
+ params['isUserActive'] = is_user_active
+ params['isScreenUnlocked'] = is_screen_unlocked
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Emulation.setIdleOverride',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def clear_idle_override() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Clears Idle state overrides.
+
+ **EXPERIMENTAL**
+ '''
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Emulation.clearIdleOverride',
+ }
+ json = yield cmd_dict
+
+
@deprecated(version="1.3")
def set_navigator_overrides(
platform: str
@@ -411,7 +633,6 @@ def set_virtual_time_policy(
policy: VirtualTimePolicy,
budget: typing.Optional[float] = None,
max_virtual_time_task_starvation_count: typing.Optional[int] = None,
- wait_for_navigation: typing.Optional[bool] = None,
initial_virtual_time: typing.Optional[network.TimeSinceEpoch] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,float]:
'''
@@ -423,8 +644,7 @@ def set_virtual_time_policy(
:param policy:
:param budget: *(Optional)* If set, after this many virtual milliseconds have elapsed virtual time will be paused and a virtualTimeBudgetExpired event is sent.
 :param max_virtual_time_task_starvation_count: *(Optional)* If set this specifies the maximum number of tasks that can be run before virtual time is forced forwards to prevent deadlock.
- :param wait_for_navigation: *(Optional)* If set the virtual time policy change should be deferred until any frame starts navigating. Note any previous deferred policy change is superseded.
- :param initial_virtual_time: *(Optional)* If set, base::Time::Now will be overriden to initially return this value.
+ :param initial_virtual_time: *(Optional)* If set, base::Time::Now will be overridden to initially return this value.
:returns: Absolute timestamp at which virtual time was first enabled (up time in milliseconds).
'''
params: T_JSON_DICT = dict()
@@ -433,8 +653,6 @@ def set_virtual_time_policy(
params['budget'] = budget
if max_virtual_time_task_starvation_count is not None:
params['maxVirtualTimeTaskStarvationCount'] = max_virtual_time_task_starvation_count
- if wait_for_navigation is not None:
- params['waitForNavigation'] = wait_for_navigation
if initial_virtual_time is not None:
params['initialVirtualTime'] = initial_virtual_time.to_json()
cmd_dict: T_JSON_DICT = {
@@ -445,6 +663,26 @@ def set_virtual_time_policy(
return float(json['virtualTimeTicksBase'])
+def set_locale_override(
+ locale: typing.Optional[str] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Overrides default host system locale with the specified one.
+
+ **EXPERIMENTAL**
+
+ :param locale: *(Optional)* ICU style C locale (e.g. "en_US"). If not specified or empty, disables the override and restores default host system locale.
+ '''
+ params: T_JSON_DICT = dict()
+ if locale is not None:
+ params['locale'] = locale
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Emulation.setLocaleOverride',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def set_timezone_override(
timezone_id: str
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
@@ -491,10 +729,30 @@ def set_visible_size(
json = yield cmd_dict
+def set_disabled_image_types(
+ image_types: typing.List[DisabledImageType]
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+
+
+ **EXPERIMENTAL**
+
+ :param image_types: Image types to disable.
+ '''
+ params: T_JSON_DICT = dict()
+ params['imageTypes'] = [i.to_json() for i in image_types]
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Emulation.setDisabledImageTypes',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def set_user_agent_override(
user_agent: str,
accept_language: typing.Optional[str] = None,
- platform: typing.Optional[str] = None
+ platform: typing.Optional[str] = None,
+ user_agent_metadata: typing.Optional[UserAgentMetadata] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Allows overriding user agent with the given string.
@@ -502,6 +760,7 @@ def set_user_agent_override(
:param user_agent: User agent to use.
 :param accept_language: *(Optional)* Browser language to emulate.
:param platform: *(Optional)* The platform navigator.platform should return.
+ :param user_agent_metadata: **(EXPERIMENTAL)** *(Optional)* To be sent in Sec-CH-UA-* headers and returned in navigator.userAgentData
'''
params: T_JSON_DICT = dict()
params['userAgent'] = user_agent
@@ -509,6 +768,8 @@ def set_user_agent_override(
params['acceptLanguage'] = accept_language
if platform is not None:
params['platform'] = platform
+ if user_agent_metadata is not None:
+ params['userAgentMetadata'] = user_agent_metadata.to_json()
cmd_dict: T_JSON_DICT = {
'method': 'Emulation.setUserAgentOverride',
'params': params,
diff --git a/cdp/event_breakpoints.py b/cdp/event_breakpoints.py
new file mode 100644
index 0000000..5eb47fa
--- /dev/null
+++ b/cdp/event_breakpoints.py
@@ -0,0 +1,46 @@
+# DO NOT EDIT THIS FILE!
+#
+# This file is generated from the CDP specification. If you need to make
+# changes, edit the generator and regenerate all of the modules.
+#
+# CDP domain: EventBreakpoints (experimental)
+
+from __future__ import annotations
+from cdp.util import event_class, T_JSON_DICT
+from dataclasses import dataclass
+import enum
+import typing
+
+
+def set_instrumentation_breakpoint(
+ event_name: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Sets breakpoint on particular native event.
+
+ :param event_name: Instrumentation name to stop on.
+ '''
+ params: T_JSON_DICT = dict()
+ params['eventName'] = event_name
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'EventBreakpoints.setInstrumentationBreakpoint',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def remove_instrumentation_breakpoint(
+ event_name: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Removes breakpoint on particular native event.
+
+ :param event_name: Instrumentation name to stop on.
+ '''
+ params: T_JSON_DICT = dict()
+ params['eventName'] = event_name
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'EventBreakpoints.removeInstrumentationBreakpoint',
+ 'params': params,
+ }
+ json = yield cmd_dict
diff --git a/cdp/fetch.py b/cdp/fetch.py
index 1043e71..1708f6a 100644
--- a/cdp/fetch.py
+++ b/cdp/fetch.py
@@ -3,7 +3,7 @@
# This file is generated from the CDP specification. If you need to make
# changes, edit the generator and regenerate all of the modules.
#
-# CDP domain: Fetch (experimental)
+# CDP domain: Fetch
from __future__ import annotations
from cdp.util import event_class, T_JSON_DICT
@@ -35,7 +35,7 @@ class RequestStage(enum.Enum):
'''
Stages of the request to handle. Request will intercept before the request is
sent. Response will intercept after the response is received (but before response
- body is received.
+ body is received).
'''
REQUEST = "Request"
RESPONSE = "Response"
@@ -50,14 +50,14 @@ def from_json(cls, json: str) -> RequestStage:
@dataclass
class RequestPattern:
- #: Wildcards ('*' -> zero or more, '?' -> exactly one) are allowed. Escape character is
- #: backslash. Omitting is equivalent to "*".
+ #: Wildcards (``'*'`` -> zero or more, ``'?'`` -> exactly one) are allowed. Escape character is
+ #: backslash. Omitting is equivalent to ``"*"``.
url_pattern: typing.Optional[str] = None
#: If set, only requests for matching resource types will be intercepted.
resource_type: typing.Optional[network.ResourceType] = None
- #: Stage at wich to begin intercepting requests. Default is Request.
+ #: Stage at which to begin intercepting requests. Default is Request.
request_stage: typing.Optional[RequestStage] = None
def to_json(self) -> T_JSON_DICT:
@@ -230,7 +230,8 @@ def fail_request(
def fulfill_request(
request_id: RequestId,
response_code: int,
- response_headers: typing.List[HeaderEntry],
+ response_headers: typing.Optional[typing.List[HeaderEntry]] = None,
+ binary_response_headers: typing.Optional[str] = None,
body: typing.Optional[str] = None,
response_phrase: typing.Optional[str] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
@@ -239,14 +240,18 @@ def fulfill_request(
:param request_id: An id the client received in requestPaused event.
:param response_code: An HTTP response code.
- :param response_headers: Response headers.
- :param body: *(Optional)* A response body.
- :param response_phrase: *(Optional)* A textual representation of responseCode. If absent, a standard phrase mathcing responseCode is used.
+ :param response_headers: *(Optional)* Response headers.
+ :param binary_response_headers: *(Optional)* Alternative way of specifying response headers as a \0-separated series of name: value pairs. Prefer the above method unless you need to represent some non-UTF8 values that can't be transmitted over the protocol as text. (Encoded as a base64 string when passed over JSON)
+ :param body: *(Optional)* A response body. If absent, original response body will be used if the request is intercepted at the response stage and empty body will be used if the request is intercepted at the request stage. (Encoded as a base64 string when passed over JSON)
+ :param response_phrase: *(Optional)* A textual representation of responseCode. If absent, a standard phrase matching responseCode is used.
'''
params: T_JSON_DICT = dict()
params['requestId'] = request_id.to_json()
params['responseCode'] = response_code
- params['responseHeaders'] = [i.to_json() for i in response_headers]
+ if response_headers is not None:
+ params['responseHeaders'] = [i.to_json() for i in response_headers]
+ if binary_response_headers is not None:
+ params['binaryResponseHeaders'] = binary_response_headers
if body is not None:
params['body'] = body
if response_phrase is not None:
@@ -263,7 +268,8 @@ def continue_request(
url: typing.Optional[str] = None,
method: typing.Optional[str] = None,
post_data: typing.Optional[str] = None,
- headers: typing.Optional[typing.List[HeaderEntry]] = None
+ headers: typing.Optional[typing.List[HeaderEntry]] = None,
+ intercept_response: typing.Optional[bool] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Continues the request, optionally modifying some of its parameters.
@@ -271,8 +277,9 @@ def continue_request(
:param request_id: An id the client received in requestPaused event.
:param url: *(Optional)* If set, the request url will be modified in a way that's not observable by page.
:param method: *(Optional)* If set, the request method is overridden.
- :param post_data: *(Optional)* If set, overrides the post data in the request.
- :param headers: *(Optional)* If set, overrides the request headrts.
+ :param post_data: *(Optional)* If set, overrides the post data in the request. (Encoded as a base64 string when passed over JSON)
+ :param headers: *(Optional)* If set, overrides the request headers.
+ :param intercept_response: **(EXPERIMENTAL)** *(Optional)* If set, overrides response interception behavior for this request.
'''
params: T_JSON_DICT = dict()
params['requestId'] = request_id.to_json()
@@ -284,6 +291,8 @@ def continue_request(
params['postData'] = post_data
if headers is not None:
params['headers'] = [i.to_json() for i in headers]
+ if intercept_response is not None:
+ params['interceptResponse'] = intercept_response
cmd_dict: T_JSON_DICT = {
'method': 'Fetch.continueRequest',
'params': params,
@@ -311,6 +320,43 @@ def continue_with_auth(
json = yield cmd_dict
+def continue_response(
+ request_id: RequestId,
+ response_code: typing.Optional[int] = None,
+ response_phrase: typing.Optional[str] = None,
+ response_headers: typing.Optional[typing.List[HeaderEntry]] = None,
+ binary_response_headers: typing.Optional[str] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Continues loading of the paused response, optionally modifying the
+ response headers. If either responseCode or headers are modified, all of them
+ must be present.
+
+ **EXPERIMENTAL**
+
+ :param request_id: An id the client received in requestPaused event.
+ :param response_code: *(Optional)* An HTTP response code. If absent, original response code will be used.
+ :param response_phrase: *(Optional)* A textual representation of responseCode. If absent, a standard phrase matching responseCode is used.
+ :param response_headers: *(Optional)* Response headers. If absent, original response headers will be used.
+ :param binary_response_headers: *(Optional)* Alternative way of specifying response headers as a \0-separated series of name: value pairs. Prefer the above method unless you need to represent some non-UTF8 values that can't be transmitted over the protocol as text. (Encoded as a base64 string when passed over JSON)
+ '''
+ params: T_JSON_DICT = dict()
+ params['requestId'] = request_id.to_json()
+ if response_code is not None:
+ params['responseCode'] = response_code
+ if response_phrase is not None:
+ params['responsePhrase'] = response_phrase
+ if response_headers is not None:
+ params['responseHeaders'] = [i.to_json() for i in response_headers]
+ if binary_response_headers is not None:
+ params['binaryResponseHeaders'] = binary_response_headers
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Fetch.continueResponse',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def get_response_body(
request_id: RequestId
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[str, bool]]:
@@ -392,6 +438,8 @@ class RequestPaused:
response_error_reason: typing.Optional[network.ErrorReason]
#: Response code if intercepted at response stage.
response_status_code: typing.Optional[int]
+ #: Response status text if intercepted at response stage.
+ response_status_text: typing.Optional[str]
#: Response headers if intercepted at the response stage.
response_headers: typing.Optional[typing.List[HeaderEntry]]
#: If the intercepted request had a corresponding Network.requestWillBeSent event fired for it,
@@ -407,6 +455,7 @@ def from_json(cls, json: T_JSON_DICT) -> RequestPaused:
resource_type=network.ResourceType.from_json(json['resourceType']),
response_error_reason=network.ErrorReason.from_json(json['responseErrorReason']) if 'responseErrorReason' in json else None,
response_status_code=int(json['responseStatusCode']) if 'responseStatusCode' in json else None,
+ response_status_text=str(json['responseStatusText']) if 'responseStatusText' in json else None,
response_headers=[HeaderEntry.from_json(i) for i in json['responseHeaders']] if 'responseHeaders' in json else None,
network_id=RequestId.from_json(json['networkId']) if 'networkId' in json else None
)
diff --git a/cdp/headless_experimental.py b/cdp/headless_experimental.py
index 72f81be..a4d6c4b 100644
--- a/cdp/headless_experimental.py
+++ b/cdp/headless_experimental.py
@@ -12,6 +12,9 @@
import typing
+from deprecated.sphinx import deprecated # type: ignore
+
+
@dataclass
class ScreenshotParams:
'''
@@ -58,7 +61,7 @@ def begin_frame(
:returns: A tuple with the following items:
0. **hasDamage** - Whether the BeginFrame resulted in damage and, thus, a new frame was committed to the display. Reported for diagnostic uses, may be removed in the future.
- 1. **screenshotData** - *(Optional)* Base64-encoded image data of the screenshot, if one was requested and successfully taken.
+ 1. **screenshotData** - *(Optional)* Base64-encoded image data of the screenshot, if one was requested and successfully taken. (Encoded as a base64 string when passed over JSON)
'''
params: T_JSON_DICT = dict()
if frame_time_ticks is not None:
@@ -100,11 +103,14 @@ def enable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
json = yield cmd_dict
+@deprecated(version="1.3")
@event_class('HeadlessExperimental.needsBeginFramesChanged')
@dataclass
class NeedsBeginFramesChanged:
'''
Issued when the target starts or stops needing BeginFrames.
+ Deprecated. Issue beginFrame unconditionally instead and use result from
+ beginFrame to detect whether the frames were suppressed.
'''
#: True if BeginFrames are needed, false otherwise.
needs_begin_frames: bool
diff --git a/cdp/heap_profiler.py b/cdp/heap_profiler.py
index ab29f7c..1ea8e45 100644
--- a/cdp/heap_profiler.py
+++ b/cdp/heap_profiler.py
@@ -257,14 +257,22 @@ def stop_sampling() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,SamplingHeapProf
def stop_tracking_heap_objects(
- report_progress: typing.Optional[bool] = None
+ report_progress: typing.Optional[bool] = None,
+ treat_global_objects_as_roots: typing.Optional[bool] = None,
+ capture_numeric_value: typing.Optional[bool] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
:param report_progress: *(Optional)* If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken when the tracking is stopped.
+    :param treat_global_objects_as_roots: *(Optional)* If true, a raw snapshot without artificial roots will be generated
+ :param capture_numeric_value: *(Optional)* If true, numerical values are included in the snapshot
'''
params: T_JSON_DICT = dict()
if report_progress is not None:
params['reportProgress'] = report_progress
+ if treat_global_objects_as_roots is not None:
+ params['treatGlobalObjectsAsRoots'] = treat_global_objects_as_roots
+ if capture_numeric_value is not None:
+ params['captureNumericValue'] = capture_numeric_value
cmd_dict: T_JSON_DICT = {
'method': 'HeapProfiler.stopTrackingHeapObjects',
'params': params,
@@ -273,14 +281,22 @@ def stop_tracking_heap_objects(
def take_heap_snapshot(
- report_progress: typing.Optional[bool] = None
+ report_progress: typing.Optional[bool] = None,
+ treat_global_objects_as_roots: typing.Optional[bool] = None,
+ capture_numeric_value: typing.Optional[bool] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
:param report_progress: *(Optional)* If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken.
+ :param treat_global_objects_as_roots: *(Optional)* If true, a raw snapshot without artificial roots will be generated
+ :param capture_numeric_value: *(Optional)* If true, numerical values are included in the snapshot
'''
params: T_JSON_DICT = dict()
if report_progress is not None:
params['reportProgress'] = report_progress
+ if treat_global_objects_as_roots is not None:
+ params['treatGlobalObjectsAsRoots'] = treat_global_objects_as_roots
+ if capture_numeric_value is not None:
+ params['captureNumericValue'] = capture_numeric_value
cmd_dict: T_JSON_DICT = {
'method': 'HeapProfiler.takeHeapSnapshot',
'params': params,
diff --git a/cdp/input_.py b/cdp/input_.py
index 4bb2092..8c67b46 100644
--- a/cdp/input_.py
+++ b/cdp/input_.py
@@ -33,6 +33,18 @@ class TouchPoint:
#: Force (default: 1.0).
force: typing.Optional[float] = None
+ #: The normalized tangential pressure, which has a range of [-1,1] (default: 0).
+ tangential_pressure: typing.Optional[float] = None
+
+ #: The plane angle between the Y-Z plane and the plane containing both the stylus axis and the Y axis, in degrees of the range [-90,90], a positive tiltX is to the right (default: 0)
+ tilt_x: typing.Optional[int] = None
+
+ #: The plane angle between the X-Z plane and the plane containing both the stylus axis and the X axis, in degrees of the range [-90,90], a positive tiltY is towards the user (default: 0).
+ tilt_y: typing.Optional[int] = None
+
+ #: The clockwise rotation of a pen stylus around its own major axis, in degrees in the range [0,359] (default: 0).
+ twist: typing.Optional[int] = None
+
#: Identifier used to track touch sources between events, must be unique within an event.
id_: typing.Optional[float] = None
@@ -48,6 +60,14 @@ def to_json(self) -> T_JSON_DICT:
json['rotationAngle'] = self.rotation_angle
if self.force is not None:
json['force'] = self.force
+ if self.tangential_pressure is not None:
+ json['tangentialPressure'] = self.tangential_pressure
+ if self.tilt_x is not None:
+ json['tiltX'] = self.tilt_x
+ if self.tilt_y is not None:
+ json['tiltY'] = self.tilt_y
+ if self.twist is not None:
+ json['twist'] = self.twist
if self.id_ is not None:
json['id'] = self.id_
return json
@@ -61,6 +81,10 @@ def from_json(cls, json: T_JSON_DICT) -> TouchPoint:
radius_y=float(json['radiusY']) if 'radiusY' in json else None,
rotation_angle=float(json['rotationAngle']) if 'rotationAngle' in json else None,
force=float(json['force']) if 'force' in json else None,
+ tangential_pressure=float(json['tangentialPressure']) if 'tangentialPressure' in json else None,
+ tilt_x=int(json['tiltX']) if 'tiltX' in json else None,
+ tilt_y=int(json['tiltY']) if 'tiltY' in json else None,
+ twist=int(json['twist']) if 'twist' in json else None,
id_=float(json['id']) if 'id' in json else None,
)
@@ -78,6 +102,22 @@ def from_json(cls, json: str) -> GestureSourceType:
return cls(json)
+class MouseButton(enum.Enum):
+ NONE = "none"
+ LEFT = "left"
+ MIDDLE = "middle"
+ RIGHT = "right"
+ BACK = "back"
+ FORWARD = "forward"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> MouseButton:
+ return cls(json)
+
+
class TimeSinceEpoch(float):
'''
UTC time in seconds, counted from January 1, 1970.
@@ -93,6 +133,101 @@ def __repr__(self):
return 'TimeSinceEpoch({})'.format(super().__repr__())
+@dataclass
+class DragDataItem:
+ #: Mime type of the dragged data.
+ mime_type: str
+
+    #: Depending on the value of ``mimeType``, it contains the dragged link,
+ #: text, HTML markup or any other data.
+ data: str
+
+ #: Title associated with a link. Only valid when ``mimeType`` == "text/uri-list".
+ title: typing.Optional[str] = None
+
+ #: Stores the base URL for the contained markup. Only valid when ``mimeType``
+ #: == "text/html".
+ base_url: typing.Optional[str] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['mimeType'] = self.mime_type
+ json['data'] = self.data
+ if self.title is not None:
+ json['title'] = self.title
+ if self.base_url is not None:
+ json['baseURL'] = self.base_url
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> DragDataItem:
+ return cls(
+ mime_type=str(json['mimeType']),
+ data=str(json['data']),
+ title=str(json['title']) if 'title' in json else None,
+ base_url=str(json['baseURL']) if 'baseURL' in json else None,
+ )
+
+
+@dataclass
+class DragData:
+ items: typing.List[DragDataItem]
+
+ #: Bit field representing allowed drag operations. Copy = 1, Link = 2, Move = 16
+ drag_operations_mask: int
+
+ #: List of filenames that should be included when dropping
+ files: typing.Optional[typing.List[str]] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['items'] = [i.to_json() for i in self.items]
+ json['dragOperationsMask'] = self.drag_operations_mask
+ if self.files is not None:
+ json['files'] = [i for i in self.files]
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> DragData:
+ return cls(
+ items=[DragDataItem.from_json(i) for i in json['items']],
+ drag_operations_mask=int(json['dragOperationsMask']),
+ files=[str(i) for i in json['files']] if 'files' in json else None,
+ )
+
+
+def dispatch_drag_event(
+ type_: str,
+ x: float,
+ y: float,
+ data: DragData,
+ modifiers: typing.Optional[int] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Dispatches a drag event into the page.
+
+ **EXPERIMENTAL**
+
+ :param type_: Type of the drag event.
+ :param x: X coordinate of the event relative to the main frame's viewport in CSS pixels.
+ :param y: Y coordinate of the event relative to the main frame's viewport in CSS pixels. 0 refers to the top of the viewport and Y increases as it proceeds towards the bottom of the viewport.
+ :param data:
+ :param modifiers: *(Optional)* Bit field representing pressed modifier keys. Alt=1, Ctrl=2, Meta/Command=4, Shift=8 (default: 0).
+ '''
+ params: T_JSON_DICT = dict()
+ params['type'] = type_
+ params['x'] = x
+ params['y'] = y
+ params['data'] = data.to_json()
+ if modifiers is not None:
+ params['modifiers'] = modifiers
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Input.dispatchDragEvent',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def dispatch_key_event(
type_: str,
modifiers: typing.Optional[int] = None,
@@ -107,7 +242,8 @@ def dispatch_key_event(
auto_repeat: typing.Optional[bool] = None,
is_keypad: typing.Optional[bool] = None,
is_system_key: typing.Optional[bool] = None,
- location: typing.Optional[int] = None
+ location: typing.Optional[int] = None,
+ commands: typing.Optional[typing.List[str]] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Dispatches a key event to the page.
@@ -115,7 +251,7 @@ def dispatch_key_event(
:param type_: Type of the key event.
:param modifiers: *(Optional)* Bit field representing pressed modifier keys. Alt=1, Ctrl=2, Meta/Command=4, Shift=8 (default: 0).
:param timestamp: *(Optional)* Time at which the event occurred.
- :param text: *(Optional)* Text as generated by processing a virtual key code with a keyboard layout. Not needed for for ```keyUp```` and ````rawKeyDown``` events (default: "")
+    :param text: *(Optional)* Text as generated by processing a virtual key code with a keyboard layout. Not needed for ``keyUp`` and ``rawKeyDown`` events (default: "")
:param unmodified_text: *(Optional)* Text that would have been generated by the keyboard if no modifiers were pressed (except for shift). Useful for shortcut (accelerator) key handling (default: "").
:param key_identifier: *(Optional)* Unique key identifier (e.g., 'U+0041') (default: "").
:param code: *(Optional)* Unique DOM defined string value for each physical key (e.g., 'KeyA') (default: "").
@@ -126,6 +262,7 @@ def dispatch_key_event(
:param is_keypad: *(Optional)* Whether the event was generated from the keypad (default: false).
:param is_system_key: *(Optional)* Whether the event was a system key event (default: false).
:param location: *(Optional)* Whether the event was from the left or right side of the keyboard. 1=Left, 2=Right (default: 0).
+    :param commands: **(EXPERIMENTAL)** *(Optional)* Editing commands to send with the key event (e.g., 'selectAll') (default: []). These are related to but not equal to the command names used in ``document.execCommand`` and NSStandardKeyBindingResponding. See https://source.chromium.org/chromium/chromium/src/+/main:third_party/blink/renderer/core/editing/commands/editor_command_names.h for valid command names.
'''
params: T_JSON_DICT = dict()
params['type'] = type_
@@ -155,6 +292,8 @@ def dispatch_key_event(
params['isSystemKey'] = is_system_key
if location is not None:
params['location'] = location
+ if commands is not None:
+ params['commands'] = [i for i in commands]
cmd_dict: T_JSON_DICT = {
'method': 'Input.dispatchKeyEvent',
'params': params,
@@ -182,15 +321,55 @@ def insert_text(
json = yield cmd_dict
+def ime_set_composition(
+ text: str,
+ selection_start: int,
+ selection_end: int,
+ replacement_start: typing.Optional[int] = None,
+ replacement_end: typing.Optional[int] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ This method sets the current candidate text for ime.
+ Use imeCommitComposition to commit the final text.
+ Use imeSetComposition with empty string as text to cancel composition.
+
+ **EXPERIMENTAL**
+
+ :param text: The text to insert
+ :param selection_start: selection start
+ :param selection_end: selection end
+ :param replacement_start: *(Optional)* replacement start
+ :param replacement_end: *(Optional)* replacement end
+ '''
+ params: T_JSON_DICT = dict()
+ params['text'] = text
+ params['selectionStart'] = selection_start
+ params['selectionEnd'] = selection_end
+ if replacement_start is not None:
+ params['replacementStart'] = replacement_start
+ if replacement_end is not None:
+ params['replacementEnd'] = replacement_end
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Input.imeSetComposition',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def dispatch_mouse_event(
type_: str,
x: float,
y: float,
modifiers: typing.Optional[int] = None,
timestamp: typing.Optional[TimeSinceEpoch] = None,
- button: typing.Optional[str] = None,
+ button: typing.Optional[MouseButton] = None,
buttons: typing.Optional[int] = None,
click_count: typing.Optional[int] = None,
+ force: typing.Optional[float] = None,
+ tangential_pressure: typing.Optional[float] = None,
+ tilt_x: typing.Optional[int] = None,
+ tilt_y: typing.Optional[int] = None,
+ twist: typing.Optional[int] = None,
delta_x: typing.Optional[float] = None,
delta_y: typing.Optional[float] = None,
pointer_type: typing.Optional[str] = None
@@ -206,6 +385,11 @@ def dispatch_mouse_event(
:param button: *(Optional)* Mouse button (default: "none").
:param buttons: *(Optional)* A number indicating which buttons are pressed on the mouse when a mouse event is triggered. Left=1, Right=2, Middle=4, Back=8, Forward=16, None=0.
:param click_count: *(Optional)* Number of times the mouse button was clicked (default: 0).
+ :param force: **(EXPERIMENTAL)** *(Optional)* The normalized pressure, which has a range of [0,1] (default: 0).
+ :param tangential_pressure: **(EXPERIMENTAL)** *(Optional)* The normalized tangential pressure, which has a range of [-1,1] (default: 0).
+ :param tilt_x: **(EXPERIMENTAL)** *(Optional)* The plane angle between the Y-Z plane and the plane containing both the stylus axis and the Y axis, in degrees of the range [-90,90], a positive tiltX is to the right (default: 0).
+ :param tilt_y: **(EXPERIMENTAL)** *(Optional)* The plane angle between the X-Z plane and the plane containing both the stylus axis and the X axis, in degrees of the range [-90,90], a positive tiltY is towards the user (default: 0).
+ :param twist: **(EXPERIMENTAL)** *(Optional)* The clockwise rotation of a pen stylus around its own major axis, in degrees in the range [0,359] (default: 0).
:param delta_x: *(Optional)* X delta in CSS pixels for mouse wheel event (default: 0).
:param delta_y: *(Optional)* Y delta in CSS pixels for mouse wheel event (default: 0).
:param pointer_type: *(Optional)* Pointer type (default: "mouse").
@@ -219,11 +403,21 @@ def dispatch_mouse_event(
if timestamp is not None:
params['timestamp'] = timestamp.to_json()
if button is not None:
- params['button'] = button
+ params['button'] = button.to_json()
if buttons is not None:
params['buttons'] = buttons
if click_count is not None:
params['clickCount'] = click_count
+ if force is not None:
+ params['force'] = force
+ if tangential_pressure is not None:
+ params['tangentialPressure'] = tangential_pressure
+ if tilt_x is not None:
+ params['tiltX'] = tilt_x
+ if tilt_y is not None:
+ params['tiltY'] = tilt_y
+ if twist is not None:
+ params['twist'] = twist
if delta_x is not None:
params['deltaX'] = delta_x
if delta_y is not None:
@@ -269,7 +463,7 @@ def emulate_touch_from_mouse_event(
type_: str,
x: int,
y: int,
- button: str,
+ button: MouseButton,
timestamp: typing.Optional[TimeSinceEpoch] = None,
delta_x: typing.Optional[float] = None,
delta_y: typing.Optional[float] = None,
@@ -284,7 +478,7 @@ def emulate_touch_from_mouse_event(
:param type_: Type of the mouse event.
:param x: X coordinate of the mouse pointer in DIP.
:param y: Y coordinate of the mouse pointer in DIP.
- :param button: Mouse button.
+ :param button: Mouse button. Only "none", "left", "right" are supported.
:param timestamp: *(Optional)* Time at which the event occurred (default: current time).
:param delta_x: *(Optional)* X delta in DIP for mouse wheel event (default: 0).
:param delta_y: *(Optional)* Y delta in DIP for mouse wheel event (default: 0).
@@ -295,7 +489,7 @@ def emulate_touch_from_mouse_event(
params['type'] = type_
params['x'] = x
params['y'] = y
- params['button'] = button
+ params['button'] = button.to_json()
if timestamp is not None:
params['timestamp'] = timestamp.to_json()
if delta_x is not None:
@@ -330,6 +524,26 @@ def set_ignore_input_events(
json = yield cmd_dict
+def set_intercept_drags(
+ enabled: bool
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Prevents default drag and drop behavior and instead emits ``Input.dragIntercepted`` events.
+ Drag and drop behavior can be directly controlled via ``Input.dispatchDragEvent``.
+
+ **EXPERIMENTAL**
+
+ :param enabled:
+ '''
+ params: T_JSON_DICT = dict()
+ params['enabled'] = enabled
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Input.setInterceptDrags',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def synthesize_pinch_gesture(
x: float,
y: float,
@@ -457,3 +671,21 @@ def synthesize_tap_gesture(
'params': params,
}
json = yield cmd_dict
+
+
+@event_class('Input.dragIntercepted')
+@dataclass
+class DragIntercepted:
+ '''
+ **EXPERIMENTAL**
+
+ Emitted only when ``Input.setInterceptDrags`` is enabled. Use this data with ``Input.dispatchDragEvent`` to
+ restore normal drag and drop behavior.
+ '''
+ data: DragData
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> DragIntercepted:
+ return cls(
+ data=DragData.from_json(json['data'])
+ )
diff --git a/cdp/io.py b/cdp/io.py
index 91b1cb2..7cd96b1 100644
--- a/cdp/io.py
+++ b/cdp/io.py
@@ -16,7 +16,7 @@
class StreamHandle(str):
'''
- This is either obtained from another method or specifed as ``blob:<uuid>`` where
+ This is either obtained from another method or specified as ``blob:<uuid>`` where
``<uuid>`` is an UUID of a Blob.
'''
def to_json(self) -> str:
@@ -62,7 +62,7 @@ def read(
0. **base64Encoded** - *(Optional)* Set if the data is base64-encoded
1. **data** - Data that were read.
- 2. **eof** - Set if the end-of-file condition occured while reading.
+ 2. **eof** - Set if the end-of-file condition occurred while reading.
'''
params: T_JSON_DICT = dict()
params['handle'] = handle.to_json()
diff --git a/cdp/layer_tree.py b/cdp/layer_tree.py
index 7458d66..da934b4 100644
--- a/cdp/layer_tree.py
+++ b/cdp/layer_tree.py
@@ -117,7 +117,7 @@ class PictureTile:
#: Offset from owning layer top boundary
y: float
- #: Base64-encoded snapshot data.
+ #: Base64-encoded snapshot data. (Encoded as a base64 string when passed over JSON)
picture: str
def to_json(self) -> T_JSON_DICT:
@@ -258,12 +258,15 @@ def __repr__(self):
def compositing_reasons(
layer_id: LayerId
- ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[str]]:
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.List[str], typing.List[str]]]:
'''
Provides the reasons why the given layer was composited.
:param layer_id: The id of the layer for which we want to get the reasons it was composited.
- :returns: A list of strings specifying reasons for the given layer to become composited.
+ :returns: A tuple with the following items:
+
+ 0. **compositingReasons** - A list of strings specifying reasons for the given layer to become composited.
+ 1. **compositingReasonIds** - A list of strings specifying reason IDs for the given layer to become composited.
'''
params: T_JSON_DICT = dict()
params['layerId'] = layer_id.to_json()
@@ -272,7 +275,10 @@ def compositing_reasons(
'params': params,
}
json = yield cmd_dict
- return [str(i) for i in json['compositingReasons']]
+ return (
+ [str(i) for i in json['compositingReasons']],
+ [str(i) for i in json['compositingReasonIds']]
+ )
def disable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
diff --git a/cdp/log.py b/cdp/log.py
index 825a931..4932020 100644
--- a/cdp/log.py
+++ b/cdp/log.py
@@ -32,6 +32,8 @@ class LogEntry:
#: Timestamp when this entry was added.
timestamp: runtime.Timestamp
+ category: typing.Optional[str] = None
+
#: URL of the resource if known.
url: typing.Optional[str] = None
@@ -56,6 +58,8 @@ def to_json(self) -> T_JSON_DICT:
json['level'] = self.level
json['text'] = self.text
json['timestamp'] = self.timestamp.to_json()
+ if self.category is not None:
+ json['category'] = self.category
if self.url is not None:
json['url'] = self.url
if self.line_number is not None:
@@ -77,6 +81,7 @@ def from_json(cls, json: T_JSON_DICT) -> LogEntry:
level=str(json['level']),
text=str(json['text']),
timestamp=runtime.Timestamp.from_json(json['timestamp']),
+ category=str(json['category']) if 'category' in json else None,
url=str(json['url']) if 'url' in json else None,
line_number=int(json['lineNumber']) if 'lineNumber' in json else None,
stack_trace=runtime.StackTrace.from_json(json['stackTrace']) if 'stackTrace' in json else None,
diff --git a/cdp/media.py b/cdp/media.py
new file mode 100644
index 0000000..3699f4c
--- /dev/null
+++ b/cdp/media.py
@@ -0,0 +1,253 @@
+# DO NOT EDIT THIS FILE!
+#
+# This file is generated from the CDP specification. If you need to make
+# changes, edit the generator and regenerate all of the modules.
+#
+# CDP domain: Media (experimental)
+
+from __future__ import annotations
+from cdp.util import event_class, T_JSON_DICT
+from dataclasses import dataclass
+import enum
+import typing
+
+
+class PlayerId(str):
+ '''
+ Players will get an ID that is unique within the agent context.
+ '''
+ def to_json(self) -> str:
+ return self
+
+ @classmethod
+ def from_json(cls, json: str) -> PlayerId:
+ return cls(json)
+
+ def __repr__(self):
+ return 'PlayerId({})'.format(super().__repr__())
+
+
+class Timestamp(float):
+ def to_json(self) -> float:
+ return self
+
+ @classmethod
+ def from_json(cls, json: float) -> Timestamp:
+ return cls(json)
+
+ def __repr__(self):
+ return 'Timestamp({})'.format(super().__repr__())
+
+
+@dataclass
+class PlayerMessage:
+ '''
+ Have one type per entry in MediaLogRecord::Type
+ Corresponds to kMessage
+ '''
+ #: Keep in sync with MediaLogMessageLevel
+ #: We are currently keeping the message level 'error' separate from the
+ #: PlayerError type because right now they represent different things,
+ #: this one being a DVLOG(ERROR) style log message that gets printed
+ #: based on what log level is selected in the UI, and the other is a
+ #: representation of a media::PipelineStatus object. Soon however we're
+ #: going to be moving away from using PipelineStatus for errors and
+ #: introducing a new error type which should hopefully let us integrate
+ #: the error log level into the PlayerError type.
+ level: str
+
+ message: str
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['level'] = self.level
+ json['message'] = self.message
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> PlayerMessage:
+ return cls(
+ level=str(json['level']),
+ message=str(json['message']),
+ )
+
+
+@dataclass
+class PlayerProperty:
+ '''
+ Corresponds to kMediaPropertyChange
+ '''
+ name: str
+
+ value: str
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['name'] = self.name
+ json['value'] = self.value
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> PlayerProperty:
+ return cls(
+ name=str(json['name']),
+ value=str(json['value']),
+ )
+
+
+@dataclass
+class PlayerEvent:
+ '''
+ Corresponds to kMediaEventTriggered
+ '''
+ timestamp: Timestamp
+
+ value: str
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['timestamp'] = self.timestamp.to_json()
+ json['value'] = self.value
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> PlayerEvent:
+ return cls(
+ timestamp=Timestamp.from_json(json['timestamp']),
+ value=str(json['value']),
+ )
+
+
+@dataclass
+class PlayerError:
+ '''
+ Corresponds to kMediaError
+ '''
+ type_: str
+
+ #: When this switches to using media::Status instead of PipelineStatus
+ #: we can remove "errorCode" and replace it with the fields from
+ #: a Status instance. This also seems like a duplicate of the error
+ #: level enum - there is a todo bug to have that level removed and
+ #: use this instead. (crbug.com/1068454)
+ error_code: str
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['type'] = self.type_
+ json['errorCode'] = self.error_code
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> PlayerError:
+ return cls(
+ type_=str(json['type']),
+ error_code=str(json['errorCode']),
+ )
+
+
+def enable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Enables the Media domain
+ '''
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Media.enable',
+ }
+ json = yield cmd_dict
+
+
+def disable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Disables the Media domain.
+ '''
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Media.disable',
+ }
+ json = yield cmd_dict
+
+
+@event_class('Media.playerPropertiesChanged')
+@dataclass
+class PlayerPropertiesChanged:
+ '''
+ This can be called multiple times, and can be used to set / override /
+ remove player properties. A null propValue indicates removal.
+ '''
+ player_id: PlayerId
+ properties: typing.List[PlayerProperty]
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> PlayerPropertiesChanged:
+ return cls(
+ player_id=PlayerId.from_json(json['playerId']),
+ properties=[PlayerProperty.from_json(i) for i in json['properties']]
+ )
+
+
+@event_class('Media.playerEventsAdded')
+@dataclass
+class PlayerEventsAdded:
+ '''
+ Send events as a list, allowing them to be batched on the browser for less
+ congestion. If batched, events must ALWAYS be in chronological order.
+ '''
+ player_id: PlayerId
+ events: typing.List[PlayerEvent]
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> PlayerEventsAdded:
+ return cls(
+ player_id=PlayerId.from_json(json['playerId']),
+ events=[PlayerEvent.from_json(i) for i in json['events']]
+ )
+
+
+@event_class('Media.playerMessagesLogged')
+@dataclass
+class PlayerMessagesLogged:
+ '''
+ Send a list of any messages that need to be delivered.
+ '''
+ player_id: PlayerId
+ messages: typing.List[PlayerMessage]
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> PlayerMessagesLogged:
+ return cls(
+ player_id=PlayerId.from_json(json['playerId']),
+ messages=[PlayerMessage.from_json(i) for i in json['messages']]
+ )
+
+
+@event_class('Media.playerErrorsRaised')
+@dataclass
+class PlayerErrorsRaised:
+ '''
+ Send a list of any errors that need to be delivered.
+ '''
+ player_id: PlayerId
+ errors: typing.List[PlayerError]
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> PlayerErrorsRaised:
+ return cls(
+ player_id=PlayerId.from_json(json['playerId']),
+ errors=[PlayerError.from_json(i) for i in json['errors']]
+ )
+
+
+@event_class('Media.playersCreated')
+@dataclass
+class PlayersCreated:
+ '''
+ Called whenever a player is created, or when a new agent joins and receives
+ a list of active players. If an agent is restored, it will receive the full
+ list of player ids and all events again.
+ '''
+ players: typing.List[PlayerId]
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> PlayersCreated:
+ return cls(
+ players=[PlayerId.from_json(i) for i in json['players']]
+ )
diff --git a/cdp/network.py b/cdp/network.py
index 6e60c51..2826d71 100644
--- a/cdp/network.py
+++ b/cdp/network.py
@@ -12,6 +12,7 @@
import typing
from . import debugger
+from . import emulation
from . import io
from . import page
from . import runtime
@@ -38,6 +39,7 @@ class ResourceType(enum.Enum):
SIGNED_EXCHANGE = "SignedExchange"
PING = "Ping"
CSP_VIOLATION_REPORT = "CSPViolationReport"
+ PREFLIGHT = "Preflight"
OTHER = "Other"
def to_json(self) -> str:
@@ -194,7 +196,6 @@ class CookieSameSite(enum.Enum):
'''
STRICT = "Strict"
LAX = "Lax"
- EXTENDED = "Extended"
NONE = "None"
def to_json(self) -> str:
@@ -205,6 +206,41 @@ def from_json(cls, json: str) -> CookieSameSite:
return cls(json)
+class CookiePriority(enum.Enum):
+ '''
+ Represents the cookie's 'Priority' status:
+ https://tools.ietf.org/html/draft-west-cookie-priority-00
+ '''
+ LOW = "Low"
+ MEDIUM = "Medium"
+ HIGH = "High"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> CookiePriority:
+ return cls(json)
+
+
+class CookieSourceScheme(enum.Enum):
+ '''
+ Represents the source scheme of the origin that originally set the cookie.
+ A value of "Unset" allows protocol clients to emulate legacy cookie scope for the scheme.
+ This is a temporary ability and it will be removed in the future.
+ '''
+ UNSET = "Unset"
+ NON_SECURE = "NonSecure"
+ SECURE = "Secure"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> CookieSourceScheme:
+ return cls(json)
+
+
@dataclass
class ResourceTiming:
'''
@@ -244,6 +280,12 @@ class ResourceTiming:
#: Finished Starting ServiceWorker.
worker_ready: float
+ #: Started fetch event.
+ worker_fetch_start: float
+
+ #: Settled fetch event respondWith promise.
+ worker_respond_with_settled: float
+
#: Started sending request.
send_start: float
@@ -272,6 +314,8 @@ def to_json(self) -> T_JSON_DICT:
json['sslEnd'] = self.ssl_end
json['workerStart'] = self.worker_start
json['workerReady'] = self.worker_ready
+ json['workerFetchStart'] = self.worker_fetch_start
+ json['workerRespondWithSettled'] = self.worker_respond_with_settled
json['sendStart'] = self.send_start
json['sendEnd'] = self.send_end
json['pushStart'] = self.push_start
@@ -293,6 +337,8 @@ def from_json(cls, json: T_JSON_DICT) -> ResourceTiming:
ssl_end=float(json['sslEnd']),
worker_start=float(json['workerStart']),
worker_ready=float(json['workerReady']),
+ worker_fetch_start=float(json['workerFetchStart']),
+ worker_respond_with_settled=float(json['workerRespondWithSettled']),
send_start=float(json['sendStart']),
send_end=float(json['sendEnd']),
push_start=float(json['pushStart']),
@@ -319,6 +365,26 @@ def from_json(cls, json: str) -> ResourcePriority:
return cls(json)
+@dataclass
+class PostDataEntry:
+ '''
+ Post data entry for HTTP request
+ '''
+ bytes_: typing.Optional[str] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ if self.bytes_ is not None:
+ json['bytes'] = self.bytes_
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> PostDataEntry:
+ return cls(
+ bytes_=str(json['bytes']) if 'bytes' in json else None,
+ )
+
+
@dataclass
class Request:
'''
@@ -348,12 +414,23 @@ class Request:
#: True when the request has POST data. Note that postData might still be omitted when this flag is true when the data is too long.
has_post_data: typing.Optional[bool] = None
+ #: Request body elements. This will be converted from base64 to binary
+ post_data_entries: typing.Optional[typing.List[PostDataEntry]] = None
+
#: The mixed content type of the request.
mixed_content_type: typing.Optional[security.MixedContentType] = None
#: Whether is loaded via link preload.
is_link_preload: typing.Optional[bool] = None
+ #: Set for requests when the TrustToken API is used. Contains the parameters
+ #: passed by the developer (e.g. via "fetch") as understood by the backend.
+ trust_token_params: typing.Optional[TrustTokenParams] = None
+
+ #: True if this resource request is considered to be the 'same site' as the
+ #: request correspondinfg to the main frame.
+ is_same_site: typing.Optional[bool] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['url'] = self.url
@@ -367,10 +444,16 @@ def to_json(self) -> T_JSON_DICT:
json['postData'] = self.post_data
if self.has_post_data is not None:
json['hasPostData'] = self.has_post_data
+ if self.post_data_entries is not None:
+ json['postDataEntries'] = [i.to_json() for i in self.post_data_entries]
if self.mixed_content_type is not None:
json['mixedContentType'] = self.mixed_content_type.to_json()
if self.is_link_preload is not None:
json['isLinkPreload'] = self.is_link_preload
+ if self.trust_token_params is not None:
+ json['trustTokenParams'] = self.trust_token_params.to_json()
+ if self.is_same_site is not None:
+ json['isSameSite'] = self.is_same_site
return json
@classmethod
@@ -384,8 +467,11 @@ def from_json(cls, json: T_JSON_DICT) -> Request:
url_fragment=str(json['urlFragment']) if 'urlFragment' in json else None,
post_data=str(json['postData']) if 'postData' in json else None,
has_post_data=bool(json['hasPostData']) if 'hasPostData' in json else None,
+ post_data_entries=[PostDataEntry.from_json(i) for i in json['postDataEntries']] if 'postDataEntries' in json else None,
mixed_content_type=security.MixedContentType.from_json(json['mixedContentType']) if 'mixedContentType' in json else None,
is_link_preload=bool(json['isLinkPreload']) if 'isLinkPreload' in json else None,
+ trust_token_params=TrustTokenParams.from_json(json['trustTokenParams']) if 'trustTokenParams' in json else None,
+ is_same_site=bool(json['isSameSite']) if 'isSameSite' in json else None,
)
@@ -406,8 +492,9 @@ class SignedCertificateTimestamp:
#: Log ID.
log_id: str
- #: Issuance date.
- timestamp: TimeSinceEpoch
+ #: Issuance date. Unlike TimeSinceEpoch, this contains the number of
+ #: milliseconds since January 1, 1970, UTC, not the number of seconds.
+ timestamp: float
#: Hash algorithm.
hash_algorithm: str
@@ -424,7 +511,7 @@ def to_json(self) -> T_JSON_DICT:
json['origin'] = self.origin
json['logDescription'] = self.log_description
json['logId'] = self.log_id
- json['timestamp'] = self.timestamp.to_json()
+ json['timestamp'] = self.timestamp
json['hashAlgorithm'] = self.hash_algorithm
json['signatureAlgorithm'] = self.signature_algorithm
json['signatureData'] = self.signature_data
@@ -437,7 +524,7 @@ def from_json(cls, json: T_JSON_DICT) -> SignedCertificateTimestamp:
origin=str(json['origin']),
log_description=str(json['logDescription']),
log_id=str(json['logId']),
- timestamp=TimeSinceEpoch.from_json(json['timestamp']),
+ timestamp=float(json['timestamp']),
hash_algorithm=str(json['hashAlgorithm']),
signature_algorithm=str(json['signatureAlgorithm']),
signature_data=str(json['signatureData']),
@@ -553,7 +640,11 @@ class BlockedReason(enum.Enum):
INSPECTOR = "inspector"
SUBRESOURCE_FILTER = "subresource-filter"
CONTENT_TYPE = "content-type"
- COLLAPSED_BY_CLIENT = "collapsed-by-client"
+ COEP_FRAME_RESOURCE_NEEDS_COEP_HEADER = "coep-frame-resource-needs-coep-header"
+ COOP_SANDBOXED_IFRAME_CANNOT_NAVIGATE_TO_COOP_PAGE = "coop-sandboxed-iframe-cannot-navigate-to-coop-page"
+ CORP_NOT_SAME_ORIGIN = "corp-not-same-origin"
+ CORP_NOT_SAME_ORIGIN_AFTER_DEFAULTED_TO_SAME_ORIGIN_BY_COEP = "corp-not-same-origin-after-defaulted-to-same-origin-by-coep"
+ CORP_NOT_SAME_SITE = "corp-not-same-site"
def to_json(self) -> str:
return self.value
@@ -563,6 +654,133 @@ def from_json(cls, json: str) -> BlockedReason:
return cls(json)
+class CorsError(enum.Enum):
+ '''
+ The reason why request was blocked.
+ '''
+ DISALLOWED_BY_MODE = "DisallowedByMode"
+ INVALID_RESPONSE = "InvalidResponse"
+ WILDCARD_ORIGIN_NOT_ALLOWED = "WildcardOriginNotAllowed"
+ MISSING_ALLOW_ORIGIN_HEADER = "MissingAllowOriginHeader"
+ MULTIPLE_ALLOW_ORIGIN_VALUES = "MultipleAllowOriginValues"
+ INVALID_ALLOW_ORIGIN_VALUE = "InvalidAllowOriginValue"
+ ALLOW_ORIGIN_MISMATCH = "AllowOriginMismatch"
+ INVALID_ALLOW_CREDENTIALS = "InvalidAllowCredentials"
+ CORS_DISABLED_SCHEME = "CorsDisabledScheme"
+ PREFLIGHT_INVALID_STATUS = "PreflightInvalidStatus"
+ PREFLIGHT_DISALLOWED_REDIRECT = "PreflightDisallowedRedirect"
+ PREFLIGHT_WILDCARD_ORIGIN_NOT_ALLOWED = "PreflightWildcardOriginNotAllowed"
+ PREFLIGHT_MISSING_ALLOW_ORIGIN_HEADER = "PreflightMissingAllowOriginHeader"
+ PREFLIGHT_MULTIPLE_ALLOW_ORIGIN_VALUES = "PreflightMultipleAllowOriginValues"
+ PREFLIGHT_INVALID_ALLOW_ORIGIN_VALUE = "PreflightInvalidAllowOriginValue"
+ PREFLIGHT_ALLOW_ORIGIN_MISMATCH = "PreflightAllowOriginMismatch"
+ PREFLIGHT_INVALID_ALLOW_CREDENTIALS = "PreflightInvalidAllowCredentials"
+ PREFLIGHT_MISSING_ALLOW_EXTERNAL = "PreflightMissingAllowExternal"
+ PREFLIGHT_INVALID_ALLOW_EXTERNAL = "PreflightInvalidAllowExternal"
+ PREFLIGHT_MISSING_ALLOW_PRIVATE_NETWORK = "PreflightMissingAllowPrivateNetwork"
+ PREFLIGHT_INVALID_ALLOW_PRIVATE_NETWORK = "PreflightInvalidAllowPrivateNetwork"
+ INVALID_ALLOW_METHODS_PREFLIGHT_RESPONSE = "InvalidAllowMethodsPreflightResponse"
+ INVALID_ALLOW_HEADERS_PREFLIGHT_RESPONSE = "InvalidAllowHeadersPreflightResponse"
+ METHOD_DISALLOWED_BY_PREFLIGHT_RESPONSE = "MethodDisallowedByPreflightResponse"
+ HEADER_DISALLOWED_BY_PREFLIGHT_RESPONSE = "HeaderDisallowedByPreflightResponse"
+ REDIRECT_CONTAINS_CREDENTIALS = "RedirectContainsCredentials"
+ INSECURE_PRIVATE_NETWORK = "InsecurePrivateNetwork"
+ INVALID_PRIVATE_NETWORK_ACCESS = "InvalidPrivateNetworkAccess"
+ UNEXPECTED_PRIVATE_NETWORK_ACCESS = "UnexpectedPrivateNetworkAccess"
+ NO_CORS_REDIRECT_MODE_NOT_FOLLOW = "NoCorsRedirectModeNotFollow"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> CorsError:
+ return cls(json)
+
+
+@dataclass
+class CorsErrorStatus:
+ cors_error: CorsError
+
+ failed_parameter: str
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['corsError'] = self.cors_error.to_json()
+ json['failedParameter'] = self.failed_parameter
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> CorsErrorStatus:
+ return cls(
+ cors_error=CorsError.from_json(json['corsError']),
+ failed_parameter=str(json['failedParameter']),
+ )
+
+
+class ServiceWorkerResponseSource(enum.Enum):
+ '''
+ Source of serviceworker response.
+ '''
+ CACHE_STORAGE = "cache-storage"
+ HTTP_CACHE = "http-cache"
+ FALLBACK_CODE = "fallback-code"
+ NETWORK = "network"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> ServiceWorkerResponseSource:
+ return cls(json)
+
+
+@dataclass
+class TrustTokenParams:
+ '''
+ Determines what type of Trust Token operation is executed and
+ depending on the type, some additional parameters. The values
+ are specified in third_party/blink/renderer/core/fetch/trust_token.idl.
+ '''
+ type_: TrustTokenOperationType
+
+ #: Only set for "token-redemption" type and determines whether
+ #: to request a fresh SRR or use a still valid cached SRR.
+ refresh_policy: str
+
+ #: Origins of issuers from whom to request tokens or redemption
+ #: records.
+ issuers: typing.Optional[typing.List[str]] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['type'] = self.type_.to_json()
+ json['refreshPolicy'] = self.refresh_policy
+ if self.issuers is not None:
+ json['issuers'] = [i for i in self.issuers]
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> TrustTokenParams:
+ return cls(
+ type_=TrustTokenOperationType.from_json(json['type']),
+ refresh_policy=str(json['refreshPolicy']),
+ issuers=[str(i) for i in json['issuers']] if 'issuers' in json else None,
+ )
+
+
+class TrustTokenOperationType(enum.Enum):
+ ISSUANCE = "Issuance"
+ REDEMPTION = "Redemption"
+ SIGNING = "Signing"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> TrustTokenOperationType:
+ return cls(json)
+
+
@dataclass
class Response:
'''
@@ -595,13 +813,13 @@ class Response:
#: Security state of the request resource.
security_state: security.SecurityState
- #: HTTP response headers text.
+ #: HTTP response headers text. This has been replaced by the headers in Network.responseReceivedExtraInfo.
headers_text: typing.Optional[str] = None
#: Refined HTTP request headers that were actually transmitted over the network.
request_headers: typing.Optional[Headers] = None
- #: HTTP request headers text.
+ #: HTTP request headers text. This has been replaced by the headers in Network.requestWillBeSentExtraInfo.
request_headers_text: typing.Optional[str] = None
#: Remote IP address.
@@ -622,6 +840,15 @@ class Response:
#: Timing information for the given request.
timing: typing.Optional[ResourceTiming] = None
+ #: Response source of response from ServiceWorker.
+ service_worker_response_source: typing.Optional[ServiceWorkerResponseSource] = None
+
+ #: The time at which the returned response was generated.
+ response_time: typing.Optional[TimeSinceEpoch] = None
+
+ #: Cache Storage Cache Name.
+ cache_storage_cache_name: typing.Optional[str] = None
+
#: Protocol used to fetch this request.
protocol: typing.Optional[str] = None
@@ -657,6 +884,12 @@ def to_json(self) -> T_JSON_DICT:
json['fromPrefetchCache'] = self.from_prefetch_cache
if self.timing is not None:
json['timing'] = self.timing.to_json()
+ if self.service_worker_response_source is not None:
+ json['serviceWorkerResponseSource'] = self.service_worker_response_source.to_json()
+ if self.response_time is not None:
+ json['responseTime'] = self.response_time.to_json()
+ if self.cache_storage_cache_name is not None:
+ json['cacheStorageCacheName'] = self.cache_storage_cache_name
if self.protocol is not None:
json['protocol'] = self.protocol
if self.security_details is not None:
@@ -684,6 +917,9 @@ def from_json(cls, json: T_JSON_DICT) -> Response:
from_service_worker=bool(json['fromServiceWorker']) if 'fromServiceWorker' in json else None,
from_prefetch_cache=bool(json['fromPrefetchCache']) if 'fromPrefetchCache' in json else None,
timing=ResourceTiming.from_json(json['timing']) if 'timing' in json else None,
+ service_worker_response_source=ServiceWorkerResponseSource.from_json(json['serviceWorkerResponseSource']) if 'serviceWorkerResponseSource' in json else None,
+ response_time=TimeSinceEpoch.from_json(json['responseTime']) if 'responseTime' in json else None,
+ cache_storage_cache_name=str(json['cacheStorageCacheName']) if 'cacheStorageCacheName' in json else None,
protocol=str(json['protocol']) if 'protocol' in json else None,
security_details=SecurityDetails.from_json(json['securityDetails']) if 'securityDetails' in json else None,
)
@@ -843,6 +1079,13 @@ class Initiator:
#: module) (0-based).
line_number: typing.Optional[float] = None
+ #: Initiator column number, set for Parser type or for Script type (when script is importing
+ #: module) (0-based).
+ column_number: typing.Optional[float] = None
+
+ #: Set if another request triggered this request (e.g. preflight).
+ request_id: typing.Optional[RequestId] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['type'] = self.type_
@@ -852,6 +1095,10 @@ def to_json(self) -> T_JSON_DICT:
json['url'] = self.url
if self.line_number is not None:
json['lineNumber'] = self.line_number
+ if self.column_number is not None:
+ json['columnNumber'] = self.column_number
+ if self.request_id is not None:
+ json['requestId'] = self.request_id.to_json()
return json
@classmethod
@@ -861,6 +1108,8 @@ def from_json(cls, json: T_JSON_DICT) -> Initiator:
stack=runtime.StackTrace.from_json(json['stack']) if 'stack' in json else None,
url=str(json['url']) if 'url' in json else None,
line_number=float(json['lineNumber']) if 'lineNumber' in json else None,
+ column_number=float(json['columnNumber']) if 'columnNumber' in json else None,
+ request_id=RequestId.from_json(json['requestId']) if 'requestId' in json else None,
)
@@ -896,9 +1145,30 @@ class Cookie:
#: True in case of session cookie.
session: bool
+ #: Cookie Priority
+ priority: CookiePriority
+
+ #: True if cookie is SameParty.
+ same_party: bool
+
+ #: Cookie source scheme type.
+ source_scheme: CookieSourceScheme
+
+ #: Cookie source port. Valid values are {-1, [1, 65535]}, -1 indicates an unspecified port.
+ #: An unspecified port value allows protocol clients to emulate legacy cookie scope for the port.
+ #: This is a temporary ability and it will be removed in the future.
+ source_port: int
+
#: Cookie SameSite type.
same_site: typing.Optional[CookieSameSite] = None
+ #: Cookie partition key. The site of the top-level URL the browser was visiting at the start
+ #: of the request to the endpoint that set the cookie.
+ partition_key: typing.Optional[str] = None
+
+ #: True if cookie partition key is opaque.
+ partition_key_opaque: typing.Optional[bool] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['name'] = self.name
@@ -910,8 +1180,16 @@ def to_json(self) -> T_JSON_DICT:
json['httpOnly'] = self.http_only
json['secure'] = self.secure
json['session'] = self.session
+ json['priority'] = self.priority.to_json()
+ json['sameParty'] = self.same_party
+ json['sourceScheme'] = self.source_scheme.to_json()
+ json['sourcePort'] = self.source_port
if self.same_site is not None:
json['sameSite'] = self.same_site.to_json()
+ if self.partition_key is not None:
+ json['partitionKey'] = self.partition_key
+ if self.partition_key_opaque is not None:
+ json['partitionKeyOpaque'] = self.partition_key_opaque
return json
@classmethod
@@ -926,7 +1204,13 @@ def from_json(cls, json: T_JSON_DICT) -> Cookie:
http_only=bool(json['httpOnly']),
secure=bool(json['secure']),
session=bool(json['session']),
+ priority=CookiePriority.from_json(json['priority']),
+ same_party=bool(json['sameParty']),
+ source_scheme=CookieSourceScheme.from_json(json['sourceScheme']),
+ source_port=int(json['sourcePort']),
same_site=CookieSameSite.from_json(json['sameSite']) if 'sameSite' in json else None,
+ partition_key=str(json['partitionKey']) if 'partitionKey' in json else None,
+ partition_key_opaque=bool(json['partitionKeyOpaque']) if 'partitionKeyOpaque' in json else None,
)
@@ -937,7 +1221,6 @@ class SetCookieBlockedReason(enum.Enum):
SECURE_ONLY = "SecureOnly"
SAME_SITE_STRICT = "SameSiteStrict"
SAME_SITE_LAX = "SameSiteLax"
- SAME_SITE_EXTENDED = "SameSiteExtended"
SAME_SITE_UNSPECIFIED_TREATED_AS_LAX = "SameSiteUnspecifiedTreatedAsLax"
SAME_SITE_NONE_INSECURE = "SameSiteNoneInsecure"
USER_PREFERENCES = "UserPreferences"
@@ -947,6 +1230,12 @@ class SetCookieBlockedReason(enum.Enum):
INVALID_DOMAIN = "InvalidDomain"
INVALID_PREFIX = "InvalidPrefix"
UNKNOWN_ERROR = "UnknownError"
+ SCHEMEFUL_SAME_SITE_STRICT = "SchemefulSameSiteStrict"
+ SCHEMEFUL_SAME_SITE_LAX = "SchemefulSameSiteLax"
+ SCHEMEFUL_SAME_SITE_UNSPECIFIED_TREATED_AS_LAX = "SchemefulSameSiteUnspecifiedTreatedAsLax"
+ SAME_PARTY_FROM_CROSS_PARTY_CONTEXT = "SamePartyFromCrossPartyContext"
+ SAME_PARTY_CONFLICTS_WITH_OTHER_ATTRIBUTES = "SamePartyConflictsWithOtherAttributes"
+ NAME_VALUE_PAIR_EXCEEDS_MAX_SIZE = "NameValuePairExceedsMaxSize"
def to_json(self) -> str:
return self.value
@@ -965,11 +1254,15 @@ class CookieBlockedReason(enum.Enum):
DOMAIN_MISMATCH = "DomainMismatch"
SAME_SITE_STRICT = "SameSiteStrict"
SAME_SITE_LAX = "SameSiteLax"
- SAME_SITE_EXTENDED = "SameSiteExtended"
SAME_SITE_UNSPECIFIED_TREATED_AS_LAX = "SameSiteUnspecifiedTreatedAsLax"
SAME_SITE_NONE_INSECURE = "SameSiteNoneInsecure"
USER_PREFERENCES = "UserPreferences"
UNKNOWN_ERROR = "UnknownError"
+ SCHEMEFUL_SAME_SITE_STRICT = "SchemefulSameSiteStrict"
+ SCHEMEFUL_SAME_SITE_LAX = "SchemefulSameSiteLax"
+ SCHEMEFUL_SAME_SITE_UNSPECIFIED_TREATED_AS_LAX = "SchemefulSameSiteUnspecifiedTreatedAsLax"
+ SAME_PARTY_FROM_CROSS_PARTY_CONTEXT = "SamePartyFromCrossPartyContext"
+ NAME_VALUE_PAIR_EXCEEDS_MAX_SIZE = "NameValuePairExceedsMaxSize"
def to_json(self) -> str:
return self.value
@@ -984,8 +1277,8 @@ class BlockedSetCookieWithReason:
'''
A cookie which was not stored from a response with the corresponding reason.
'''
- #: The reason this cookie was blocked.
- blocked_reason: SetCookieBlockedReason
+ #: The reason(s) this cookie was blocked.
+ blocked_reasons: typing.List[SetCookieBlockedReason]
#: The string representing this individual cookie as it would appear in the header.
#: This is not the entire "cookie" or "set-cookie" header which could have multiple cookies.
@@ -998,7 +1291,7 @@ class BlockedSetCookieWithReason:
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
- json['blockedReason'] = self.blocked_reason.to_json()
+ json['blockedReasons'] = [i.to_json() for i in self.blocked_reasons]
json['cookieLine'] = self.cookie_line
if self.cookie is not None:
json['cookie'] = self.cookie.to_json()
@@ -1007,7 +1300,7 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> BlockedSetCookieWithReason:
return cls(
- blocked_reason=SetCookieBlockedReason.from_json(json['blockedReason']),
+ blocked_reasons=[SetCookieBlockedReason.from_json(i) for i in json['blockedReasons']],
cookie_line=str(json['cookieLine']),
cookie=Cookie.from_json(json['cookie']) if 'cookie' in json else None,
)
@@ -1018,22 +1311,22 @@ class BlockedCookieWithReason:
'''
A cookie with was not sent with a request with the corresponding reason.
'''
- #: The reason the cookie was blocked.
- blocked_reason: CookieBlockedReason
+ #: The reason(s) the cookie was blocked.
+ blocked_reasons: typing.List[CookieBlockedReason]
#: The cookie object representing the cookie which was not sent.
cookie: Cookie
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
- json['blockedReason'] = self.blocked_reason.to_json()
+ json['blockedReasons'] = [i.to_json() for i in self.blocked_reasons]
json['cookie'] = self.cookie.to_json()
return json
@classmethod
def from_json(cls, json: T_JSON_DICT) -> BlockedCookieWithReason:
return cls(
- blocked_reason=CookieBlockedReason.from_json(json['blockedReason']),
+ blocked_reasons=[CookieBlockedReason.from_json(i) for i in json['blockedReasons']],
cookie=Cookie.from_json(json['cookie']),
)
@@ -1050,7 +1343,7 @@ class CookieParam:
value: str
#: The request-URI to associate with the setting of the cookie. This value can affect the
- #: default domain and path values of the created cookie.
+ #: default domain, path, source port, and source scheme values of the created cookie.
url: typing.Optional[str] = None
#: Cookie domain.
@@ -1071,6 +1364,25 @@ class CookieParam:
#: Cookie expiration date, session cookie if not set
expires: typing.Optional[TimeSinceEpoch] = None
+ #: Cookie Priority.
+ priority: typing.Optional[CookiePriority] = None
+
+ #: True if cookie is SameParty.
+ same_party: typing.Optional[bool] = None
+
+ #: Cookie source scheme type.
+ source_scheme: typing.Optional[CookieSourceScheme] = None
+
+ #: Cookie source port. Valid values are {-1, [1, 65535]}, -1 indicates an unspecified port.
+ #: An unspecified port value allows protocol clients to emulate legacy cookie scope for the port.
+ #: This is a temporary ability and it will be removed in the future.
+ source_port: typing.Optional[int] = None
+
+ #: Cookie partition key. The site of the top-level URL the browser was visiting at the start
+ #: of the request to the endpoint that set the cookie.
+ #: If not set, the cookie will be set as not partitioned.
+ partition_key: typing.Optional[str] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['name'] = self.name
@@ -1089,6 +1401,16 @@ def to_json(self) -> T_JSON_DICT:
json['sameSite'] = self.same_site.to_json()
if self.expires is not None:
json['expires'] = self.expires.to_json()
+ if self.priority is not None:
+ json['priority'] = self.priority.to_json()
+ if self.same_party is not None:
+ json['sameParty'] = self.same_party
+ if self.source_scheme is not None:
+ json['sourceScheme'] = self.source_scheme.to_json()
+ if self.source_port is not None:
+ json['sourcePort'] = self.source_port
+ if self.partition_key is not None:
+ json['partitionKey'] = self.partition_key
return json
@classmethod
@@ -1103,6 +1425,11 @@ def from_json(cls, json: T_JSON_DICT) -> CookieParam:
http_only=bool(json['httpOnly']) if 'httpOnly' in json else None,
same_site=CookieSameSite.from_json(json['sameSite']) if 'sameSite' in json else None,
expires=TimeSinceEpoch.from_json(json['expires']) if 'expires' in json else None,
+ priority=CookiePriority.from_json(json['priority']) if 'priority' in json else None,
+ same_party=bool(json['sameParty']) if 'sameParty' in json else None,
+ source_scheme=CookieSourceScheme.from_json(json['sourceScheme']) if 'sourceScheme' in json else None,
+ source_port=int(json['sourcePort']) if 'sourcePort' in json else None,
+ partition_key=str(json['partitionKey']) if 'partitionKey' in json else None,
)
@@ -1199,14 +1526,14 @@ class RequestPattern:
'''
Request pattern for interception.
'''
- #: Wildcards ('*' -> zero or more, '?' -> exactly one) are allowed. Escape character is
- #: backslash. Omitting is equivalent to "*".
+ #: Wildcards (``'*'`` -> zero or more, ``'?'`` -> exactly one) are allowed. Escape character is
+ #: backslash. Omitting is equivalent to ``"*"``.
url_pattern: typing.Optional[str] = None
#: If set, only requests for matching resource types will be intercepted.
resource_type: typing.Optional[ResourceType] = None
- #: Stage at wich to begin intercepting requests. Default is Request.
+ #: Stage at which to begin intercepting requests. Default is Request.
interception_stage: typing.Optional[InterceptionStage] = None
def to_json(self) -> T_JSON_DICT:
@@ -1422,138 +1749,546 @@ def from_json(cls, json: T_JSON_DICT) -> SignedExchangeInfo:
)
-@deprecated(version="1.3")
-def can_clear_browser_cache() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,bool]:
+class ContentEncoding(enum.Enum):
'''
- Tells whether clearing browser cache is supported.
-
- .. deprecated:: 1.3
-
- :returns: True if browser cache can be cleared.
+ List of content encodings supported by the backend.
'''
- cmd_dict: T_JSON_DICT = {
- 'method': 'Network.canClearBrowserCache',
- }
- json = yield cmd_dict
- return bool(json['result'])
+ DEFLATE = "deflate"
+ GZIP = "gzip"
+ BR = "br"
+ def to_json(self) -> str:
+ return self.value
-@deprecated(version="1.3")
-def can_clear_browser_cookies() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,bool]:
- '''
- Tells whether clearing browser cookies is supported.
+ @classmethod
+ def from_json(cls, json: str) -> ContentEncoding:
+ return cls(json)
- .. deprecated:: 1.3
- :returns: True if browser cookies can be cleared.
- '''
- cmd_dict: T_JSON_DICT = {
- 'method': 'Network.canClearBrowserCookies',
- }
- json = yield cmd_dict
- return bool(json['result'])
+class PrivateNetworkRequestPolicy(enum.Enum):
+ ALLOW = "Allow"
+ BLOCK_FROM_INSECURE_TO_MORE_PRIVATE = "BlockFromInsecureToMorePrivate"
+ WARN_FROM_INSECURE_TO_MORE_PRIVATE = "WarnFromInsecureToMorePrivate"
+ PREFLIGHT_BLOCK = "PreflightBlock"
+ PREFLIGHT_WARN = "PreflightWarn"
+ def to_json(self) -> str:
+ return self.value
-@deprecated(version="1.3")
-def can_emulate_network_conditions() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,bool]:
- '''
- Tells whether emulation of network conditions is supported.
+ @classmethod
+ def from_json(cls, json: str) -> PrivateNetworkRequestPolicy:
+ return cls(json)
- .. deprecated:: 1.3
- :returns: True if emulation of network conditions is supported.
- '''
- cmd_dict: T_JSON_DICT = {
- 'method': 'Network.canEmulateNetworkConditions',
- }
- json = yield cmd_dict
- return bool(json['result'])
+class IPAddressSpace(enum.Enum):
+ LOCAL = "Local"
+ PRIVATE = "Private"
+ PUBLIC = "Public"
+ UNKNOWN = "Unknown"
+ def to_json(self) -> str:
+ return self.value
-def clear_browser_cache() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
- '''
- Clears browser cache.
- '''
- cmd_dict: T_JSON_DICT = {
- 'method': 'Network.clearBrowserCache',
- }
- json = yield cmd_dict
+ @classmethod
+ def from_json(cls, json: str) -> IPAddressSpace:
+ return cls(json)
-def clear_browser_cookies() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
- '''
- Clears browser cookies.
- '''
- cmd_dict: T_JSON_DICT = {
- 'method': 'Network.clearBrowserCookies',
- }
- json = yield cmd_dict
+@dataclass
+class ConnectTiming:
+ #: Timing's requestTime is a baseline in seconds, while the other numbers are ticks in
+ #: milliseconds relatively to this requestTime. Matches ResourceTiming's requestTime for
+ #: the same request (but not for redirected requests).
+ request_time: float
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['requestTime'] = self.request_time
+ return json
-@deprecated(version="1.3")
-def continue_intercepted_request(
- interception_id: InterceptionId,
- error_reason: typing.Optional[ErrorReason] = None,
- raw_response: typing.Optional[str] = None,
- url: typing.Optional[str] = None,
- method: typing.Optional[str] = None,
- post_data: typing.Optional[str] = None,
- headers: typing.Optional[Headers] = None,
- auth_challenge_response: typing.Optional[AuthChallengeResponse] = None
- ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
- '''
- Response to Network.requestIntercepted which either modifies the request to continue with any
- modifications, or blocks it, or completes it with the provided response bytes. If a network
- fetch occurs as a result which encounters a redirect an additional Network.requestIntercepted
- event will be sent with the same InterceptionId.
- Deprecated, use Fetch.continueRequest, Fetch.fulfillRequest and Fetch.failRequest instead.
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> ConnectTiming:
+ return cls(
+ request_time=float(json['requestTime']),
+ )
- .. deprecated:: 1.3
- **EXPERIMENTAL**
+@dataclass
+class ClientSecurityState:
+ initiator_is_secure_context: bool
- :param interception_id:
- :param error_reason: *(Optional)* If set this causes the request to fail with the given reason. Passing ```Aborted```` for requests marked with ````isNavigationRequest``` also cancels the navigation. Must not be set in response to an authChallenge.
- :param raw_response: *(Optional)* If set the requests completes using with the provided base64 encoded raw response, including HTTP status line and headers etc... Must not be set in response to an authChallenge.
- :param url: *(Optional)* If set the request url will be modified in a way that's not observable by page. Must not be set in response to an authChallenge.
- :param method: *(Optional)* If set this allows the request method to be overridden. Must not be set in response to an authChallenge.
- :param post_data: *(Optional)* If set this allows postData to be set. Must not be set in response to an authChallenge.
- :param headers: *(Optional)* If set this allows the request headers to be changed. Must not be set in response to an authChallenge.
- :param auth_challenge_response: *(Optional)* Response to a requestIntercepted with an authChallenge. Must not be set otherwise.
- '''
- params: T_JSON_DICT = dict()
- params['interceptionId'] = interception_id.to_json()
- if error_reason is not None:
- params['errorReason'] = error_reason.to_json()
- if raw_response is not None:
- params['rawResponse'] = raw_response
- if url is not None:
- params['url'] = url
- if method is not None:
- params['method'] = method
- if post_data is not None:
- params['postData'] = post_data
- if headers is not None:
- params['headers'] = headers.to_json()
- if auth_challenge_response is not None:
- params['authChallengeResponse'] = auth_challenge_response.to_json()
- cmd_dict: T_JSON_DICT = {
- 'method': 'Network.continueInterceptedRequest',
- 'params': params,
- }
- json = yield cmd_dict
+ initiator_ip_address_space: IPAddressSpace
+ private_network_request_policy: PrivateNetworkRequestPolicy
-def delete_cookies(
- name: str,
- url: typing.Optional[str] = None,
- domain: typing.Optional[str] = None,
- path: typing.Optional[str] = None
- ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
- '''
- Deletes browser cookies with matching name and url or domain/path pair.
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['initiatorIsSecureContext'] = self.initiator_is_secure_context
+ json['initiatorIPAddressSpace'] = self.initiator_ip_address_space.to_json()
+ json['privateNetworkRequestPolicy'] = self.private_network_request_policy.to_json()
+ return json
- :param name: Name of the cookies to remove.
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> ClientSecurityState:
+ return cls(
+ initiator_is_secure_context=bool(json['initiatorIsSecureContext']),
+ initiator_ip_address_space=IPAddressSpace.from_json(json['initiatorIPAddressSpace']),
+ private_network_request_policy=PrivateNetworkRequestPolicy.from_json(json['privateNetworkRequestPolicy']),
+ )
+
+
+class CrossOriginOpenerPolicyValue(enum.Enum):
+ SAME_ORIGIN = "SameOrigin"
+ SAME_ORIGIN_ALLOW_POPUPS = "SameOriginAllowPopups"
+ UNSAFE_NONE = "UnsafeNone"
+ SAME_ORIGIN_PLUS_COEP = "SameOriginPlusCoep"
+ SAME_ORIGIN_ALLOW_POPUPS_PLUS_COEP = "SameOriginAllowPopupsPlusCoep"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> CrossOriginOpenerPolicyValue:
+ return cls(json)
+
+
+@dataclass
+class CrossOriginOpenerPolicyStatus:
+ value: CrossOriginOpenerPolicyValue
+
+ report_only_value: CrossOriginOpenerPolicyValue
+
+ reporting_endpoint: typing.Optional[str] = None
+
+ report_only_reporting_endpoint: typing.Optional[str] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['value'] = self.value.to_json()
+ json['reportOnlyValue'] = self.report_only_value.to_json()
+ if self.reporting_endpoint is not None:
+ json['reportingEndpoint'] = self.reporting_endpoint
+ if self.report_only_reporting_endpoint is not None:
+ json['reportOnlyReportingEndpoint'] = self.report_only_reporting_endpoint
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> CrossOriginOpenerPolicyStatus:
+ return cls(
+ value=CrossOriginOpenerPolicyValue.from_json(json['value']),
+ report_only_value=CrossOriginOpenerPolicyValue.from_json(json['reportOnlyValue']),
+ reporting_endpoint=str(json['reportingEndpoint']) if 'reportingEndpoint' in json else None,
+ report_only_reporting_endpoint=str(json['reportOnlyReportingEndpoint']) if 'reportOnlyReportingEndpoint' in json else None,
+ )
+
+
+class CrossOriginEmbedderPolicyValue(enum.Enum):
+ NONE = "None"
+ CREDENTIALLESS = "Credentialless"
+ REQUIRE_CORP = "RequireCorp"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> CrossOriginEmbedderPolicyValue:
+ return cls(json)
+
+
+@dataclass
+class CrossOriginEmbedderPolicyStatus:
+ value: CrossOriginEmbedderPolicyValue
+
+ report_only_value: CrossOriginEmbedderPolicyValue
+
+ reporting_endpoint: typing.Optional[str] = None
+
+ report_only_reporting_endpoint: typing.Optional[str] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['value'] = self.value.to_json()
+ json['reportOnlyValue'] = self.report_only_value.to_json()
+ if self.reporting_endpoint is not None:
+ json['reportingEndpoint'] = self.reporting_endpoint
+ if self.report_only_reporting_endpoint is not None:
+ json['reportOnlyReportingEndpoint'] = self.report_only_reporting_endpoint
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> CrossOriginEmbedderPolicyStatus:
+ return cls(
+ value=CrossOriginEmbedderPolicyValue.from_json(json['value']),
+ report_only_value=CrossOriginEmbedderPolicyValue.from_json(json['reportOnlyValue']),
+ reporting_endpoint=str(json['reportingEndpoint']) if 'reportingEndpoint' in json else None,
+ report_only_reporting_endpoint=str(json['reportOnlyReportingEndpoint']) if 'reportOnlyReportingEndpoint' in json else None,
+ )
+
+
+@dataclass
+class SecurityIsolationStatus:
+ coop: typing.Optional[CrossOriginOpenerPolicyStatus] = None
+
+ coep: typing.Optional[CrossOriginEmbedderPolicyStatus] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ if self.coop is not None:
+ json['coop'] = self.coop.to_json()
+ if self.coep is not None:
+ json['coep'] = self.coep.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> SecurityIsolationStatus:
+ return cls(
+ coop=CrossOriginOpenerPolicyStatus.from_json(json['coop']) if 'coop' in json else None,
+ coep=CrossOriginEmbedderPolicyStatus.from_json(json['coep']) if 'coep' in json else None,
+ )
+
+
+class ReportStatus(enum.Enum):
+ '''
+ The status of a Reporting API report.
+ '''
+ QUEUED = "Queued"
+ PENDING = "Pending"
+ MARKED_FOR_REMOVAL = "MarkedForRemoval"
+ SUCCESS = "Success"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> ReportStatus:
+ return cls(json)
+
+
+class ReportId(str):
+ def to_json(self) -> str:
+ return self
+
+ @classmethod
+ def from_json(cls, json: str) -> ReportId:
+ return cls(json)
+
+ def __repr__(self):
+ return 'ReportId({})'.format(super().__repr__())
+
+
+@dataclass
+class ReportingApiReport:
+ '''
+ An object representing a report generated by the Reporting API.
+ '''
+ id_: ReportId
+
+ #: The URL of the document that triggered the report.
+ initiator_url: str
+
+ #: The name of the endpoint group that should be used to deliver the report.
+ destination: str
+
+ #: The type of the report (specifies the set of data that is contained in the report body).
+ type_: str
+
+ #: When the report was generated.
+ timestamp: TimeSinceEpoch
+
+ #: How many uploads deep the related request was.
+ depth: int
+
+ #: The number of delivery attempts made so far, not including an active attempt.
+ completed_attempts: int
+
+ body: dict
+
+ status: ReportStatus
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['id'] = self.id_.to_json()
+ json['initiatorUrl'] = self.initiator_url
+ json['destination'] = self.destination
+ json['type'] = self.type_
+ json['timestamp'] = self.timestamp.to_json()
+ json['depth'] = self.depth
+ json['completedAttempts'] = self.completed_attempts
+ json['body'] = self.body
+ json['status'] = self.status.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> ReportingApiReport:
+ return cls(
+ id_=ReportId.from_json(json['id']),
+ initiator_url=str(json['initiatorUrl']),
+ destination=str(json['destination']),
+ type_=str(json['type']),
+ timestamp=TimeSinceEpoch.from_json(json['timestamp']),
+ depth=int(json['depth']),
+ completed_attempts=int(json['completedAttempts']),
+ body=dict(json['body']),
+ status=ReportStatus.from_json(json['status']),
+ )
+
+
+@dataclass
+class ReportingApiEndpoint:
+ #: The URL of the endpoint to which reports may be delivered.
+ url: str
+
+ #: Name of the endpoint group.
+ group_name: str
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['url'] = self.url
+ json['groupName'] = self.group_name
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> ReportingApiEndpoint:
+ return cls(
+ url=str(json['url']),
+ group_name=str(json['groupName']),
+ )
+
+
+@dataclass
+class LoadNetworkResourcePageResult:
+ '''
+ An object providing the result of a network resource load.
+ '''
+ success: bool
+
+ #: Optional values used for error reporting.
+ net_error: typing.Optional[float] = None
+
+ net_error_name: typing.Optional[str] = None
+
+ http_status_code: typing.Optional[float] = None
+
+ #: If successful, one of the following two fields holds the result.
+ stream: typing.Optional[io.StreamHandle] = None
+
+ #: Response headers.
+ headers: typing.Optional[Headers] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['success'] = self.success
+ if self.net_error is not None:
+ json['netError'] = self.net_error
+ if self.net_error_name is not None:
+ json['netErrorName'] = self.net_error_name
+ if self.http_status_code is not None:
+ json['httpStatusCode'] = self.http_status_code
+ if self.stream is not None:
+ json['stream'] = self.stream.to_json()
+ if self.headers is not None:
+ json['headers'] = self.headers.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> LoadNetworkResourcePageResult:
+ return cls(
+ success=bool(json['success']),
+ net_error=float(json['netError']) if 'netError' in json else None,
+ net_error_name=str(json['netErrorName']) if 'netErrorName' in json else None,
+ http_status_code=float(json['httpStatusCode']) if 'httpStatusCode' in json else None,
+ stream=io.StreamHandle.from_json(json['stream']) if 'stream' in json else None,
+ headers=Headers.from_json(json['headers']) if 'headers' in json else None,
+ )
+
+
+@dataclass
+class LoadNetworkResourceOptions:
+ '''
+ An options object that may be extended later to better support CORS,
+ CORB and streaming.
+ '''
+ disable_cache: bool
+
+ include_credentials: bool
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['disableCache'] = self.disable_cache
+ json['includeCredentials'] = self.include_credentials
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> LoadNetworkResourceOptions:
+ return cls(
+ disable_cache=bool(json['disableCache']),
+ include_credentials=bool(json['includeCredentials']),
+ )
+
+
+def set_accepted_encodings(
+ encodings: typing.List[ContentEncoding]
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Sets a list of content encodings that will be accepted. Empty list means no encoding is accepted.
+
+ **EXPERIMENTAL**
+
+ :param encodings: List of accepted content encodings.
+ '''
+ params: T_JSON_DICT = dict()
+ params['encodings'] = [i.to_json() for i in encodings]
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Network.setAcceptedEncodings',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def clear_accepted_encodings_override() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Clears accepted encodings set by setAcceptedEncodings
+
+ **EXPERIMENTAL**
+ '''
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Network.clearAcceptedEncodingsOverride',
+ }
+ json = yield cmd_dict
+
+
+@deprecated(version="1.3")
+def can_clear_browser_cache() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,bool]:
+ '''
+ Tells whether clearing browser cache is supported.
+
+ .. deprecated:: 1.3
+
+ :returns: True if browser cache can be cleared.
+ '''
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Network.canClearBrowserCache',
+ }
+ json = yield cmd_dict
+ return bool(json['result'])
+
+
+@deprecated(version="1.3")
+def can_clear_browser_cookies() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,bool]:
+ '''
+ Tells whether clearing browser cookies is supported.
+
+ .. deprecated:: 1.3
+
+ :returns: True if browser cookies can be cleared.
+ '''
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Network.canClearBrowserCookies',
+ }
+ json = yield cmd_dict
+ return bool(json['result'])
+
+
+@deprecated(version="1.3")
+def can_emulate_network_conditions() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,bool]:
+ '''
+ Tells whether emulation of network conditions is supported.
+
+ .. deprecated:: 1.3
+
+ :returns: True if emulation of network conditions is supported.
+ '''
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Network.canEmulateNetworkConditions',
+ }
+ json = yield cmd_dict
+ return bool(json['result'])
+
+
+def clear_browser_cache() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Clears browser cache.
+ '''
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Network.clearBrowserCache',
+ }
+ json = yield cmd_dict
+
+
+def clear_browser_cookies() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Clears browser cookies.
+ '''
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Network.clearBrowserCookies',
+ }
+ json = yield cmd_dict
+
+
+@deprecated(version="1.3")
+def continue_intercepted_request(
+ interception_id: InterceptionId,
+ error_reason: typing.Optional[ErrorReason] = None,
+ raw_response: typing.Optional[str] = None,
+ url: typing.Optional[str] = None,
+ method: typing.Optional[str] = None,
+ post_data: typing.Optional[str] = None,
+ headers: typing.Optional[Headers] = None,
+ auth_challenge_response: typing.Optional[AuthChallengeResponse] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Response to Network.requestIntercepted which either modifies the request to continue with any
+ modifications, or blocks it, or completes it with the provided response bytes. If a network
+ fetch occurs as a result which encounters a redirect an additional Network.requestIntercepted
+ event will be sent with the same InterceptionId.
+ Deprecated, use Fetch.continueRequest, Fetch.fulfillRequest and Fetch.failRequest instead.
+
+ .. deprecated:: 1.3
+
+ **EXPERIMENTAL**
+
+ :param interception_id:
+    :param error_reason: *(Optional)* If set this causes the request to fail with the given reason. Passing ``Aborted`` for requests marked with ``isNavigationRequest`` also cancels the navigation. Must not be set in response to an authChallenge.
+    :param raw_response: *(Optional)* If set the request completes using the provided base64 encoded raw response, including HTTP status line and headers etc... Must not be set in response to an authChallenge. (Encoded as a base64 string when passed over JSON)
+ :param url: *(Optional)* If set the request url will be modified in a way that's not observable by page. Must not be set in response to an authChallenge.
+ :param method: *(Optional)* If set this allows the request method to be overridden. Must not be set in response to an authChallenge.
+ :param post_data: *(Optional)* If set this allows postData to be set. Must not be set in response to an authChallenge.
+ :param headers: *(Optional)* If set this allows the request headers to be changed. Must not be set in response to an authChallenge.
+ :param auth_challenge_response: *(Optional)* Response to a requestIntercepted with an authChallenge. Must not be set otherwise.
+ '''
+ params: T_JSON_DICT = dict()
+ params['interceptionId'] = interception_id.to_json()
+ if error_reason is not None:
+ params['errorReason'] = error_reason.to_json()
+ if raw_response is not None:
+ params['rawResponse'] = raw_response
+ if url is not None:
+ params['url'] = url
+ if method is not None:
+ params['method'] = method
+ if post_data is not None:
+ params['postData'] = post_data
+ if headers is not None:
+ params['headers'] = headers.to_json()
+ if auth_challenge_response is not None:
+ params['authChallengeResponse'] = auth_challenge_response.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Network.continueInterceptedRequest',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def delete_cookies(
+ name: str,
+ url: typing.Optional[str] = None,
+ domain: typing.Optional[str] = None,
+ path: typing.Optional[str] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Deletes browser cookies with matching name and url or domain/path pair.
+
+ :param name: Name of the cookies to remove.
:param url: *(Optional)* If specified, deletes all the cookies with the given name where domain and path match provided URL.
:param domain: *(Optional)* If specified, deletes only cookies with the exact domain.
:param path: *(Optional)* If specified, deletes only cookies with the exact path.
@@ -1681,7 +2416,7 @@ def get_cookies(
Returns all browser cookies for the current URL. Depending on the backend support, will return
detailed cookie information in the ``cookies`` field.
- :param urls: *(Optional)* The list of URLs for which applicable cookies will be fetched
+ :param urls: *(Optional)* The list of URLs for which applicable cookies will be fetched. If not specified, it's assumed to be set to the list containing the URLs of the page and all of its subframes.
:returns: Array of cookie objects.
'''
params: T_JSON_DICT = dict()
@@ -1907,21 +2642,31 @@ def set_cookie(
secure: typing.Optional[bool] = None,
http_only: typing.Optional[bool] = None,
same_site: typing.Optional[CookieSameSite] = None,
- expires: typing.Optional[TimeSinceEpoch] = None
+ expires: typing.Optional[TimeSinceEpoch] = None,
+ priority: typing.Optional[CookiePriority] = None,
+ same_party: typing.Optional[bool] = None,
+ source_scheme: typing.Optional[CookieSourceScheme] = None,
+ source_port: typing.Optional[int] = None,
+ partition_key: typing.Optional[str] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,bool]:
'''
Sets a cookie with the given cookie data; may overwrite equivalent cookies if they exist.
:param name: Cookie name.
:param value: Cookie value.
- :param url: *(Optional)* The request-URI to associate with the setting of the cookie. This value can affect the default domain and path values of the created cookie.
+ :param url: *(Optional)* The request-URI to associate with the setting of the cookie. This value can affect the default domain, path, source port, and source scheme values of the created cookie.
:param domain: *(Optional)* Cookie domain.
:param path: *(Optional)* Cookie path.
:param secure: *(Optional)* True if cookie is secure.
:param http_only: *(Optional)* True if cookie is http-only.
:param same_site: *(Optional)* Cookie SameSite type.
:param expires: *(Optional)* Cookie expiration date, session cookie if not set
- :returns: True if successfully set cookie.
+ :param priority: **(EXPERIMENTAL)** *(Optional)* Cookie Priority type.
+ :param same_party: **(EXPERIMENTAL)** *(Optional)* True if cookie is SameParty.
+ :param source_scheme: **(EXPERIMENTAL)** *(Optional)* Cookie source scheme type.
+ :param source_port: **(EXPERIMENTAL)** *(Optional)* Cookie source port. Valid values are {-1, [1, 65535]}, -1 indicates an unspecified port. An unspecified port value allows protocol clients to emulate legacy cookie scope for the port. This is a temporary ability and it will be removed in the future.
+ :param partition_key: **(EXPERIMENTAL)** *(Optional)* Cookie partition key. The site of the top-level URL the browser was visiting at the start of the request to the endpoint that set the cookie. If not set, the cookie will be set as not partitioned.
+ :returns: Always set to true. If an error occurs, the response indicates protocol error.
'''
params: T_JSON_DICT = dict()
params['name'] = name
@@ -1940,6 +2685,16 @@ def set_cookie(
params['sameSite'] = same_site.to_json()
if expires is not None:
params['expires'] = expires.to_json()
+ if priority is not None:
+ params['priority'] = priority.to_json()
+ if same_party is not None:
+ params['sameParty'] = same_party
+ if source_scheme is not None:
+ params['sourceScheme'] = source_scheme.to_json()
+ if source_port is not None:
+ params['sourcePort'] = source_port
+ if partition_key is not None:
+ params['partitionKey'] = partition_key
cmd_dict: T_JSON_DICT = {
'method': 'Network.setCookie',
'params': params,
@@ -1965,40 +2720,37 @@ def set_cookies(
json = yield cmd_dict
-def set_data_size_limits_for_test(
- max_total_size: int,
- max_resource_size: int
+def set_extra_http_headers(
+ headers: Headers
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
- For testing.
-
- **EXPERIMENTAL**
+ Specifies whether to always send extra HTTP headers with the requests from this page.
- :param max_total_size: Maximum total buffer size.
- :param max_resource_size: Maximum per-resource size.
+ :param headers: Map with extra HTTP headers.
'''
params: T_JSON_DICT = dict()
- params['maxTotalSize'] = max_total_size
- params['maxResourceSize'] = max_resource_size
+ params['headers'] = headers.to_json()
cmd_dict: T_JSON_DICT = {
- 'method': 'Network.setDataSizeLimitsForTest',
+ 'method': 'Network.setExtraHTTPHeaders',
'params': params,
}
json = yield cmd_dict
-def set_extra_http_headers(
- headers: Headers
+def set_attach_debug_stack(
+ enabled: bool
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
- Specifies whether to always send extra HTTP headers with the requests from this page.
+ Specifies whether to attach a page script stack id in requests
- :param headers: Map with extra HTTP headers.
+ **EXPERIMENTAL**
+
+ :param enabled: Whether to attach a page script stack for debugging purpose.
'''
params: T_JSON_DICT = dict()
- params['headers'] = headers.to_json()
+ params['enabled'] = enabled
cmd_dict: T_JSON_DICT = {
- 'method': 'Network.setExtraHTTPHeaders',
+ 'method': 'Network.setAttachDebugStack',
'params': params,
}
json = yield cmd_dict
@@ -2030,7 +2782,8 @@ def set_request_interception(
def set_user_agent_override(
user_agent: str,
accept_language: typing.Optional[str] = None,
- platform: typing.Optional[str] = None
+ platform: typing.Optional[str] = None,
+ user_agent_metadata: typing.Optional[emulation.UserAgentMetadata] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Allows overriding user agent with the given string.
@@ -2038,6 +2791,7 @@ def set_user_agent_override(
:param user_agent: User agent to use.
:param accept_language: *(Optional)* Browser langugage to emulate.
:param platform: *(Optional)* The platform navigator.platform should return.
+ :param user_agent_metadata: **(EXPERIMENTAL)** *(Optional)* To be sent in Sec-CH-UA-* headers and returned in navigator.userAgentData
'''
params: T_JSON_DICT = dict()
params['userAgent'] = user_agent
@@ -2045,6 +2799,8 @@ def set_user_agent_override(
params['acceptLanguage'] = accept_language
if platform is not None:
params['platform'] = platform
+ if user_agent_metadata is not None:
+ params['userAgentMetadata'] = user_agent_metadata.to_json()
cmd_dict: T_JSON_DICT = {
'method': 'Network.setUserAgentOverride',
'params': params,
@@ -2052,6 +2808,76 @@ def set_user_agent_override(
json = yield cmd_dict
+def get_security_isolation_status(
+ frame_id: typing.Optional[page.FrameId] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,SecurityIsolationStatus]:
+ '''
+ Returns information about the COEP/COOP isolation status.
+
+ **EXPERIMENTAL**
+
+ :param frame_id: *(Optional)* If no frameId is provided, the status of the target is provided.
+ :returns:
+ '''
+ params: T_JSON_DICT = dict()
+ if frame_id is not None:
+ params['frameId'] = frame_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Network.getSecurityIsolationStatus',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return SecurityIsolationStatus.from_json(json['status'])
+
+
+def enable_reporting_api(
+ enable: bool
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Enables tracking for the Reporting API, events generated by the Reporting API will now be delivered to the client.
+ Enabling triggers 'reportingApiReportAdded' for all existing reports.
+
+ **EXPERIMENTAL**
+
+ :param enable: Whether to enable or disable events for the Reporting API
+ '''
+ params: T_JSON_DICT = dict()
+ params['enable'] = enable
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Network.enableReportingApi',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def load_network_resource(
+ url: str,
+ options: LoadNetworkResourceOptions,
+ frame_id: typing.Optional[page.FrameId] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,LoadNetworkResourcePageResult]:
+ '''
+ Fetches the resource and returns the content.
+
+ **EXPERIMENTAL**
+
+ :param frame_id: *(Optional)* Frame id to get the resource for. Mandatory for frame targets, and should be omitted for worker targets.
+ :param url: URL of the resource to get content for.
+ :param options: Options for the request.
+ :returns:
+ '''
+ params: T_JSON_DICT = dict()
+ if frame_id is not None:
+ params['frameId'] = frame_id.to_json()
+ params['url'] = url
+ params['options'] = options.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Network.loadNetworkResource',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return LoadNetworkResourcePageResult.from_json(json['resource'])
+
+
@event_class('Network.dataReceived')
@dataclass
class DataReceived:
@@ -2123,6 +2949,8 @@ class LoadingFailed:
canceled: typing.Optional[bool]
#: The reason why loading was blocked, if any.
blocked_reason: typing.Optional[BlockedReason]
+ #: The reason why loading was blocked by CORS, if any.
+ cors_error_status: typing.Optional[CorsErrorStatus]
@classmethod
def from_json(cls, json: T_JSON_DICT) -> LoadingFailed:
@@ -2132,7 +2960,8 @@ def from_json(cls, json: T_JSON_DICT) -> LoadingFailed:
type_=ResourceType.from_json(json['type']),
error_text=str(json['errorText']),
canceled=bool(json['canceled']) if 'canceled' in json else None,
- blocked_reason=BlockedReason.from_json(json['blockedReason']) if 'blockedReason' in json else None
+ blocked_reason=BlockedReason.from_json(json['blockedReason']) if 'blockedReason' in json else None,
+ cors_error_status=CorsErrorStatus.from_json(json['corsErrorStatus']) if 'corsErrorStatus' in json else None
)
@@ -2259,6 +3088,10 @@ class RequestWillBeSent:
wall_time: TimeSinceEpoch
#: Request initiator.
initiator: Initiator
+ #: In the case that redirectResponse is populated, this flag indicates whether
+ #: requestWillBeSentExtraInfo and responseReceivedExtraInfo events will be or were emitted
+ #: for the request which was just redirected.
+ redirect_has_extra_info: bool
#: Redirect response data.
redirect_response: typing.Optional[Response]
#: Type of this resource.
@@ -2278,6 +3111,7 @@ def from_json(cls, json: T_JSON_DICT) -> RequestWillBeSent:
timestamp=MonotonicTime.from_json(json['timestamp']),
wall_time=TimeSinceEpoch.from_json(json['wallTime']),
initiator=Initiator.from_json(json['initiator']),
+ redirect_has_extra_info=bool(json['redirectHasExtraInfo']),
redirect_response=Response.from_json(json['redirectResponse']) if 'redirectResponse' in json else None,
type_=ResourceType.from_json(json['type']) if 'type' in json else None,
frame_id=page.FrameId.from_json(json['frameId']) if 'frameId' in json else None,
@@ -2346,6 +3180,9 @@ class ResponseReceived:
type_: ResourceType
#: Response data.
response: Response
+ #: Indicates whether requestWillBeSentExtraInfo and responseReceivedExtraInfo events will be
+ #: or were emitted for this request.
+ has_extra_info: bool
#: Frame identifier.
frame_id: typing.Optional[page.FrameId]
@@ -2357,6 +3194,7 @@ def from_json(cls, json: T_JSON_DICT) -> ResponseReceived:
timestamp=MonotonicTime.from_json(json['timestamp']),
type_=ResourceType.from_json(json['type']),
response=Response.from_json(json['response']),
+ has_extra_info=bool(json['hasExtraInfo']),
frame_id=page.FrameId.from_json(json['frameId']) if 'frameId' in json else None
)
@@ -2515,6 +3353,69 @@ def from_json(cls, json: T_JSON_DICT) -> WebSocketWillSendHandshakeRequest:
)
+@event_class('Network.webTransportCreated')
+@dataclass
+class WebTransportCreated:
+ '''
+ Fired upon WebTransport creation.
+ '''
+ #: WebTransport identifier.
+ transport_id: RequestId
+ #: WebTransport request URL.
+ url: str
+ #: Timestamp.
+ timestamp: MonotonicTime
+ #: Request initiator.
+ initiator: typing.Optional[Initiator]
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> WebTransportCreated:
+ return cls(
+ transport_id=RequestId.from_json(json['transportId']),
+ url=str(json['url']),
+ timestamp=MonotonicTime.from_json(json['timestamp']),
+ initiator=Initiator.from_json(json['initiator']) if 'initiator' in json else None
+ )
+
+
+@event_class('Network.webTransportConnectionEstablished')
+@dataclass
+class WebTransportConnectionEstablished:
+ '''
+ Fired when WebTransport handshake is finished.
+ '''
+ #: WebTransport identifier.
+ transport_id: RequestId
+ #: Timestamp.
+ timestamp: MonotonicTime
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> WebTransportConnectionEstablished:
+ return cls(
+ transport_id=RequestId.from_json(json['transportId']),
+ timestamp=MonotonicTime.from_json(json['timestamp'])
+ )
+
+
+@event_class('Network.webTransportClosed')
+@dataclass
+class WebTransportClosed:
+ '''
+ Fired when WebTransport is disposed.
+ '''
+ #: WebTransport identifier.
+ transport_id: RequestId
+ #: Timestamp.
+ timestamp: MonotonicTime
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> WebTransportClosed:
+ return cls(
+ transport_id=RequestId.from_json(json['transportId']),
+ timestamp=MonotonicTime.from_json(json['timestamp'])
+ )
+
+
@event_class('Network.requestWillBeSentExtraInfo')
@dataclass
class RequestWillBeSentExtraInfo:
@@ -2528,18 +3429,24 @@ class RequestWillBeSentExtraInfo:
'''
#: Request identifier. Used to match this information to an existing requestWillBeSent event.
request_id: RequestId
- #: A list of cookies which will not be sent with this request along with corresponding reasons
- #: for blocking.
- blocked_cookies: typing.List[BlockedCookieWithReason]
+ #: A list of cookies potentially associated to the requested URL. This includes both cookies sent with
+ #: the request and the ones not sent; the latter are distinguished by having blockedReason field set.
+ associated_cookies: typing.List[BlockedCookieWithReason]
#: Raw request headers as they will be sent over the wire.
headers: Headers
+ #: Connection timing information for the request.
+ connect_timing: ConnectTiming
+ #: The client security state set for the request.
+ client_security_state: typing.Optional[ClientSecurityState]
@classmethod
def from_json(cls, json: T_JSON_DICT) -> RequestWillBeSentExtraInfo:
return cls(
request_id=RequestId.from_json(json['requestId']),
- blocked_cookies=[BlockedCookieWithReason.from_json(i) for i in json['blockedCookies']],
- headers=Headers.from_json(json['headers'])
+ associated_cookies=[BlockedCookieWithReason.from_json(i) for i in json['associatedCookies']],
+ headers=Headers.from_json(json['headers']),
+ connect_timing=ConnectTiming.from_json(json['connectTiming']),
+ client_security_state=ClientSecurityState.from_json(json['clientSecurityState']) if 'clientSecurityState' in json else None
)
@@ -2561,6 +3468,13 @@ class ResponseReceivedExtraInfo:
blocked_cookies: typing.List[BlockedSetCookieWithReason]
#: Raw response headers as they were received over the wire.
headers: Headers
+ #: The IP address space of the resource. The address space can only be determined once the transport
+ #: established the connection, so we can't send it in ``requestWillBeSentExtraInfo``.
+ resource_ip_address_space: IPAddressSpace
+ #: The status code of the response. This is useful in cases the request failed and no responseReceived
+ #: event is triggered, which is the case for, e.g., CORS errors. This is also the correct status code
+ #: for cached requests, where the status in responseReceived is a 200 and this will be 304.
+ status_code: int
#: Raw response header text as it was received over the wire. The raw text may not always be
#: available, such as in the case of HTTP/2 or QUIC.
headers_text: typing.Optional[str]
@@ -2571,5 +3485,198 @@ def from_json(cls, json: T_JSON_DICT) -> ResponseReceivedExtraInfo:
request_id=RequestId.from_json(json['requestId']),
blocked_cookies=[BlockedSetCookieWithReason.from_json(i) for i in json['blockedCookies']],
headers=Headers.from_json(json['headers']),
+ resource_ip_address_space=IPAddressSpace.from_json(json['resourceIPAddressSpace']),
+ status_code=int(json['statusCode']),
headers_text=str(json['headersText']) if 'headersText' in json else None
)
+
+
+@event_class('Network.trustTokenOperationDone')
+@dataclass
+class TrustTokenOperationDone:
+ '''
+ **EXPERIMENTAL**
+
+ Fired exactly once for each Trust Token operation. Depending on
+ the type of the operation and whether the operation succeeded or
+ failed, the event is fired before the corresponding request was sent
+ or after the response was received.
+ '''
+ #: Detailed success or error status of the operation.
+ #: 'AlreadyExists' also signifies a successful operation, as the result
+    #: of the operation already exists and thus, the operation was aborted
+ #: preemptively (e.g. a cache hit).
+ status: str
+ type_: TrustTokenOperationType
+ request_id: RequestId
+ #: Top level origin. The context in which the operation was attempted.
+ top_level_origin: typing.Optional[str]
+ #: Origin of the issuer in case of a "Issuance" or "Redemption" operation.
+ issuer_origin: typing.Optional[str]
+ #: The number of obtained Trust Tokens on a successful "Issuance" operation.
+ issued_token_count: typing.Optional[int]
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> TrustTokenOperationDone:
+ return cls(
+ status=str(json['status']),
+ type_=TrustTokenOperationType.from_json(json['type']),
+ request_id=RequestId.from_json(json['requestId']),
+ top_level_origin=str(json['topLevelOrigin']) if 'topLevelOrigin' in json else None,
+ issuer_origin=str(json['issuerOrigin']) if 'issuerOrigin' in json else None,
+ issued_token_count=int(json['issuedTokenCount']) if 'issuedTokenCount' in json else None
+ )
+
+
+@event_class('Network.subresourceWebBundleMetadataReceived')
+@dataclass
+class SubresourceWebBundleMetadataReceived:
+ '''
+ **EXPERIMENTAL**
+
+ Fired once when parsing the .wbn file has succeeded.
+ The event contains the information about the web bundle contents.
+ '''
+ #: Request identifier. Used to match this information to another event.
+ request_id: RequestId
+ #: A list of URLs of resources in the subresource Web Bundle.
+ urls: typing.List[str]
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> SubresourceWebBundleMetadataReceived:
+ return cls(
+ request_id=RequestId.from_json(json['requestId']),
+ urls=[str(i) for i in json['urls']]
+ )
+
+
+@event_class('Network.subresourceWebBundleMetadataError')
+@dataclass
+class SubresourceWebBundleMetadataError:
+ '''
+ **EXPERIMENTAL**
+
+ Fired once when parsing the .wbn file has failed.
+ '''
+ #: Request identifier. Used to match this information to another event.
+ request_id: RequestId
+ #: Error message
+ error_message: str
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> SubresourceWebBundleMetadataError:
+ return cls(
+ request_id=RequestId.from_json(json['requestId']),
+ error_message=str(json['errorMessage'])
+ )
+
+
+@event_class('Network.subresourceWebBundleInnerResponseParsed')
+@dataclass
+class SubresourceWebBundleInnerResponseParsed:
+ '''
+ **EXPERIMENTAL**
+
+ Fired when handling requests for resources within a .wbn file.
+ Note: this will only be fired for resources that are requested by the webpage.
+ '''
+ #: Request identifier of the subresource request
+ inner_request_id: RequestId
+ #: URL of the subresource resource.
+ inner_request_url: str
+ #: Bundle request identifier. Used to match this information to another event.
+    #: This may be absent in cases when the instrumentation was enabled only
+ #: after webbundle was parsed.
+ bundle_request_id: typing.Optional[RequestId]
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> SubresourceWebBundleInnerResponseParsed:
+ return cls(
+ inner_request_id=RequestId.from_json(json['innerRequestId']),
+ inner_request_url=str(json['innerRequestURL']),
+ bundle_request_id=RequestId.from_json(json['bundleRequestId']) if 'bundleRequestId' in json else None
+ )
+
+
+@event_class('Network.subresourceWebBundleInnerResponseError')
+@dataclass
+class SubresourceWebBundleInnerResponseError:
+ '''
+ **EXPERIMENTAL**
+
+ Fired when request for resources within a .wbn file failed.
+ '''
+ #: Request identifier of the subresource request
+ inner_request_id: RequestId
+ #: URL of the subresource resource.
+ inner_request_url: str
+ #: Error message
+ error_message: str
+ #: Bundle request identifier. Used to match this information to another event.
+    #: This may be absent in cases when the instrumentation was enabled only
+ #: after webbundle was parsed.
+ bundle_request_id: typing.Optional[RequestId]
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> SubresourceWebBundleInnerResponseError:
+ return cls(
+ inner_request_id=RequestId.from_json(json['innerRequestId']),
+ inner_request_url=str(json['innerRequestURL']),
+ error_message=str(json['errorMessage']),
+ bundle_request_id=RequestId.from_json(json['bundleRequestId']) if 'bundleRequestId' in json else None
+ )
+
+
+@event_class('Network.reportingApiReportAdded')
+@dataclass
+class ReportingApiReportAdded:
+ '''
+ **EXPERIMENTAL**
+
+ Is sent whenever a new report is added.
+ And after 'enableReportingApi' for all existing reports.
+ '''
+ report: ReportingApiReport
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> ReportingApiReportAdded:
+ return cls(
+ report=ReportingApiReport.from_json(json['report'])
+ )
+
+
+@event_class('Network.reportingApiReportUpdated')
+@dataclass
+class ReportingApiReportUpdated:
+ '''
+ **EXPERIMENTAL**
+
+
+ '''
+ report: ReportingApiReport
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> ReportingApiReportUpdated:
+ return cls(
+ report=ReportingApiReport.from_json(json['report'])
+ )
+
+
+@event_class('Network.reportingApiEndpointsChangedForOrigin')
+@dataclass
+class ReportingApiEndpointsChangedForOrigin:
+ '''
+ **EXPERIMENTAL**
+
+
+ '''
+ #: Origin of the document(s) which configured the endpoints.
+ origin: str
+ endpoints: typing.List[ReportingApiEndpoint]
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> ReportingApiEndpointsChangedForOrigin:
+ return cls(
+ origin=str(json['origin']),
+ endpoints=[ReportingApiEndpoint.from_json(i) for i in json['endpoints']]
+ )
diff --git a/cdp/overlay.py b/cdp/overlay.py
index 38229b8..31b788e 100644
--- a/cdp/overlay.py
+++ b/cdp/overlay.py
@@ -14,6 +14,330 @@
from . import dom
from . import page
from . import runtime
+from deprecated.sphinx import deprecated # type: ignore
+
+
+@dataclass
+class SourceOrderConfig:
+ '''
+ Configuration data for drawing the source order of an elements children.
+ '''
+    #: the color to outline the given element in.
+ parent_outline_color: dom.RGBA
+
+ #: the color to outline the child elements in.
+ child_outline_color: dom.RGBA
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['parentOutlineColor'] = self.parent_outline_color.to_json()
+ json['childOutlineColor'] = self.child_outline_color.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> SourceOrderConfig:
+ return cls(
+ parent_outline_color=dom.RGBA.from_json(json['parentOutlineColor']),
+ child_outline_color=dom.RGBA.from_json(json['childOutlineColor']),
+ )
+
+
+@dataclass
+class GridHighlightConfig:
+ '''
+ Configuration data for the highlighting of Grid elements.
+ '''
+ #: Whether the extension lines from grid cells to the rulers should be shown (default: false).
+ show_grid_extension_lines: typing.Optional[bool] = None
+
+ #: Show Positive line number labels (default: false).
+ show_positive_line_numbers: typing.Optional[bool] = None
+
+ #: Show Negative line number labels (default: false).
+ show_negative_line_numbers: typing.Optional[bool] = None
+
+ #: Show area name labels (default: false).
+ show_area_names: typing.Optional[bool] = None
+
+ #: Show line name labels (default: false).
+ show_line_names: typing.Optional[bool] = None
+
+ #: Show track size labels (default: false).
+ show_track_sizes: typing.Optional[bool] = None
+
+ #: The grid container border highlight color (default: transparent).
+ grid_border_color: typing.Optional[dom.RGBA] = None
+
+ #: The cell border color (default: transparent). Deprecated, please use rowLineColor and columnLineColor instead.
+ cell_border_color: typing.Optional[dom.RGBA] = None
+
+ #: The row line color (default: transparent).
+ row_line_color: typing.Optional[dom.RGBA] = None
+
+ #: The column line color (default: transparent).
+ column_line_color: typing.Optional[dom.RGBA] = None
+
+ #: Whether the grid border is dashed (default: false).
+ grid_border_dash: typing.Optional[bool] = None
+
+    #: Whether the cell border is dashed (default: false). Deprecated, please use rowLineDash and columnLineDash instead.
+ cell_border_dash: typing.Optional[bool] = None
+
+ #: Whether row lines are dashed (default: false).
+ row_line_dash: typing.Optional[bool] = None
+
+ #: Whether column lines are dashed (default: false).
+ column_line_dash: typing.Optional[bool] = None
+
+ #: The row gap highlight fill color (default: transparent).
+ row_gap_color: typing.Optional[dom.RGBA] = None
+
+ #: The row gap hatching fill color (default: transparent).
+ row_hatch_color: typing.Optional[dom.RGBA] = None
+
+ #: The column gap highlight fill color (default: transparent).
+ column_gap_color: typing.Optional[dom.RGBA] = None
+
+ #: The column gap hatching fill color (default: transparent).
+ column_hatch_color: typing.Optional[dom.RGBA] = None
+
+ #: The named grid areas border color (Default: transparent).
+ area_border_color: typing.Optional[dom.RGBA] = None
+
+ #: The grid container background color (Default: transparent).
+ grid_background_color: typing.Optional[dom.RGBA] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ if self.show_grid_extension_lines is not None:
+ json['showGridExtensionLines'] = self.show_grid_extension_lines
+ if self.show_positive_line_numbers is not None:
+ json['showPositiveLineNumbers'] = self.show_positive_line_numbers
+ if self.show_negative_line_numbers is not None:
+ json['showNegativeLineNumbers'] = self.show_negative_line_numbers
+ if self.show_area_names is not None:
+ json['showAreaNames'] = self.show_area_names
+ if self.show_line_names is not None:
+ json['showLineNames'] = self.show_line_names
+ if self.show_track_sizes is not None:
+ json['showTrackSizes'] = self.show_track_sizes
+ if self.grid_border_color is not None:
+ json['gridBorderColor'] = self.grid_border_color.to_json()
+ if self.cell_border_color is not None:
+ json['cellBorderColor'] = self.cell_border_color.to_json()
+ if self.row_line_color is not None:
+ json['rowLineColor'] = self.row_line_color.to_json()
+ if self.column_line_color is not None:
+ json['columnLineColor'] = self.column_line_color.to_json()
+ if self.grid_border_dash is not None:
+ json['gridBorderDash'] = self.grid_border_dash
+ if self.cell_border_dash is not None:
+ json['cellBorderDash'] = self.cell_border_dash
+ if self.row_line_dash is not None:
+ json['rowLineDash'] = self.row_line_dash
+ if self.column_line_dash is not None:
+ json['columnLineDash'] = self.column_line_dash
+ if self.row_gap_color is not None:
+ json['rowGapColor'] = self.row_gap_color.to_json()
+ if self.row_hatch_color is not None:
+ json['rowHatchColor'] = self.row_hatch_color.to_json()
+ if self.column_gap_color is not None:
+ json['columnGapColor'] = self.column_gap_color.to_json()
+ if self.column_hatch_color is not None:
+ json['columnHatchColor'] = self.column_hatch_color.to_json()
+ if self.area_border_color is not None:
+ json['areaBorderColor'] = self.area_border_color.to_json()
+ if self.grid_background_color is not None:
+ json['gridBackgroundColor'] = self.grid_background_color.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> GridHighlightConfig:
+ return cls(
+ show_grid_extension_lines=bool(json['showGridExtensionLines']) if 'showGridExtensionLines' in json else None,
+ show_positive_line_numbers=bool(json['showPositiveLineNumbers']) if 'showPositiveLineNumbers' in json else None,
+ show_negative_line_numbers=bool(json['showNegativeLineNumbers']) if 'showNegativeLineNumbers' in json else None,
+ show_area_names=bool(json['showAreaNames']) if 'showAreaNames' in json else None,
+ show_line_names=bool(json['showLineNames']) if 'showLineNames' in json else None,
+ show_track_sizes=bool(json['showTrackSizes']) if 'showTrackSizes' in json else None,
+ grid_border_color=dom.RGBA.from_json(json['gridBorderColor']) if 'gridBorderColor' in json else None,
+ cell_border_color=dom.RGBA.from_json(json['cellBorderColor']) if 'cellBorderColor' in json else None,
+ row_line_color=dom.RGBA.from_json(json['rowLineColor']) if 'rowLineColor' in json else None,
+ column_line_color=dom.RGBA.from_json(json['columnLineColor']) if 'columnLineColor' in json else None,
+ grid_border_dash=bool(json['gridBorderDash']) if 'gridBorderDash' in json else None,
+ cell_border_dash=bool(json['cellBorderDash']) if 'cellBorderDash' in json else None,
+ row_line_dash=bool(json['rowLineDash']) if 'rowLineDash' in json else None,
+ column_line_dash=bool(json['columnLineDash']) if 'columnLineDash' in json else None,
+ row_gap_color=dom.RGBA.from_json(json['rowGapColor']) if 'rowGapColor' in json else None,
+ row_hatch_color=dom.RGBA.from_json(json['rowHatchColor']) if 'rowHatchColor' in json else None,
+ column_gap_color=dom.RGBA.from_json(json['columnGapColor']) if 'columnGapColor' in json else None,
+ column_hatch_color=dom.RGBA.from_json(json['columnHatchColor']) if 'columnHatchColor' in json else None,
+ area_border_color=dom.RGBA.from_json(json['areaBorderColor']) if 'areaBorderColor' in json else None,
+ grid_background_color=dom.RGBA.from_json(json['gridBackgroundColor']) if 'gridBackgroundColor' in json else None,
+ )
+
+
+@dataclass
+class FlexContainerHighlightConfig:
+ '''
+ Configuration data for the highlighting of Flex container elements.
+ '''
+ #: The style of the container border
+ container_border: typing.Optional[LineStyle] = None
+
+ #: The style of the separator between lines
+ line_separator: typing.Optional[LineStyle] = None
+
+ #: The style of the separator between items
+ item_separator: typing.Optional[LineStyle] = None
+
+ #: Style of content-distribution space on the main axis (justify-content).
+ main_distributed_space: typing.Optional[BoxStyle] = None
+
+ #: Style of content-distribution space on the cross axis (align-content).
+ cross_distributed_space: typing.Optional[BoxStyle] = None
+
+ #: Style of empty space caused by row gaps (gap/row-gap).
+ row_gap_space: typing.Optional[BoxStyle] = None
+
+ #: Style of empty space caused by columns gaps (gap/column-gap).
+ column_gap_space: typing.Optional[BoxStyle] = None
+
+ #: Style of the self-alignment line (align-items).
+ cross_alignment: typing.Optional[LineStyle] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ if self.container_border is not None:
+ json['containerBorder'] = self.container_border.to_json()
+ if self.line_separator is not None:
+ json['lineSeparator'] = self.line_separator.to_json()
+ if self.item_separator is not None:
+ json['itemSeparator'] = self.item_separator.to_json()
+ if self.main_distributed_space is not None:
+ json['mainDistributedSpace'] = self.main_distributed_space.to_json()
+ if self.cross_distributed_space is not None:
+ json['crossDistributedSpace'] = self.cross_distributed_space.to_json()
+ if self.row_gap_space is not None:
+ json['rowGapSpace'] = self.row_gap_space.to_json()
+ if self.column_gap_space is not None:
+ json['columnGapSpace'] = self.column_gap_space.to_json()
+ if self.cross_alignment is not None:
+ json['crossAlignment'] = self.cross_alignment.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> FlexContainerHighlightConfig:
+ return cls(
+ container_border=LineStyle.from_json(json['containerBorder']) if 'containerBorder' in json else None,
+ line_separator=LineStyle.from_json(json['lineSeparator']) if 'lineSeparator' in json else None,
+ item_separator=LineStyle.from_json(json['itemSeparator']) if 'itemSeparator' in json else None,
+ main_distributed_space=BoxStyle.from_json(json['mainDistributedSpace']) if 'mainDistributedSpace' in json else None,
+ cross_distributed_space=BoxStyle.from_json(json['crossDistributedSpace']) if 'crossDistributedSpace' in json else None,
+ row_gap_space=BoxStyle.from_json(json['rowGapSpace']) if 'rowGapSpace' in json else None,
+ column_gap_space=BoxStyle.from_json(json['columnGapSpace']) if 'columnGapSpace' in json else None,
+ cross_alignment=LineStyle.from_json(json['crossAlignment']) if 'crossAlignment' in json else None,
+ )
+
+
+@dataclass
+class FlexItemHighlightConfig:
+ '''
+ Configuration data for the highlighting of Flex item elements.
+ '''
+ #: Style of the box representing the item's base size
+ base_size_box: typing.Optional[BoxStyle] = None
+
+ #: Style of the border around the box representing the item's base size
+ base_size_border: typing.Optional[LineStyle] = None
+
+ #: Style of the arrow representing if the item grew or shrank
+ flexibility_arrow: typing.Optional[LineStyle] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ if self.base_size_box is not None:
+ json['baseSizeBox'] = self.base_size_box.to_json()
+ if self.base_size_border is not None:
+ json['baseSizeBorder'] = self.base_size_border.to_json()
+ if self.flexibility_arrow is not None:
+ json['flexibilityArrow'] = self.flexibility_arrow.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> FlexItemHighlightConfig:
+ return cls(
+ base_size_box=BoxStyle.from_json(json['baseSizeBox']) if 'baseSizeBox' in json else None,
+ base_size_border=LineStyle.from_json(json['baseSizeBorder']) if 'baseSizeBorder' in json else None,
+ flexibility_arrow=LineStyle.from_json(json['flexibilityArrow']) if 'flexibilityArrow' in json else None,
+ )
+
+
+@dataclass
+class LineStyle:
+ '''
+ Style information for drawing a line.
+ '''
+ #: The color of the line (default: transparent)
+ color: typing.Optional[dom.RGBA] = None
+
+ #: The line pattern (default: solid)
+ pattern: typing.Optional[str] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ if self.color is not None:
+ json['color'] = self.color.to_json()
+ if self.pattern is not None:
+ json['pattern'] = self.pattern
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> LineStyle:
+ return cls(
+ color=dom.RGBA.from_json(json['color']) if 'color' in json else None,
+ pattern=str(json['pattern']) if 'pattern' in json else None,
+ )
+
+
+@dataclass
+class BoxStyle:
+ '''
+ Style information for drawing a box.
+ '''
+ #: The background color for the box (default: transparent)
+ fill_color: typing.Optional[dom.RGBA] = None
+
+ #: The hatching color for the box (default: transparent)
+ hatch_color: typing.Optional[dom.RGBA] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ if self.fill_color is not None:
+ json['fillColor'] = self.fill_color.to_json()
+ if self.hatch_color is not None:
+ json['hatchColor'] = self.hatch_color.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> BoxStyle:
+ return cls(
+ fill_color=dom.RGBA.from_json(json['fillColor']) if 'fillColor' in json else None,
+ hatch_color=dom.RGBA.from_json(json['hatchColor']) if 'hatchColor' in json else None,
+ )
+
+
+class ContrastAlgorithm(enum.Enum):
+ AA = "aa"
+ AAA = "aaa"
+ APCA = "apca"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> ContrastAlgorithm:
+ return cls(json)
@dataclass
@@ -30,6 +354,9 @@ class HighlightConfig:
#: Whether the rulers should be shown (default: false).
show_rulers: typing.Optional[bool] = None
+ #: Whether the a11y info should be shown (default: true).
+ show_accessibility_info: typing.Optional[bool] = None
+
#: Whether the extension lines from node to the rulers should be shown (default: false).
show_extension_lines: typing.Optional[bool] = None
@@ -57,6 +384,24 @@ class HighlightConfig:
#: The grid layout color (default: transparent).
css_grid_color: typing.Optional[dom.RGBA] = None
+ #: The color format used to format color styles (default: hex).
+ color_format: typing.Optional[ColorFormat] = None
+
+ #: The grid layout highlight configuration (default: all transparent).
+ grid_highlight_config: typing.Optional[GridHighlightConfig] = None
+
+ #: The flex container highlight configuration (default: all transparent).
+ flex_container_highlight_config: typing.Optional[FlexContainerHighlightConfig] = None
+
+ #: The flex item highlight configuration (default: all transparent).
+ flex_item_highlight_config: typing.Optional[FlexItemHighlightConfig] = None
+
+ #: The contrast algorithm to use for the contrast ratio (default: aa).
+ contrast_algorithm: typing.Optional[ContrastAlgorithm] = None
+
+ #: The container query container highlight configuration (default: all transparent).
+ container_query_container_highlight_config: typing.Optional[ContainerQueryContainerHighlightConfig] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
if self.show_info is not None:
@@ -65,6 +410,8 @@ def to_json(self) -> T_JSON_DICT:
json['showStyles'] = self.show_styles
if self.show_rulers is not None:
json['showRulers'] = self.show_rulers
+ if self.show_accessibility_info is not None:
+ json['showAccessibilityInfo'] = self.show_accessibility_info
if self.show_extension_lines is not None:
json['showExtensionLines'] = self.show_extension_lines
if self.content_color is not None:
@@ -83,6 +430,18 @@ def to_json(self) -> T_JSON_DICT:
json['shapeMarginColor'] = self.shape_margin_color.to_json()
if self.css_grid_color is not None:
json['cssGridColor'] = self.css_grid_color.to_json()
+ if self.color_format is not None:
+ json['colorFormat'] = self.color_format.to_json()
+ if self.grid_highlight_config is not None:
+ json['gridHighlightConfig'] = self.grid_highlight_config.to_json()
+ if self.flex_container_highlight_config is not None:
+ json['flexContainerHighlightConfig'] = self.flex_container_highlight_config.to_json()
+ if self.flex_item_highlight_config is not None:
+ json['flexItemHighlightConfig'] = self.flex_item_highlight_config.to_json()
+ if self.contrast_algorithm is not None:
+ json['contrastAlgorithm'] = self.contrast_algorithm.to_json()
+ if self.container_query_container_highlight_config is not None:
+ json['containerQueryContainerHighlightConfig'] = self.container_query_container_highlight_config.to_json()
return json
@classmethod
@@ -91,6 +450,7 @@ def from_json(cls, json: T_JSON_DICT) -> HighlightConfig:
show_info=bool(json['showInfo']) if 'showInfo' in json else None,
show_styles=bool(json['showStyles']) if 'showStyles' in json else None,
show_rulers=bool(json['showRulers']) if 'showRulers' in json else None,
+ show_accessibility_info=bool(json['showAccessibilityInfo']) if 'showAccessibilityInfo' in json else None,
show_extension_lines=bool(json['showExtensionLines']) if 'showExtensionLines' in json else None,
content_color=dom.RGBA.from_json(json['contentColor']) if 'contentColor' in json else None,
padding_color=dom.RGBA.from_json(json['paddingColor']) if 'paddingColor' in json else None,
@@ -100,6 +460,260 @@ def from_json(cls, json: T_JSON_DICT) -> HighlightConfig:
shape_color=dom.RGBA.from_json(json['shapeColor']) if 'shapeColor' in json else None,
shape_margin_color=dom.RGBA.from_json(json['shapeMarginColor']) if 'shapeMarginColor' in json else None,
css_grid_color=dom.RGBA.from_json(json['cssGridColor']) if 'cssGridColor' in json else None,
+ color_format=ColorFormat.from_json(json['colorFormat']) if 'colorFormat' in json else None,
+ grid_highlight_config=GridHighlightConfig.from_json(json['gridHighlightConfig']) if 'gridHighlightConfig' in json else None,
+ flex_container_highlight_config=FlexContainerHighlightConfig.from_json(json['flexContainerHighlightConfig']) if 'flexContainerHighlightConfig' in json else None,
+ flex_item_highlight_config=FlexItemHighlightConfig.from_json(json['flexItemHighlightConfig']) if 'flexItemHighlightConfig' in json else None,
+ contrast_algorithm=ContrastAlgorithm.from_json(json['contrastAlgorithm']) if 'contrastAlgorithm' in json else None,
+ container_query_container_highlight_config=ContainerQueryContainerHighlightConfig.from_json(json['containerQueryContainerHighlightConfig']) if 'containerQueryContainerHighlightConfig' in json else None,
+ )
+
+
+class ColorFormat(enum.Enum):
+ RGB = "rgb"
+ HSL = "hsl"
+ HEX_ = "hex"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> ColorFormat:
+ return cls(json)
+
+
+@dataclass
+class GridNodeHighlightConfig:
+ '''
+ Configurations for Persistent Grid Highlight
+ '''
+ #: A descriptor for the highlight appearance.
+ grid_highlight_config: GridHighlightConfig
+
+ #: Identifier of the node to highlight.
+ node_id: dom.NodeId
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['gridHighlightConfig'] = self.grid_highlight_config.to_json()
+ json['nodeId'] = self.node_id.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> GridNodeHighlightConfig:
+ return cls(
+ grid_highlight_config=GridHighlightConfig.from_json(json['gridHighlightConfig']),
+ node_id=dom.NodeId.from_json(json['nodeId']),
+ )
+
+
+@dataclass
+class FlexNodeHighlightConfig:
+ #: A descriptor for the highlight appearance of flex containers.
+ flex_container_highlight_config: FlexContainerHighlightConfig
+
+ #: Identifier of the node to highlight.
+ node_id: dom.NodeId
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['flexContainerHighlightConfig'] = self.flex_container_highlight_config.to_json()
+ json['nodeId'] = self.node_id.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> FlexNodeHighlightConfig:
+ return cls(
+ flex_container_highlight_config=FlexContainerHighlightConfig.from_json(json['flexContainerHighlightConfig']),
+ node_id=dom.NodeId.from_json(json['nodeId']),
+ )
+
+
+@dataclass
+class ScrollSnapContainerHighlightConfig:
+ #: The style of the snapport border (default: transparent)
+ snapport_border: typing.Optional[LineStyle] = None
+
+ #: The style of the snap area border (default: transparent)
+ snap_area_border: typing.Optional[LineStyle] = None
+
+ #: The margin highlight fill color (default: transparent).
+ scroll_margin_color: typing.Optional[dom.RGBA] = None
+
+ #: The padding highlight fill color (default: transparent).
+ scroll_padding_color: typing.Optional[dom.RGBA] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ if self.snapport_border is not None:
+ json['snapportBorder'] = self.snapport_border.to_json()
+ if self.snap_area_border is not None:
+ json['snapAreaBorder'] = self.snap_area_border.to_json()
+ if self.scroll_margin_color is not None:
+ json['scrollMarginColor'] = self.scroll_margin_color.to_json()
+ if self.scroll_padding_color is not None:
+ json['scrollPaddingColor'] = self.scroll_padding_color.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> ScrollSnapContainerHighlightConfig:
+ return cls(
+ snapport_border=LineStyle.from_json(json['snapportBorder']) if 'snapportBorder' in json else None,
+ snap_area_border=LineStyle.from_json(json['snapAreaBorder']) if 'snapAreaBorder' in json else None,
+ scroll_margin_color=dom.RGBA.from_json(json['scrollMarginColor']) if 'scrollMarginColor' in json else None,
+ scroll_padding_color=dom.RGBA.from_json(json['scrollPaddingColor']) if 'scrollPaddingColor' in json else None,
+ )
+
+
+@dataclass
+class ScrollSnapHighlightConfig:
+ #: A descriptor for the highlight appearance of scroll snap containers.
+ scroll_snap_container_highlight_config: ScrollSnapContainerHighlightConfig
+
+ #: Identifier of the node to highlight.
+ node_id: dom.NodeId
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['scrollSnapContainerHighlightConfig'] = self.scroll_snap_container_highlight_config.to_json()
+ json['nodeId'] = self.node_id.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> ScrollSnapHighlightConfig:
+ return cls(
+ scroll_snap_container_highlight_config=ScrollSnapContainerHighlightConfig.from_json(json['scrollSnapContainerHighlightConfig']),
+ node_id=dom.NodeId.from_json(json['nodeId']),
+ )
+
+
+@dataclass
+class HingeConfig:
+ '''
+ Configuration for dual screen hinge
+ '''
+ #: A rectangle represent hinge
+ rect: dom.Rect
+
+ #: The content box highlight fill color (default: a dark color).
+ content_color: typing.Optional[dom.RGBA] = None
+
+ #: The content box highlight outline color (default: transparent).
+ outline_color: typing.Optional[dom.RGBA] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['rect'] = self.rect.to_json()
+ if self.content_color is not None:
+ json['contentColor'] = self.content_color.to_json()
+ if self.outline_color is not None:
+ json['outlineColor'] = self.outline_color.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> HingeConfig:
+ return cls(
+ rect=dom.Rect.from_json(json['rect']),
+ content_color=dom.RGBA.from_json(json['contentColor']) if 'contentColor' in json else None,
+ outline_color=dom.RGBA.from_json(json['outlineColor']) if 'outlineColor' in json else None,
+ )
+
+
+@dataclass
+class ContainerQueryHighlightConfig:
+ #: A descriptor for the highlight appearance of container query containers.
+ container_query_container_highlight_config: ContainerQueryContainerHighlightConfig
+
+ #: Identifier of the container node to highlight.
+ node_id: dom.NodeId
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['containerQueryContainerHighlightConfig'] = self.container_query_container_highlight_config.to_json()
+ json['nodeId'] = self.node_id.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> ContainerQueryHighlightConfig:
+ return cls(
+ container_query_container_highlight_config=ContainerQueryContainerHighlightConfig.from_json(json['containerQueryContainerHighlightConfig']),
+ node_id=dom.NodeId.from_json(json['nodeId']),
+ )
+
+
+@dataclass
+class ContainerQueryContainerHighlightConfig:
+ #: The style of the container border.
+ container_border: typing.Optional[LineStyle] = None
+
+ #: The style of the descendants' borders.
+ descendant_border: typing.Optional[LineStyle] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ if self.container_border is not None:
+ json['containerBorder'] = self.container_border.to_json()
+ if self.descendant_border is not None:
+ json['descendantBorder'] = self.descendant_border.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> ContainerQueryContainerHighlightConfig:
+ return cls(
+ container_border=LineStyle.from_json(json['containerBorder']) if 'containerBorder' in json else None,
+ descendant_border=LineStyle.from_json(json['descendantBorder']) if 'descendantBorder' in json else None,
+ )
+
+
+@dataclass
+class IsolatedElementHighlightConfig:
+ #: A descriptor for the highlight appearance of an element in isolation mode.
+ isolation_mode_highlight_config: IsolationModeHighlightConfig
+
+ #: Identifier of the isolated element to highlight.
+ node_id: dom.NodeId
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['isolationModeHighlightConfig'] = self.isolation_mode_highlight_config.to_json()
+ json['nodeId'] = self.node_id.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> IsolatedElementHighlightConfig:
+ return cls(
+ isolation_mode_highlight_config=IsolationModeHighlightConfig.from_json(json['isolationModeHighlightConfig']),
+ node_id=dom.NodeId.from_json(json['nodeId']),
+ )
+
+
+@dataclass
+class IsolationModeHighlightConfig:
+ #: The fill color of the resizers (default: transparent).
+ resizer_color: typing.Optional[dom.RGBA] = None
+
+ #: The fill color for resizer handles (default: transparent).
+ resizer_handle_color: typing.Optional[dom.RGBA] = None
+
+ #: The fill color for the mask covering non-isolated elements (default: transparent).
+ mask_color: typing.Optional[dom.RGBA] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ if self.resizer_color is not None:
+ json['resizerColor'] = self.resizer_color.to_json()
+ if self.resizer_handle_color is not None:
+ json['resizerHandleColor'] = self.resizer_handle_color.to_json()
+ if self.mask_color is not None:
+ json['maskColor'] = self.mask_color.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> IsolationModeHighlightConfig:
+ return cls(
+ resizer_color=dom.RGBA.from_json(json['resizerColor']) if 'resizerColor' in json else None,
+ resizer_handle_color=dom.RGBA.from_json(json['resizerHandleColor']) if 'resizerHandleColor' in json else None,
+ mask_color=dom.RGBA.from_json(json['maskColor']) if 'maskColor' in json else None,
)
@@ -141,7 +755,9 @@ def enable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
def get_highlight_object_for_test(
node_id: dom.NodeId,
include_distance: typing.Optional[bool] = None,
- include_style: typing.Optional[bool] = None
+ include_style: typing.Optional[bool] = None,
+ color_format: typing.Optional[ColorFormat] = None,
+ show_accessibility_info: typing.Optional[bool] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,dict]:
'''
For testing.
@@ -149,6 +765,8 @@ def get_highlight_object_for_test(
:param node_id: Id of the node to get highlight object for.
:param include_distance: *(Optional)* Whether to include distance info.
:param include_style: *(Optional)* Whether to include style info.
+ :param color_format: *(Optional)* The color format to get config with (default: hex).
+ :param show_accessibility_info: *(Optional)* Whether to show accessibility info (default: true).
:returns: Highlight data for the node.
'''
params: T_JSON_DICT = dict()
@@ -157,6 +775,10 @@ def get_highlight_object_for_test(
params['includeDistance'] = include_distance
if include_style is not None:
params['includeStyle'] = include_style
+ if color_format is not None:
+ params['colorFormat'] = color_format.to_json()
+ if show_accessibility_info is not None:
+ params['showAccessibilityInfo'] = show_accessibility_info
cmd_dict: T_JSON_DICT = {
'method': 'Overlay.getHighlightObjectForTest',
'params': params,
@@ -165,6 +787,44 @@ def get_highlight_object_for_test(
return dict(json['highlight'])
+def get_grid_highlight_objects_for_test(
+ node_ids: typing.List[dom.NodeId]
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,dict]:
+ '''
+ For Persistent Grid testing.
+
+    :param node_ids: Ids of the nodes to get highlight objects for.
+ :returns: Grid Highlight data for the node ids provided.
+ '''
+ params: T_JSON_DICT = dict()
+ params['nodeIds'] = [i.to_json() for i in node_ids]
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Overlay.getGridHighlightObjectsForTest',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return dict(json['highlights'])
+
+
+def get_source_order_highlight_object_for_test(
+ node_id: dom.NodeId
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,dict]:
+ '''
+ For Source Order Viewer testing.
+
+ :param node_id: Id of the node to highlight.
+ :returns: Source order highlight data for the node id provided.
+ '''
+ params: T_JSON_DICT = dict()
+ params['nodeId'] = node_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Overlay.getSourceOrderHighlightObjectForTest',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return dict(json['highlight'])
+
+
def hide_highlight() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Hides any highlight.
@@ -175,6 +835,7 @@ def hide_highlight() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
json = yield cmd_dict
+@deprecated(version="1.3")
def highlight_frame(
frame_id: page.FrameId,
content_color: typing.Optional[dom.RGBA] = None,
@@ -182,6 +843,11 @@ def highlight_frame(
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Highlights owner element of the frame with given id.
+    Deprecated: Doesn't work reliably and cannot be fixed due to process
+    separation (the owner node might be in a different process). Determine
+ the owner node in the client and use highlightNode.
+
+ .. deprecated:: 1.3
:param frame_id: Identifier of the frame to highlight.
:param content_color: *(Optional)* The content box highlight fill color (default: transparent).
@@ -293,6 +959,36 @@ def highlight_rect(
json = yield cmd_dict
+def highlight_source_order(
+ source_order_config: SourceOrderConfig,
+ node_id: typing.Optional[dom.NodeId] = None,
+ backend_node_id: typing.Optional[dom.BackendNodeId] = None,
+ object_id: typing.Optional[runtime.RemoteObjectId] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Highlights the source order of the children of the DOM node with given id or with the given
+ JavaScript object wrapper. Either nodeId or objectId must be specified.
+
+ :param source_order_config: A descriptor for the appearance of the overlay drawing.
+ :param node_id: *(Optional)* Identifier of the node to highlight.
+ :param backend_node_id: *(Optional)* Identifier of the backend node to highlight.
+ :param object_id: *(Optional)* JavaScript object id of the node to be highlighted.
+ '''
+ params: T_JSON_DICT = dict()
+ params['sourceOrderConfig'] = source_order_config.to_json()
+ if node_id is not None:
+ params['nodeId'] = node_id.to_json()
+ if backend_node_id is not None:
+ params['backendNodeId'] = backend_node_id.to_json()
+ if object_id is not None:
+ params['objectId'] = object_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Overlay.highlightSourceOrder',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def set_inspect_mode(
mode: InspectMode,
highlight_config: typing.Optional[HighlightConfig] = None
@@ -382,6 +1078,68 @@ def set_show_fps_counter(
json = yield cmd_dict
+def set_show_grid_overlays(
+ grid_node_highlight_configs: typing.List[GridNodeHighlightConfig]
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Highlight multiple elements with the CSS Grid overlay.
+
+ :param grid_node_highlight_configs: An array of node identifiers and descriptors for the highlight appearance.
+ '''
+ params: T_JSON_DICT = dict()
+ params['gridNodeHighlightConfigs'] = [i.to_json() for i in grid_node_highlight_configs]
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Overlay.setShowGridOverlays',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def set_show_flex_overlays(
+ flex_node_highlight_configs: typing.List[FlexNodeHighlightConfig]
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ :param flex_node_highlight_configs: An array of node identifiers and descriptors for the highlight appearance.
+ '''
+ params: T_JSON_DICT = dict()
+ params['flexNodeHighlightConfigs'] = [i.to_json() for i in flex_node_highlight_configs]
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Overlay.setShowFlexOverlays',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def set_show_scroll_snap_overlays(
+ scroll_snap_highlight_configs: typing.List[ScrollSnapHighlightConfig]
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ :param scroll_snap_highlight_configs: An array of node identifiers and descriptors for the highlight appearance.
+ '''
+ params: T_JSON_DICT = dict()
+ params['scrollSnapHighlightConfigs'] = [i.to_json() for i in scroll_snap_highlight_configs]
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Overlay.setShowScrollSnapOverlays',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def set_show_container_query_overlays(
+ container_query_highlight_configs: typing.List[ContainerQueryHighlightConfig]
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ :param container_query_highlight_configs: An array of node identifiers and descriptors for the highlight appearance.
+ '''
+ params: T_JSON_DICT = dict()
+ params['containerQueryHighlightConfigs'] = [i.to_json() for i in container_query_highlight_configs]
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Overlay.setShowContainerQueryOverlays',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def set_show_paint_rects(
result: bool
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
@@ -433,11 +1191,14 @@ def set_show_scroll_bottleneck_rects(
json = yield cmd_dict
+@deprecated(version="1.3")
def set_show_hit_test_borders(
show: bool
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
- Requests that backend shows hit-test borders on layers
+ Deprecated, no longer has any effect.
+
+ .. deprecated:: 1.3
:param show: True for showing hit-test borders
'''
@@ -450,6 +1211,23 @@ def set_show_hit_test_borders(
json = yield cmd_dict
+def set_show_web_vitals(
+ show: bool
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Request that backend shows an overlay with web vital metrics.
+
+ :param show:
+ '''
+ params: T_JSON_DICT = dict()
+ params['show'] = show
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Overlay.setShowWebVitals',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def set_show_viewport_size_on_resize(
show: bool
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
@@ -467,6 +1245,41 @@ def set_show_viewport_size_on_resize(
json = yield cmd_dict
+def set_show_hinge(
+ hinge_config: typing.Optional[HingeConfig] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Add a dual screen device hinge
+
+ :param hinge_config: *(Optional)* hinge data, null means hideHinge
+ '''
+ params: T_JSON_DICT = dict()
+ if hinge_config is not None:
+ params['hingeConfig'] = hinge_config.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Overlay.setShowHinge',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def set_show_isolated_elements(
+ isolated_element_highlight_configs: typing.List[IsolatedElementHighlightConfig]
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Show elements in isolation mode with overlays.
+
+ :param isolated_element_highlight_configs: An array of node identifiers and descriptors for the highlight appearance.
+ '''
+ params: T_JSON_DICT = dict()
+ params['isolatedElementHighlightConfigs'] = [i.to_json() for i in isolated_element_highlight_configs]
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Overlay.setShowIsolatedElements',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
@event_class('Overlay.inspectNodeRequested')
@dataclass
class InspectNodeRequested:
diff --git a/cdp/page.py b/cdp/page.py
index 6eff865..e4ac78f 100644
--- a/cdp/page.py
+++ b/cdp/page.py
@@ -35,13 +35,395 @@ def __repr__(self):
return 'FrameId({})'.format(super().__repr__())
+class AdFrameType(enum.Enum):
+ '''
+ Indicates whether a frame has been identified as an ad.
+ '''
+ NONE = "none"
+ CHILD = "child"
+ ROOT = "root"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> AdFrameType:
+ return cls(json)
+
+
+class AdFrameExplanation(enum.Enum):
+ PARENT_IS_AD = "ParentIsAd"
+ CREATED_BY_AD_SCRIPT = "CreatedByAdScript"
+ MATCHED_BLOCKING_RULE = "MatchedBlockingRule"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> AdFrameExplanation:
+ return cls(json)
+
+
+@dataclass
+class AdFrameStatus:
+ '''
+ Indicates whether a frame has been identified as an ad and why.
+ '''
+ ad_frame_type: AdFrameType
+
+ explanations: typing.Optional[typing.List[AdFrameExplanation]] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['adFrameType'] = self.ad_frame_type.to_json()
+ if self.explanations is not None:
+ json['explanations'] = [i.to_json() for i in self.explanations]
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> AdFrameStatus:
+ return cls(
+ ad_frame_type=AdFrameType.from_json(json['adFrameType']),
+ explanations=[AdFrameExplanation.from_json(i) for i in json['explanations']] if 'explanations' in json else None,
+ )
+
+
+class SecureContextType(enum.Enum):
+ '''
+ Indicates whether the frame is a secure context and why it is the case.
+ '''
+ SECURE = "Secure"
+ SECURE_LOCALHOST = "SecureLocalhost"
+ INSECURE_SCHEME = "InsecureScheme"
+ INSECURE_ANCESTOR = "InsecureAncestor"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> SecureContextType:
+ return cls(json)
+
+
+class CrossOriginIsolatedContextType(enum.Enum):
+ '''
+ Indicates whether the frame is cross-origin isolated and why it is the case.
+ '''
+ ISOLATED = "Isolated"
+ NOT_ISOLATED = "NotIsolated"
+ NOT_ISOLATED_FEATURE_DISABLED = "NotIsolatedFeatureDisabled"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> CrossOriginIsolatedContextType:
+ return cls(json)
+
+
+class GatedAPIFeatures(enum.Enum):
+ SHARED_ARRAY_BUFFERS = "SharedArrayBuffers"
+ SHARED_ARRAY_BUFFERS_TRANSFER_ALLOWED = "SharedArrayBuffersTransferAllowed"
+ PERFORMANCE_MEASURE_MEMORY = "PerformanceMeasureMemory"
+ PERFORMANCE_PROFILE = "PerformanceProfile"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> GatedAPIFeatures:
+ return cls(json)
+
+
+class PermissionsPolicyFeature(enum.Enum):
+ '''
+ All Permissions Policy features. This enum should match the one defined
+ in third_party/blink/renderer/core/permissions_policy/permissions_policy_features.json5.
+ '''
+ ACCELEROMETER = "accelerometer"
+ AMBIENT_LIGHT_SENSOR = "ambient-light-sensor"
+ ATTRIBUTION_REPORTING = "attribution-reporting"
+ AUTOPLAY = "autoplay"
+ CAMERA = "camera"
+ CH_DPR = "ch-dpr"
+ CH_DEVICE_MEMORY = "ch-device-memory"
+ CH_DOWNLINK = "ch-downlink"
+ CH_ECT = "ch-ect"
+ CH_PREFERS_COLOR_SCHEME = "ch-prefers-color-scheme"
+ CH_RTT = "ch-rtt"
+ CH_UA = "ch-ua"
+ CH_UA_ARCH = "ch-ua-arch"
+ CH_UA_BITNESS = "ch-ua-bitness"
+ CH_UA_PLATFORM = "ch-ua-platform"
+ CH_UA_MODEL = "ch-ua-model"
+ CH_UA_MOBILE = "ch-ua-mobile"
+ CH_UA_FULL = "ch-ua-full"
+ CH_UA_FULL_VERSION = "ch-ua-full-version"
+ CH_UA_FULL_VERSION_LIST = "ch-ua-full-version-list"
+ CH_UA_PLATFORM_VERSION = "ch-ua-platform-version"
+ CH_UA_REDUCED = "ch-ua-reduced"
+ CH_UA_WOW64 = "ch-ua-wow64"
+ CH_VIEWPORT_HEIGHT = "ch-viewport-height"
+ CH_VIEWPORT_WIDTH = "ch-viewport-width"
+ CH_WIDTH = "ch-width"
+ CH_PARTITIONED_COOKIES = "ch-partitioned-cookies"
+ CLIPBOARD_READ = "clipboard-read"
+ CLIPBOARD_WRITE = "clipboard-write"
+ CROSS_ORIGIN_ISOLATED = "cross-origin-isolated"
+ DIRECT_SOCKETS = "direct-sockets"
+ DISPLAY_CAPTURE = "display-capture"
+ DOCUMENT_DOMAIN = "document-domain"
+ ENCRYPTED_MEDIA = "encrypted-media"
+ EXECUTION_WHILE_OUT_OF_VIEWPORT = "execution-while-out-of-viewport"
+ EXECUTION_WHILE_NOT_RENDERED = "execution-while-not-rendered"
+ FOCUS_WITHOUT_USER_ACTIVATION = "focus-without-user-activation"
+ FULLSCREEN = "fullscreen"
+ FROBULATE = "frobulate"
+ GAMEPAD = "gamepad"
+ GEOLOCATION = "geolocation"
+ GYROSCOPE = "gyroscope"
+ HID = "hid"
+ IDLE_DETECTION = "idle-detection"
+ JOIN_AD_INTEREST_GROUP = "join-ad-interest-group"
+ KEYBOARD_MAP = "keyboard-map"
+ MAGNETOMETER = "magnetometer"
+ MICROPHONE = "microphone"
+ MIDI = "midi"
+ OTP_CREDENTIALS = "otp-credentials"
+ PAYMENT = "payment"
+ PICTURE_IN_PICTURE = "picture-in-picture"
+ PUBLICKEY_CREDENTIALS_GET = "publickey-credentials-get"
+ RUN_AD_AUCTION = "run-ad-auction"
+ SCREEN_WAKE_LOCK = "screen-wake-lock"
+ SERIAL = "serial"
+ SHARED_AUTOFILL = "shared-autofill"
+ STORAGE_ACCESS_API = "storage-access-api"
+ SYNC_XHR = "sync-xhr"
+ TRUST_TOKEN_REDEMPTION = "trust-token-redemption"
+ USB = "usb"
+ VERTICAL_SCROLL = "vertical-scroll"
+ WEB_SHARE = "web-share"
+ WINDOW_PLACEMENT = "window-placement"
+ XR_SPATIAL_TRACKING = "xr-spatial-tracking"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> PermissionsPolicyFeature:
+ return cls(json)
+
+
+class PermissionsPolicyBlockReason(enum.Enum):
+ '''
+ Reason for a permissions policy feature to be disabled.
+ '''
+ HEADER = "Header"
+ IFRAME_ATTRIBUTE = "IframeAttribute"
+ IN_FENCED_FRAME_TREE = "InFencedFrameTree"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> PermissionsPolicyBlockReason:
+ return cls(json)
+
+
+@dataclass
+class PermissionsPolicyBlockLocator:
+ frame_id: FrameId
+
+ block_reason: PermissionsPolicyBlockReason
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['frameId'] = self.frame_id.to_json()
+ json['blockReason'] = self.block_reason.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> PermissionsPolicyBlockLocator:
+ return cls(
+ frame_id=FrameId.from_json(json['frameId']),
+ block_reason=PermissionsPolicyBlockReason.from_json(json['blockReason']),
+ )
+
+
+@dataclass
+class PermissionsPolicyFeatureState:
+ feature: PermissionsPolicyFeature
+
+ allowed: bool
+
+ locator: typing.Optional[PermissionsPolicyBlockLocator] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['feature'] = self.feature.to_json()
+ json['allowed'] = self.allowed
+ if self.locator is not None:
+ json['locator'] = self.locator.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> PermissionsPolicyFeatureState:
+ return cls(
+ feature=PermissionsPolicyFeature.from_json(json['feature']),
+ allowed=bool(json['allowed']),
+ locator=PermissionsPolicyBlockLocator.from_json(json['locator']) if 'locator' in json else None,
+ )
+
+
+class OriginTrialTokenStatus(enum.Enum):
+ '''
+ Origin Trial(https://www.chromium.org/blink/origin-trials) support.
+ Status for an Origin Trial token.
+ '''
+ SUCCESS = "Success"
+ NOT_SUPPORTED = "NotSupported"
+ INSECURE = "Insecure"
+ EXPIRED = "Expired"
+ WRONG_ORIGIN = "WrongOrigin"
+ INVALID_SIGNATURE = "InvalidSignature"
+ MALFORMED = "Malformed"
+ WRONG_VERSION = "WrongVersion"
+ FEATURE_DISABLED = "FeatureDisabled"
+ TOKEN_DISABLED = "TokenDisabled"
+ FEATURE_DISABLED_FOR_USER = "FeatureDisabledForUser"
+ UNKNOWN_TRIAL = "UnknownTrial"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> OriginTrialTokenStatus:
+ return cls(json)
+
+
+class OriginTrialStatus(enum.Enum):
+ '''
+ Status for an Origin Trial.
+ '''
+ ENABLED = "Enabled"
+ VALID_TOKEN_NOT_PROVIDED = "ValidTokenNotProvided"
+ OS_NOT_SUPPORTED = "OSNotSupported"
+ TRIAL_NOT_ALLOWED = "TrialNotAllowed"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> OriginTrialStatus:
+ return cls(json)
+
+
+class OriginTrialUsageRestriction(enum.Enum):
+ NONE = "None"
+ SUBSET = "Subset"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> OriginTrialUsageRestriction:
+ return cls(json)
+
+
+@dataclass
+class OriginTrialToken:
+ origin: str
+
+ match_sub_domains: bool
+
+ trial_name: str
+
+ expiry_time: network.TimeSinceEpoch
+
+ is_third_party: bool
+
+ usage_restriction: OriginTrialUsageRestriction
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['origin'] = self.origin
+ json['matchSubDomains'] = self.match_sub_domains
+ json['trialName'] = self.trial_name
+ json['expiryTime'] = self.expiry_time.to_json()
+ json['isThirdParty'] = self.is_third_party
+ json['usageRestriction'] = self.usage_restriction.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> OriginTrialToken:
+ return cls(
+ origin=str(json['origin']),
+ match_sub_domains=bool(json['matchSubDomains']),
+ trial_name=str(json['trialName']),
+ expiry_time=network.TimeSinceEpoch.from_json(json['expiryTime']),
+ is_third_party=bool(json['isThirdParty']),
+ usage_restriction=OriginTrialUsageRestriction.from_json(json['usageRestriction']),
+ )
+
+
+@dataclass
+class OriginTrialTokenWithStatus:
+ raw_token_text: str
+
+ status: OriginTrialTokenStatus
+
+ #: ``parsedToken`` is present only when the token is extractable and
+ #: parsable.
+ parsed_token: typing.Optional[OriginTrialToken] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['rawTokenText'] = self.raw_token_text
+ json['status'] = self.status.to_json()
+ if self.parsed_token is not None:
+ json['parsedToken'] = self.parsed_token.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> OriginTrialTokenWithStatus:
+ return cls(
+ raw_token_text=str(json['rawTokenText']),
+ status=OriginTrialTokenStatus.from_json(json['status']),
+ parsed_token=OriginTrialToken.from_json(json['parsedToken']) if 'parsedToken' in json else None,
+ )
+
+
+@dataclass
+class OriginTrial:
+ trial_name: str
+
+ status: OriginTrialStatus
+
+ tokens_with_status: typing.List[OriginTrialTokenWithStatus]
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['trialName'] = self.trial_name
+ json['status'] = self.status.to_json()
+ json['tokensWithStatus'] = [i.to_json() for i in self.tokens_with_status]
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> OriginTrial:
+ return cls(
+ trial_name=str(json['trialName']),
+ status=OriginTrialStatus.from_json(json['status']),
+ tokens_with_status=[OriginTrialTokenWithStatus.from_json(i) for i in json['tokensWithStatus']],
+ )
+
+
@dataclass
class Frame:
'''
Information about the Frame on the page.
'''
#: Frame unique identifier.
- id_: str
+ id_: FrameId
#: Identifier of the loader associated with this frame.
loader_id: network.LoaderId
@@ -49,14 +431,29 @@ class Frame:
#: Frame document's URL without fragment.
url: str
+ #: Frame document's registered domain, taking the public suffixes list into account.
+ #: Extracted from the Frame's url.
+ #: Example URLs: http://www.google.com/file.html -> "google.com"
+ #: http://a.b.co.uk/file.html -> "b.co.uk"
+ domain_and_registry: str
+
#: Frame document's security origin.
security_origin: str
#: Frame document's mimeType as determined by the browser.
mime_type: str
+ #: Indicates whether the main document is a secure context and explains why that is the case.
+ secure_context_type: SecureContextType
+
+ #: Indicates whether this is a cross origin isolated context.
+ cross_origin_isolated_context_type: CrossOriginIsolatedContextType
+
+ #: Indicated which gated APIs / features are available.
+ gated_api_features: typing.List[GatedAPIFeatures]
+
#: Parent frame identifier.
- parent_id: typing.Optional[str] = None
+ parent_id: typing.Optional[FrameId] = None
#: Frame's name as specified in the tag.
name: typing.Optional[str] = None
@@ -67,35 +464,49 @@ class Frame:
#: If the frame failed to load, this contains the URL that could not be loaded. Note that unlike url above, this URL may contain a fragment.
unreachable_url: typing.Optional[str] = None
+ #: Indicates whether this frame was tagged as an ad and why.
+ ad_frame_status: typing.Optional[AdFrameStatus] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
- json['id'] = self.id_
+ json['id'] = self.id_.to_json()
json['loaderId'] = self.loader_id.to_json()
json['url'] = self.url
+ json['domainAndRegistry'] = self.domain_and_registry
json['securityOrigin'] = self.security_origin
json['mimeType'] = self.mime_type
+ json['secureContextType'] = self.secure_context_type.to_json()
+ json['crossOriginIsolatedContextType'] = self.cross_origin_isolated_context_type.to_json()
+ json['gatedAPIFeatures'] = [i.to_json() for i in self.gated_api_features]
if self.parent_id is not None:
- json['parentId'] = self.parent_id
+ json['parentId'] = self.parent_id.to_json()
if self.name is not None:
json['name'] = self.name
if self.url_fragment is not None:
json['urlFragment'] = self.url_fragment
if self.unreachable_url is not None:
json['unreachableUrl'] = self.unreachable_url
+ if self.ad_frame_status is not None:
+ json['adFrameStatus'] = self.ad_frame_status.to_json()
return json
@classmethod
def from_json(cls, json: T_JSON_DICT) -> Frame:
return cls(
- id_=str(json['id']),
+ id_=FrameId.from_json(json['id']),
loader_id=network.LoaderId.from_json(json['loaderId']),
url=str(json['url']),
+ domain_and_registry=str(json['domainAndRegistry']),
security_origin=str(json['securityOrigin']),
mime_type=str(json['mimeType']),
- parent_id=str(json['parentId']) if 'parentId' in json else None,
+ secure_context_type=SecureContextType.from_json(json['secureContextType']),
+ cross_origin_isolated_context_type=CrossOriginIsolatedContextType.from_json(json['crossOriginIsolatedContextType']),
+ gated_api_features=[GatedAPIFeatures.from_json(i) for i in json['gatedAPIFeatures']],
+ parent_id=FrameId.from_json(json['parentId']) if 'parentId' in json else None,
name=str(json['name']) if 'name' in json else None,
url_fragment=str(json['urlFragment']) if 'urlFragment' in json else None,
unreachable_url=str(json['unreachableUrl']) if 'unreachableUrl' in json else None,
+ ad_frame_status=AdFrameStatus.from_json(json['adFrameStatus']) if 'adFrameStatus' in json else None,
)
@@ -394,6 +805,26 @@ def from_json(cls, json: T_JSON_DICT) -> AppManifestError:
)
+@dataclass
+class AppManifestParsedProperties:
+ '''
+ Parsed app manifest properties.
+ '''
+ #: Computed scope value
+ scope: str
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['scope'] = self.scope
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> AppManifestParsedProperties:
+ return cls(
+ scope=str(json['scope']),
+ )
+
+
@dataclass
class LayoutViewport:
'''
@@ -582,6 +1013,31 @@ def from_json(cls, json: T_JSON_DICT) -> FontFamilies:
)
+@dataclass
+class ScriptFontFamilies:
+ '''
+ Font families collection for a script.
+ '''
+ #: Name of the script which these font families are defined for.
+ script: str
+
+ #: Generic font families collection for the script.
+ font_families: FontFamilies
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['script'] = self.script
+ json['fontFamilies'] = self.font_families.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> ScriptFontFamilies:
+ return cls(
+ script=str(json['script']),
+ font_families=FontFamilies.from_json(json['fontFamilies']),
+ )
+
+
@dataclass
class FontSizes:
'''
@@ -617,6 +1073,7 @@ class ClientNavigationReason(enum.Enum):
META_TAG_REFRESH = "metaTagRefresh"
PAGE_BLOCK_INTERSTITIAL = "pageBlockInterstitial"
RELOAD = "reload"
+ ANCHOR_CLICK = "anchorClick"
def to_json(self) -> str:
return self.value
@@ -626,6 +1083,331 @@ def from_json(cls, json: str) -> ClientNavigationReason:
return cls(json)
+class ClientNavigationDisposition(enum.Enum):
+ CURRENT_TAB = "currentTab"
+ NEW_TAB = "newTab"
+ NEW_WINDOW = "newWindow"
+ DOWNLOAD = "download"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> ClientNavigationDisposition:
+ return cls(json)
+
+
+@dataclass
+class InstallabilityErrorArgument:
+ #: Argument name (e.g. name:'minimum-icon-size-in-pixels').
+ name: str
+
+ #: Argument value (e.g. value:'64').
+ value: str
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['name'] = self.name
+ json['value'] = self.value
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> InstallabilityErrorArgument:
+ return cls(
+ name=str(json['name']),
+ value=str(json['value']),
+ )
+
+
+@dataclass
+class InstallabilityError:
+ '''
+ The installability error
+ '''
+ #: The error id (e.g. 'manifest-missing-suitable-icon').
+ error_id: str
+
+ #: The list of error arguments (e.g. {name:'minimum-icon-size-in-pixels', value:'64'}).
+ error_arguments: typing.List[InstallabilityErrorArgument]
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['errorId'] = self.error_id
+ json['errorArguments'] = [i.to_json() for i in self.error_arguments]
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> InstallabilityError:
+ return cls(
+ error_id=str(json['errorId']),
+ error_arguments=[InstallabilityErrorArgument.from_json(i) for i in json['errorArguments']],
+ )
+
+
+class ReferrerPolicy(enum.Enum):
+ '''
+ The referring-policy used for the navigation.
+ '''
+ NO_REFERRER = "noReferrer"
+ NO_REFERRER_WHEN_DOWNGRADE = "noReferrerWhenDowngrade"
+ ORIGIN = "origin"
+ ORIGIN_WHEN_CROSS_ORIGIN = "originWhenCrossOrigin"
+ SAME_ORIGIN = "sameOrigin"
+ STRICT_ORIGIN = "strictOrigin"
+ STRICT_ORIGIN_WHEN_CROSS_ORIGIN = "strictOriginWhenCrossOrigin"
+ UNSAFE_URL = "unsafeUrl"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> ReferrerPolicy:
+ return cls(json)
+
+
+@dataclass
+class CompilationCacheParams:
+ '''
+ Per-script compilation cache parameters for ``Page.produceCompilationCache``
+ '''
+ #: The URL of the script to produce a compilation cache entry for.
+ url: str
+
+ #: A hint to the backend whether eager compilation is recommended.
+ #: (the actual compilation mode used is upon backend discretion).
+ eager: typing.Optional[bool] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['url'] = self.url
+ if self.eager is not None:
+ json['eager'] = self.eager
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> CompilationCacheParams:
+ return cls(
+ url=str(json['url']),
+ eager=bool(json['eager']) if 'eager' in json else None,
+ )
+
+
+class NavigationType(enum.Enum):
+ '''
+ The type of a frameNavigated event.
+ '''
+ NAVIGATION = "Navigation"
+ BACK_FORWARD_CACHE_RESTORE = "BackForwardCacheRestore"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> NavigationType:
+ return cls(json)
+
+
+class BackForwardCacheNotRestoredReason(enum.Enum):
+ '''
+ List of not restored reasons for back-forward cache.
+ '''
+ NOT_PRIMARY_MAIN_FRAME = "NotPrimaryMainFrame"
+ BACK_FORWARD_CACHE_DISABLED = "BackForwardCacheDisabled"
+ RELATED_ACTIVE_CONTENTS_EXIST = "RelatedActiveContentsExist"
+ HTTP_STATUS_NOT_OK = "HTTPStatusNotOK"
+ SCHEME_NOT_HTTP_OR_HTTPS = "SchemeNotHTTPOrHTTPS"
+ LOADING = "Loading"
+ WAS_GRANTED_MEDIA_ACCESS = "WasGrantedMediaAccess"
+ DISABLE_FOR_RENDER_FRAME_HOST_CALLED = "DisableForRenderFrameHostCalled"
+ DOMAIN_NOT_ALLOWED = "DomainNotAllowed"
+ HTTP_METHOD_NOT_GET = "HTTPMethodNotGET"
+ SUBFRAME_IS_NAVIGATING = "SubframeIsNavigating"
+ TIMEOUT = "Timeout"
+ CACHE_LIMIT = "CacheLimit"
+ JAVA_SCRIPT_EXECUTION = "JavaScriptExecution"
+ RENDERER_PROCESS_KILLED = "RendererProcessKilled"
+ RENDERER_PROCESS_CRASHED = "RendererProcessCrashed"
+ GRANTED_MEDIA_STREAM_ACCESS = "GrantedMediaStreamAccess"
+ SCHEDULER_TRACKED_FEATURE_USED = "SchedulerTrackedFeatureUsed"
+ CONFLICTING_BROWSING_INSTANCE = "ConflictingBrowsingInstance"
+ CACHE_FLUSHED = "CacheFlushed"
+ SERVICE_WORKER_VERSION_ACTIVATION = "ServiceWorkerVersionActivation"
+ SESSION_RESTORED = "SessionRestored"
+ SERVICE_WORKER_POST_MESSAGE = "ServiceWorkerPostMessage"
+ ENTERED_BACK_FORWARD_CACHE_BEFORE_SERVICE_WORKER_HOST_ADDED = "EnteredBackForwardCacheBeforeServiceWorkerHostAdded"
+ RENDER_FRAME_HOST_REUSED_SAME_SITE = "RenderFrameHostReused_SameSite"
+ RENDER_FRAME_HOST_REUSED_CROSS_SITE = "RenderFrameHostReused_CrossSite"
+ SERVICE_WORKER_CLAIM = "ServiceWorkerClaim"
+ IGNORE_EVENT_AND_EVICT = "IgnoreEventAndEvict"
+ HAVE_INNER_CONTENTS = "HaveInnerContents"
+ TIMEOUT_PUTTING_IN_CACHE = "TimeoutPuttingInCache"
+ BACK_FORWARD_CACHE_DISABLED_BY_LOW_MEMORY = "BackForwardCacheDisabledByLowMemory"
+ BACK_FORWARD_CACHE_DISABLED_BY_COMMAND_LINE = "BackForwardCacheDisabledByCommandLine"
+ NETWORK_REQUEST_DATAPIPE_DRAINED_AS_BYTES_CONSUMER = "NetworkRequestDatapipeDrainedAsBytesConsumer"
+ NETWORK_REQUEST_REDIRECTED = "NetworkRequestRedirected"
+ NETWORK_REQUEST_TIMEOUT = "NetworkRequestTimeout"
+ NETWORK_EXCEEDS_BUFFER_LIMIT = "NetworkExceedsBufferLimit"
+ NAVIGATION_CANCELLED_WHILE_RESTORING = "NavigationCancelledWhileRestoring"
+ NOT_MOST_RECENT_NAVIGATION_ENTRY = "NotMostRecentNavigationEntry"
+ BACK_FORWARD_CACHE_DISABLED_FOR_PRERENDER = "BackForwardCacheDisabledForPrerender"
+ USER_AGENT_OVERRIDE_DIFFERS = "UserAgentOverrideDiffers"
+ FOREGROUND_CACHE_LIMIT = "ForegroundCacheLimit"
+ BROWSING_INSTANCE_NOT_SWAPPED = "BrowsingInstanceNotSwapped"
+ BACK_FORWARD_CACHE_DISABLED_FOR_DELEGATE = "BackForwardCacheDisabledForDelegate"
+ OPT_IN_UNLOAD_HEADER_NOT_PRESENT = "OptInUnloadHeaderNotPresent"
+ UNLOAD_HANDLER_EXISTS_IN_MAIN_FRAME = "UnloadHandlerExistsInMainFrame"
+ UNLOAD_HANDLER_EXISTS_IN_SUB_FRAME = "UnloadHandlerExistsInSubFrame"
+ SERVICE_WORKER_UNREGISTRATION = "ServiceWorkerUnregistration"
+ CACHE_CONTROL_NO_STORE = "CacheControlNoStore"
+ CACHE_CONTROL_NO_STORE_COOKIE_MODIFIED = "CacheControlNoStoreCookieModified"
+ CACHE_CONTROL_NO_STORE_HTTP_ONLY_COOKIE_MODIFIED = "CacheControlNoStoreHTTPOnlyCookieModified"
+ NO_RESPONSE_HEAD = "NoResponseHead"
+ UNKNOWN = "Unknown"
+ ACTIVATION_NAVIGATIONS_DISALLOWED_FOR_BUG1234857 = "ActivationNavigationsDisallowedForBug1234857"
+ WEB_SOCKET = "WebSocket"
+ WEB_TRANSPORT = "WebTransport"
+ WEB_RTC = "WebRTC"
+ MAIN_RESOURCE_HAS_CACHE_CONTROL_NO_STORE = "MainResourceHasCacheControlNoStore"
+ MAIN_RESOURCE_HAS_CACHE_CONTROL_NO_CACHE = "MainResourceHasCacheControlNoCache"
+ SUBRESOURCE_HAS_CACHE_CONTROL_NO_STORE = "SubresourceHasCacheControlNoStore"
+ SUBRESOURCE_HAS_CACHE_CONTROL_NO_CACHE = "SubresourceHasCacheControlNoCache"
+ CONTAINS_PLUGINS = "ContainsPlugins"
+ DOCUMENT_LOADED = "DocumentLoaded"
+ DEDICATED_WORKER_OR_WORKLET = "DedicatedWorkerOrWorklet"
+ OUTSTANDING_NETWORK_REQUEST_OTHERS = "OutstandingNetworkRequestOthers"
+ OUTSTANDING_INDEXED_DB_TRANSACTION = "OutstandingIndexedDBTransaction"
+ REQUESTED_NOTIFICATIONS_PERMISSION = "RequestedNotificationsPermission"
+ REQUESTED_MIDI_PERMISSION = "RequestedMIDIPermission"
+ REQUESTED_AUDIO_CAPTURE_PERMISSION = "RequestedAudioCapturePermission"
+ REQUESTED_VIDEO_CAPTURE_PERMISSION = "RequestedVideoCapturePermission"
+ REQUESTED_BACK_FORWARD_CACHE_BLOCKED_SENSORS = "RequestedBackForwardCacheBlockedSensors"
+ REQUESTED_BACKGROUND_WORK_PERMISSION = "RequestedBackgroundWorkPermission"
+ BROADCAST_CHANNEL = "BroadcastChannel"
+ INDEXED_DB_CONNECTION = "IndexedDBConnection"
+ WEB_XR = "WebXR"
+ SHARED_WORKER = "SharedWorker"
+ WEB_LOCKS = "WebLocks"
+ WEB_HID = "WebHID"
+ WEB_SHARE = "WebShare"
+ REQUESTED_STORAGE_ACCESS_GRANT = "RequestedStorageAccessGrant"
+ WEB_NFC = "WebNfc"
+ OUTSTANDING_NETWORK_REQUEST_FETCH = "OutstandingNetworkRequestFetch"
+ OUTSTANDING_NETWORK_REQUEST_XHR = "OutstandingNetworkRequestXHR"
+ APP_BANNER = "AppBanner"
+ PRINTING = "Printing"
+ WEB_DATABASE = "WebDatabase"
+ PICTURE_IN_PICTURE = "PictureInPicture"
+ PORTAL = "Portal"
+ SPEECH_RECOGNIZER = "SpeechRecognizer"
+ IDLE_MANAGER = "IdleManager"
+ PAYMENT_MANAGER = "PaymentManager"
+ SPEECH_SYNTHESIS = "SpeechSynthesis"
+ KEYBOARD_LOCK = "KeyboardLock"
+ WEB_OTP_SERVICE = "WebOTPService"
+ OUTSTANDING_NETWORK_REQUEST_DIRECT_SOCKET = "OutstandingNetworkRequestDirectSocket"
+ INJECTED_JAVASCRIPT = "InjectedJavascript"
+ INJECTED_STYLE_SHEET = "InjectedStyleSheet"
+ DUMMY = "Dummy"
+ CONTENT_SECURITY_HANDLER = "ContentSecurityHandler"
+ CONTENT_WEB_AUTHENTICATION_API = "ContentWebAuthenticationAPI"
+ CONTENT_FILE_CHOOSER = "ContentFileChooser"
+ CONTENT_SERIAL = "ContentSerial"
+ CONTENT_FILE_SYSTEM_ACCESS = "ContentFileSystemAccess"
+ CONTENT_MEDIA_DEVICES_DISPATCHER_HOST = "ContentMediaDevicesDispatcherHost"
+ CONTENT_WEB_BLUETOOTH = "ContentWebBluetooth"
+ CONTENT_WEB_USB = "ContentWebUSB"
+ CONTENT_MEDIA_SESSION = "ContentMediaSession"
+ CONTENT_MEDIA_SESSION_SERVICE = "ContentMediaSessionService"
+ CONTENT_SCREEN_READER = "ContentScreenReader"
+ EMBEDDER_POPUP_BLOCKER_TAB_HELPER = "EmbedderPopupBlockerTabHelper"
+ EMBEDDER_SAFE_BROWSING_TRIGGERED_POPUP_BLOCKER = "EmbedderSafeBrowsingTriggeredPopupBlocker"
+ EMBEDDER_SAFE_BROWSING_THREAT_DETAILS = "EmbedderSafeBrowsingThreatDetails"
+ EMBEDDER_APP_BANNER_MANAGER = "EmbedderAppBannerManager"
+ EMBEDDER_DOM_DISTILLER_VIEWER_SOURCE = "EmbedderDomDistillerViewerSource"
+ EMBEDDER_DOM_DISTILLER_SELF_DELETING_REQUEST_DELEGATE = "EmbedderDomDistillerSelfDeletingRequestDelegate"
+ EMBEDDER_OOM_INTERVENTION_TAB_HELPER = "EmbedderOomInterventionTabHelper"
+ EMBEDDER_OFFLINE_PAGE = "EmbedderOfflinePage"
+ EMBEDDER_CHROME_PASSWORD_MANAGER_CLIENT_BIND_CREDENTIAL_MANAGER = "EmbedderChromePasswordManagerClientBindCredentialManager"
+ EMBEDDER_PERMISSION_REQUEST_MANAGER = "EmbedderPermissionRequestManager"
+ EMBEDDER_MODAL_DIALOG = "EmbedderModalDialog"
+ EMBEDDER_EXTENSIONS = "EmbedderExtensions"
+ EMBEDDER_EXTENSION_MESSAGING = "EmbedderExtensionMessaging"
+ EMBEDDER_EXTENSION_MESSAGING_FOR_OPEN_PORT = "EmbedderExtensionMessagingForOpenPort"
+ EMBEDDER_EXTENSION_SENT_MESSAGE_TO_CACHED_FRAME = "EmbedderExtensionSentMessageToCachedFrame"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> BackForwardCacheNotRestoredReason:
+ return cls(json)
+
+
+class BackForwardCacheNotRestoredReasonType(enum.Enum):
+ '''
+ Types of not restored reasons for back-forward cache.
+ '''
+ SUPPORT_PENDING = "SupportPending"
+ PAGE_SUPPORT_NEEDED = "PageSupportNeeded"
+ CIRCUMSTANTIAL = "Circumstantial"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> BackForwardCacheNotRestoredReasonType:
+ return cls(json)
+
+
+@dataclass
+class BackForwardCacheNotRestoredExplanation:
+ #: Type of the reason
+ type_: BackForwardCacheNotRestoredReasonType
+
+ #: Not restored reason
+ reason: BackForwardCacheNotRestoredReason
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['type'] = self.type_.to_json()
+ json['reason'] = self.reason.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> BackForwardCacheNotRestoredExplanation:
+ return cls(
+ type_=BackForwardCacheNotRestoredReasonType.from_json(json['type']),
+ reason=BackForwardCacheNotRestoredReason.from_json(json['reason']),
+ )
+
+
+@dataclass
+class BackForwardCacheNotRestoredExplanationTree:
+ #: URL of each frame
+ url: str
+
+ #: Not restored reasons of each frame
+ explanations: typing.List[BackForwardCacheNotRestoredExplanation]
+
+ #: Array of children frame
+ children: typing.List[BackForwardCacheNotRestoredExplanationTree]
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['url'] = self.url
+ json['explanations'] = [i.to_json() for i in self.explanations]
+ json['children'] = [i.to_json() for i in self.children]
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> BackForwardCacheNotRestoredExplanationTree:
+ return cls(
+ url=str(json['url']),
+ explanations=[BackForwardCacheNotRestoredExplanation.from_json(i) for i in json['explanations']],
+ children=[BackForwardCacheNotRestoredExplanationTree.from_json(i) for i in json['children']],
+ )
+
+
@deprecated(version="1.3")
def add_script_to_evaluate_on_load(
script_source: str
@@ -652,19 +1434,23 @@ def add_script_to_evaluate_on_load(
def add_script_to_evaluate_on_new_document(
source: str,
- world_name: typing.Optional[str] = None
+ world_name: typing.Optional[str] = None,
+ include_command_line_api: typing.Optional[bool] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,ScriptIdentifier]:
'''
Evaluates given script in every frame upon creation (before loading frame's scripts).
:param source:
:param world_name: **(EXPERIMENTAL)** *(Optional)* If specified, creates an isolated world with the given name and evaluates given script in it. This world name will be used as the ExecutionContextDescription::name when the corresponding event is emitted.
+ :param include_command_line_api: **(EXPERIMENTAL)** *(Optional)* Specifies whether command line API should be available to the script, defaults to false.
:returns: Identifier of the added script.
'''
params: T_JSON_DICT = dict()
params['source'] = source
if world_name is not None:
params['worldName'] = world_name
+ if include_command_line_api is not None:
+ params['includeCommandLineAPI'] = include_command_line_api
cmd_dict: T_JSON_DICT = {
'method': 'Page.addScriptToEvaluateOnNewDocument',
'params': params,
@@ -687,7 +1473,8 @@ def capture_screenshot(
format_: typing.Optional[str] = None,
quality: typing.Optional[int] = None,
clip: typing.Optional[Viewport] = None,
- from_surface: typing.Optional[bool] = None
+ from_surface: typing.Optional[bool] = None,
+ capture_beyond_viewport: typing.Optional[bool] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,str]:
'''
Capture page screenshot.
@@ -696,7 +1483,8 @@ def capture_screenshot(
:param quality: *(Optional)* Compression quality from range [0..100] (jpeg only).
:param clip: *(Optional)* Capture the screenshot of a given region only.
:param from_surface: **(EXPERIMENTAL)** *(Optional)* Capture the screenshot from the surface, rather than the view. Defaults to true.
- :returns: Base64-encoded image data.
+ :param capture_beyond_viewport: **(EXPERIMENTAL)** *(Optional)* Capture the screenshot beyond the viewport. Defaults to false.
+ :returns: Base64-encoded image data. (Encoded as a base64 string when passed over JSON)
'''
params: T_JSON_DICT = dict()
if format_ is not None:
@@ -707,6 +1495,8 @@ def capture_screenshot(
params['clip'] = clip.to_json()
if from_surface is not None:
params['fromSurface'] = from_surface
+ if capture_beyond_viewport is not None:
+ params['captureBeyondViewport'] = capture_beyond_viewport
cmd_dict: T_JSON_DICT = {
'method': 'Page.captureScreenshot',
'params': params,
@@ -741,7 +1531,7 @@ def capture_snapshot(
@deprecated(version="1.3")
def clear_device_metrics_override() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
- Clears the overriden device metrics.
+ Clears the overridden device metrics.
.. deprecated:: 1.3
@@ -771,7 +1561,7 @@ def clear_device_orientation_override() -> typing.Generator[T_JSON_DICT,T_JSON_D
@deprecated(version="1.3")
def clear_geolocation_override() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
- Clears the overriden Geolocation Position and Error.
+ Clears the overridden Geolocation Position and Error.
.. deprecated:: 1.3
'''
@@ -853,7 +1643,7 @@ def enable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
json = yield cmd_dict
-def get_app_manifest() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[str, typing.List[AppManifestError], typing.Optional[str]]]:
+def get_app_manifest() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[str, typing.List[AppManifestError], typing.Optional[str], typing.Optional[AppManifestParsedProperties]]]:
'''
@@ -862,6 +1652,7 @@ def get_app_manifest() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[
0. **url** - Manifest location.
1. **errors** -
2. **data** - *(Optional)* Manifest content.
+ 3. **parsed** - *(Optional)* Parsed manifest properties
'''
cmd_dict: T_JSON_DICT = {
'method': 'Page.getAppManifest',
@@ -870,11 +1661,12 @@ def get_app_manifest() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[
return (
str(json['url']),
[AppManifestError.from_json(i) for i in json['errors']],
- str(json['data']) if 'data' in json else None
+ str(json['data']) if 'data' in json else None,
+ AppManifestParsedProperties.from_json(json['parsed']) if 'parsed' in json else None
)
-def get_installability_errors() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[str]]:
+def get_installability_errors() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[InstallabilityError]]:
'''
@@ -886,7 +1678,44 @@ def get_installability_errors() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typi
'method': 'Page.getInstallabilityErrors',
}
json = yield cmd_dict
- return [str(i) for i in json['errors']]
+ return [InstallabilityError.from_json(i) for i in json['installabilityErrors']]
+
+
+def get_manifest_icons() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Optional[str]]:
+ '''
+
+
+ **EXPERIMENTAL**
+
+ :returns:
+ '''
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Page.getManifestIcons',
+ }
+ json = yield cmd_dict
+ return str(json['primaryIcon']) if 'primaryIcon' in json else None
+
+
+def get_app_id() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.Optional[str], typing.Optional[str]]]:
+ '''
+ Returns the unique (PWA) app id.
+ Only returns values if the feature flag 'WebAppEnableManifestId' is enabled
+
+ **EXPERIMENTAL**
+
+ :returns: A tuple with the following items:
+
+ 0. **appId** - *(Optional)* App id, either from manifest's id attribute or computed from start_url
+ 1. **recommendedId** - *(Optional)* Recommendation for manifest's id attribute to match current id computed from start_url
+ '''
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Page.getAppId',
+ }
+ json = yield cmd_dict
+ return (
+ str(json['appId']) if 'appId' in json else None,
+ str(json['recommendedId']) if 'recommendedId' in json else None
+ )
@deprecated(version="1.3")
@@ -921,15 +1750,18 @@ def get_frame_tree() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,FrameTree]:
return FrameTree.from_json(json['frameTree'])
-def get_layout_metrics() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[LayoutViewport, VisualViewport, dom.Rect]]:
+def get_layout_metrics() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[LayoutViewport, VisualViewport, dom.Rect, LayoutViewport, VisualViewport, dom.Rect]]:
'''
Returns metrics relating to the layouting of the page, such as viewport bounds/scale.
:returns: A tuple with the following items:
- 0. **layoutViewport** - Metrics relating to the layout viewport.
- 1. **visualViewport** - Metrics relating to the visual viewport.
- 2. **contentSize** - Size of scrollable area.
+ 0. **layoutViewport** - Deprecated metrics relating to the layout viewport. Can be in DP or in CSS pixels depending on the ``enable-use-zoom-for-dsf`` flag. Use ``cssLayoutViewport`` instead.
+ 1. **visualViewport** - Deprecated metrics relating to the visual viewport. Can be in DP or in CSS pixels depending on the ``enable-use-zoom-for-dsf`` flag. Use ``cssVisualViewport`` instead.
+ 2. **contentSize** - Deprecated size of scrollable area. Can be in DP or in CSS pixels depending on the ``enable-use-zoom-for-dsf`` flag. Use ``cssContentSize`` instead.
+ 3. **cssLayoutViewport** - Metrics relating to the layout viewport in CSS pixels.
+ 4. **cssVisualViewport** - Metrics relating to the visual viewport in CSS pixels.
+ 5. **cssContentSize** - Size of scrollable area in CSS pixels.
'''
cmd_dict: T_JSON_DICT = {
'method': 'Page.getLayoutMetrics',
@@ -938,7 +1770,10 @@ def get_layout_metrics() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tupl
return (
LayoutViewport.from_json(json['layoutViewport']),
VisualViewport.from_json(json['visualViewport']),
- dom.Rect.from_json(json['contentSize'])
+ dom.Rect.from_json(json['contentSize']),
+ LayoutViewport.from_json(json['cssLayoutViewport']),
+ VisualViewport.from_json(json['cssVisualViewport']),
+ dom.Rect.from_json(json['cssContentSize'])
)
@@ -1041,7 +1876,8 @@ def navigate(
url: str,
referrer: typing.Optional[str] = None,
transition_type: typing.Optional[TransitionType] = None,
- frame_id: typing.Optional[FrameId] = None
+ frame_id: typing.Optional[FrameId] = None,
+ referrer_policy: typing.Optional[ReferrerPolicy] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[FrameId, typing.Optional[network.LoaderId], typing.Optional[str]]]:
'''
Navigates current page to the given URL.
@@ -1050,6 +1886,7 @@ def navigate(
:param referrer: *(Optional)* Referrer URL.
:param transition_type: *(Optional)* Intended transition type.
:param frame_id: *(Optional)* Frame id to navigate, if not specified navigates the top frame.
+ :param referrer_policy: **(EXPERIMENTAL)** *(Optional)* Referrer-policy used for the navigation.
:returns: A tuple with the following items:
0. **frameId** - Frame id that has navigated (or failed to navigate)
@@ -1064,6 +1901,8 @@ def navigate(
params['transitionType'] = transition_type.to_json()
if frame_id is not None:
params['frameId'] = frame_id.to_json()
+ if referrer_policy is not None:
+ params['referrerPolicy'] = referrer_policy.to_json()
cmd_dict: T_JSON_DICT = {
'method': 'Page.navigate',
'params': params,
@@ -1132,7 +1971,7 @@ def print_to_pdf(
:param transfer_mode: **(EXPERIMENTAL)** *(Optional)* return as stream
:returns: A tuple with the following items:
- 0. **data** - Base64-encoded pdf data. Empty if `` returnAsStream` is specified.
+ 0. **data** - Base64-encoded pdf data. Empty if ``returnAsStream`` is specified. (Encoded as a base64 string when passed over JSON)
1. **stream** - *(Optional)* A handle of the stream that holds resulting PDF data.
'''
params: T_JSON_DICT = dict()
@@ -1332,6 +2171,48 @@ def set_bypass_csp(
json = yield cmd_dict
+def get_permissions_policy_state(
+ frame_id: FrameId
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[PermissionsPolicyFeatureState]]:
+ '''
+ Get Permissions Policy state on given frame.
+
+ **EXPERIMENTAL**
+
+ :param frame_id:
+ :returns:
+ '''
+ params: T_JSON_DICT = dict()
+ params['frameId'] = frame_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Page.getPermissionsPolicyState',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return [PermissionsPolicyFeatureState.from_json(i) for i in json['states']]
+
+
+def get_origin_trials(
+ frame_id: FrameId
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[OriginTrial]]:
+ '''
+ Get Origin Trials on given frame.
+
+ **EXPERIMENTAL**
+
+ :param frame_id:
+ :returns:
+ '''
+ params: T_JSON_DICT = dict()
+ params['frameId'] = frame_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Page.getOriginTrials',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return [OriginTrial.from_json(i) for i in json['originTrials']]
+
+
@deprecated(version="1.3")
def set_device_metrics_override(
width: int,
@@ -1426,7 +2307,8 @@ def set_device_orientation_override(
def set_font_families(
- font_families: FontFamilies
+ font_families: FontFamilies,
+ for_scripts: typing.Optional[typing.List[ScriptFontFamilies]] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Set generic font families.
@@ -1434,9 +2316,12 @@ def set_font_families(
**EXPERIMENTAL**
:param font_families: Specifies font families to set. If a font family is not specified, it won't be changed.
+ :param for_scripts: *(Optional)* Specifies font families to set for individual scripts.
'''
params: T_JSON_DICT = dict()
params['fontFamilies'] = font_families.to_json()
+ if for_scripts is not None:
+ params['forScripts'] = [i.to_json() for i in for_scripts]
cmd_dict: T_JSON_DICT = {
'method': 'Page.setFontFamilies',
'params': params,
@@ -1483,6 +2368,7 @@ def set_document_content(
json = yield cmd_dict
+@deprecated(version="1.3")
def set_download_behavior(
behavior: str,
download_path: typing.Optional[str] = None
@@ -1490,10 +2376,12 @@ def set_download_behavior(
'''
Set the behavior when downloading a file.
+ .. deprecated:: 1.3
+
**EXPERIMENTAL**
:param behavior: Whether to allow all or deny all download requests, or use default Chrome behavior if available (otherwise deny).
- :param download_path: *(Optional)* The default path to save downloaded files to. This is requred if behavior is set to 'allow'
+ :param download_path: *(Optional)* The default path to save downloaded files to. This is required if behavior is set to 'allow'
'''
params: T_JSON_DICT = dict()
params['behavior'] = behavior
@@ -1684,20 +2572,25 @@ def stop_screencast() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
json = yield cmd_dict
-def set_produce_compilation_cache(
- enabled: bool
+def produce_compilation_cache(
+ scripts: typing.List[CompilationCacheParams]
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
- Forces compilation cache to be generated for every subresource script.
+ Requests backend to produce compilation cache for the specified scripts.
+ ``scripts`` are appended to the list of scripts for which the cache
+ would be produced. The list may be reset during page navigation.
+ When script with a matching URL is encountered, the cache is optionally
+ produced upon backend discretion, based on internal heuristics.
+ See also: ``Page.compilationCacheProduced``.
**EXPERIMENTAL**
- :param enabled:
+ :param scripts:
'''
params: T_JSON_DICT = dict()
- params['enabled'] = enabled
+ params['scripts'] = [i.to_json() for i in scripts]
cmd_dict: T_JSON_DICT = {
- 'method': 'Page.setProduceCompilationCache',
+ 'method': 'Page.produceCompilationCache',
'params': params,
}
json = yield cmd_dict
@@ -1714,7 +2607,7 @@ def add_compilation_cache(
**EXPERIMENTAL**
:param url:
- :param data: Base64-encoded data
+ :param data: Base64-encoded data (Encoded as a base64 string when passed over JSON)
'''
params: T_JSON_DICT = dict()
params['url'] = url
@@ -1738,6 +2631,26 @@ def clear_compilation_cache() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
json = yield cmd_dict
+def set_spc_transaction_mode(
+ mode: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Sets the Secure Payment Confirmation transaction mode.
+ https://w3c.github.io/secure-payment-confirmation/#sctn-automation-set-spc-transaction-mode
+
+ **EXPERIMENTAL**
+
+ :param mode:
+ '''
+ params: T_JSON_DICT = dict()
+ params['mode'] = mode
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Page.setSPCTransactionMode',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def generate_test_report(
message: str,
group: typing.Optional[str] = None
@@ -1780,7 +2693,6 @@ def set_intercept_file_chooser_dialog(
Intercept file chooser requests and transfer control to protocol clients.
When file chooser interception is enabled, native file chooser dialog is not shown.
Instead, a protocol event ``Page.fileChooserOpened`` is emitted.
- File chooser can be handled with ``page.handleFileChooser`` command.
**EXPERIMENTAL**
@@ -1795,29 +2707,6 @@ def set_intercept_file_chooser_dialog(
json = yield cmd_dict
-def handle_file_chooser(
- action: str,
- files: typing.Optional[typing.List[str]] = None
- ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
- '''
- Accepts or cancels an intercepted file chooser dialog.
-
- **EXPERIMENTAL**
-
- :param action:
- :param files: *(Optional)* Array of absolute file paths to set, only respected with ```accept``` action.
- '''
- params: T_JSON_DICT = dict()
- params['action'] = action
- if files is not None:
- params['files'] = [i for i in files]
- cmd_dict: T_JSON_DICT = {
- 'method': 'Page.handleFileChooser',
- 'params': params,
- }
- json = yield cmd_dict
-
-
@event_class('Page.domContentEventFired')
@dataclass
class DomContentEventFired:
@@ -1836,11 +2725,18 @@ class FileChooserOpened:
'''
Emitted only when ``page.interceptFileChooser`` is enabled.
'''
+ #: Id of the frame containing input node.
+ frame_id: FrameId
+ #: Input node id.
+ backend_node_id: dom.BackendNodeId
+ #: Input mode.
mode: str
@classmethod
def from_json(cls, json: T_JSON_DICT) -> FileChooserOpened:
return cls(
+ frame_id=FrameId.from_json(json['frameId']),
+ backend_node_id=dom.BackendNodeId.from_json(json['backendNodeId']),
mode=str(json['mode'])
)
@@ -1892,11 +2788,13 @@ class FrameDetached:
'''
#: Id of the frame that has been detached.
frame_id: FrameId
+ reason: str
@classmethod
def from_json(cls, json: T_JSON_DICT) -> FrameDetached:
return cls(
- frame_id=FrameId.from_json(json['frameId'])
+ frame_id=FrameId.from_json(json['frameId']),
+ reason=str(json['reason'])
)
@@ -1908,9 +2806,29 @@ class FrameNavigated:
'''
#: Frame object.
frame: Frame
+ type_: NavigationType
@classmethod
def from_json(cls, json: T_JSON_DICT) -> FrameNavigated:
+ return cls(
+ frame=Frame.from_json(json['frame']),
+ type_=NavigationType.from_json(json['type'])
+ )
+
+
+@event_class('Page.documentOpened')
+@dataclass
+class DocumentOpened:
+ '''
+ **EXPERIMENTAL**
+
+ Fired when opening document to write to.
+ '''
+ #: Frame object.
+ frame: Frame
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> DocumentOpened:
return cls(
frame=Frame.from_json(json['frame'])
)
@@ -1948,13 +2866,16 @@ class FrameRequestedNavigation:
reason: ClientNavigationReason
#: The destination URL for the requested navigation.
url: str
+ #: The disposition for the navigation.
+ disposition: ClientNavigationDisposition
@classmethod
def from_json(cls, json: T_JSON_DICT) -> FrameRequestedNavigation:
return cls(
frame_id=FrameId.from_json(json['frameId']),
reason=ClientNavigationReason.from_json(json['reason']),
- url=str(json['url'])
+ url=str(json['url']),
+ disposition=ClientNavigationDisposition.from_json(json['disposition'])
)
@@ -1971,7 +2892,7 @@ class FrameScheduledNavigation:
#: guaranteed to start.
delay: float
#: The reason for the navigation.
- reason: str
+ reason: ClientNavigationReason
#: The destination URL for the scheduled navigation.
url: str
@@ -1980,7 +2901,7 @@ def from_json(cls, json: T_JSON_DICT) -> FrameScheduledNavigation:
return cls(
frame_id=FrameId.from_json(json['frameId']),
delay=float(json['delay']),
- reason=str(json['reason']),
+ reason=ClientNavigationReason.from_json(json['reason']),
url=str(json['url'])
)
@@ -2021,6 +2942,7 @@ def from_json(cls, json: T_JSON_DICT) -> FrameStoppedLoading:
)
+@deprecated(version="1.3")
@event_class('Page.downloadWillBegin')
@dataclass
class DownloadWillBegin:
@@ -2028,17 +2950,53 @@ class DownloadWillBegin:
**EXPERIMENTAL**
Fired when page is about to start a download.
+ Deprecated. Use Browser.downloadWillBegin instead.
'''
#: Id of the frame that caused download to begin.
frame_id: FrameId
+ #: Global unique identifier of the download.
+ guid: str
#: URL of the resource being downloaded.
url: str
+ #: Suggested file name of the resource (the actual name of the file saved on disk may differ).
+ suggested_filename: str
@classmethod
def from_json(cls, json: T_JSON_DICT) -> DownloadWillBegin:
return cls(
frame_id=FrameId.from_json(json['frameId']),
- url=str(json['url'])
+ guid=str(json['guid']),
+ url=str(json['url']),
+ suggested_filename=str(json['suggestedFilename'])
+ )
+
+
+@deprecated(version="1.3")
+@event_class('Page.downloadProgress')
+@dataclass
+class DownloadProgress:
+ '''
+ **EXPERIMENTAL**
+
+ Fired when download makes progress. Last call has ``done`` == true.
+ Deprecated. Use Browser.downloadProgress instead.
+ '''
+ #: Global unique identifier of the download.
+ guid: str
+ #: Total expected bytes to download.
+ total_bytes: float
+ #: Total bytes received.
+ received_bytes: float
+ #: Download status.
+ state: str
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> DownloadProgress:
+ return cls(
+ guid=str(json['guid']),
+ total_bytes=float(json['totalBytes']),
+ received_bytes=float(json['receivedBytes']),
+ state=str(json['state'])
)
@@ -2146,6 +3104,36 @@ def from_json(cls, json: T_JSON_DICT) -> LifecycleEvent:
)
+@event_class('Page.backForwardCacheNotUsed')
+@dataclass
+class BackForwardCacheNotUsed:
+ '''
+ **EXPERIMENTAL**
+
+ Fired for failed bfcache history navigations if BackForwardCache feature is enabled. Do
+ not assume any ordering with the Page.frameNavigated event. This event is fired only for
+ main-frame history navigation where the document changes (non-same-document navigations),
+ when bfcache navigation fails.
+ '''
+ #: The loader id for the associated navigation.
+ loader_id: network.LoaderId
+ #: The frame id of the associated frame.
+ frame_id: FrameId
+ #: Array of reasons why the page could not be cached. This must not be empty.
+ not_restored_explanations: typing.List[BackForwardCacheNotRestoredExplanation]
+ #: Tree structure of reasons why the page could not be cached for each frame.
+ not_restored_explanations_tree: typing.Optional[BackForwardCacheNotRestoredExplanationTree]
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> BackForwardCacheNotUsed:
+ return cls(
+ loader_id=network.LoaderId.from_json(json['loaderId']),
+ frame_id=FrameId.from_json(json['frameId']),
+ not_restored_explanations=[BackForwardCacheNotRestoredExplanation.from_json(i) for i in json['notRestoredExplanations']],
+ not_restored_explanations_tree=BackForwardCacheNotRestoredExplanationTree.from_json(json['notRestoredExplanationsTree']) if 'notRestoredExplanationsTree' in json else None
+ )
+
+
@event_class('Page.loadEventFired')
@dataclass
class LoadEventFired:
@@ -2187,7 +3175,7 @@ class ScreencastFrame:
Compressed image data requested by the ``startScreencast``.
'''
- #: Base64-encoded compressed image.
+ #: Base64-encoded compressed image. (Encoded as a base64 string when passed over JSON)
data: str
#: Screencast frame metadata.
metadata: ScreencastFrameMetadata
@@ -2257,7 +3245,7 @@ class CompilationCacheProduced:
if Page.setGenerateCompilationCache is enabled.
'''
url: str
- #: Base64-encoded data
+ #: Base64-encoded data (Encoded as a base64 string when passed over JSON)
data: str
@classmethod
diff --git a/cdp/performance.py b/cdp/performance.py
index d229797..1d5f2b8 100644
--- a/cdp/performance.py
+++ b/cdp/performance.py
@@ -12,6 +12,9 @@
import typing
+from deprecated.sphinx import deprecated # type: ignore
+
+
@dataclass
class Metric:
'''
@@ -47,16 +50,25 @@ def disable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
json = yield cmd_dict
-def enable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+def enable(
+ time_domain: typing.Optional[str] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Enable collecting and reporting metrics.
+
+ :param time_domain: *(Optional)* Time domain to use for collecting and reporting duration metrics.
'''
+ params: T_JSON_DICT = dict()
+ if time_domain is not None:
+ params['timeDomain'] = time_domain
cmd_dict: T_JSON_DICT = {
'method': 'Performance.enable',
+ 'params': params,
}
json = yield cmd_dict
+@deprecated(version="1.3")
def set_time_domain(
time_domain: str
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
@@ -65,6 +77,8 @@ def set_time_domain(
Note that this must be called before enabling metrics collection. Calling
this method while metrics collection is enabled returns an error.
+ .. deprecated:: 1.3
+
**EXPERIMENTAL**
:param time_domain: Time domain
diff --git a/cdp/performance_timeline.py b/cdp/performance_timeline.py
new file mode 100644
index 0000000..ac95409
--- /dev/null
+++ b/cdp/performance_timeline.py
@@ -0,0 +1,200 @@
+# DO NOT EDIT THIS FILE!
+#
+# This file is generated from the CDP specification. If you need to make
+# changes, edit the generator and regenerate all of the modules.
+#
+# CDP domain: PerformanceTimeline (experimental)
+
+from __future__ import annotations
+from cdp.util import event_class, T_JSON_DICT
+from dataclasses import dataclass
+import enum
+import typing
+
+from . import dom
+from . import network
+from . import page
+
+
+@dataclass
+class LargestContentfulPaint:
+ '''
+ See https://github.com/WICG/LargestContentfulPaint and largest_contentful_paint.idl
+ '''
+ render_time: network.TimeSinceEpoch
+
+ load_time: network.TimeSinceEpoch
+
+ #: The number of pixels being painted.
+ size: float
+
+ #: The id attribute of the element, if available.
+ element_id: typing.Optional[str] = None
+
+ #: The URL of the image (may be trimmed).
+ url: typing.Optional[str] = None
+
+ node_id: typing.Optional[dom.BackendNodeId] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['renderTime'] = self.render_time.to_json()
+ json['loadTime'] = self.load_time.to_json()
+ json['size'] = self.size
+ if self.element_id is not None:
+ json['elementId'] = self.element_id
+ if self.url is not None:
+ json['url'] = self.url
+ if self.node_id is not None:
+ json['nodeId'] = self.node_id.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> LargestContentfulPaint:
+ return cls(
+ render_time=network.TimeSinceEpoch.from_json(json['renderTime']),
+ load_time=network.TimeSinceEpoch.from_json(json['loadTime']),
+ size=float(json['size']),
+ element_id=str(json['elementId']) if 'elementId' in json else None,
+ url=str(json['url']) if 'url' in json else None,
+ node_id=dom.BackendNodeId.from_json(json['nodeId']) if 'nodeId' in json else None,
+ )
+
+
+@dataclass
+class LayoutShiftAttribution:
+ previous_rect: dom.Rect
+
+ current_rect: dom.Rect
+
+ node_id: typing.Optional[dom.BackendNodeId] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['previousRect'] = self.previous_rect.to_json()
+ json['currentRect'] = self.current_rect.to_json()
+ if self.node_id is not None:
+ json['nodeId'] = self.node_id.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> LayoutShiftAttribution:
+ return cls(
+ previous_rect=dom.Rect.from_json(json['previousRect']),
+ current_rect=dom.Rect.from_json(json['currentRect']),
+ node_id=dom.BackendNodeId.from_json(json['nodeId']) if 'nodeId' in json else None,
+ )
+
+
+@dataclass
+class LayoutShift:
+ '''
+ See https://wicg.github.io/layout-instability/#sec-layout-shift and layout_shift.idl
+ '''
+ #: Score increment produced by this event.
+ value: float
+
+ had_recent_input: bool
+
+ last_input_time: network.TimeSinceEpoch
+
+ sources: typing.List[LayoutShiftAttribution]
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['value'] = self.value
+ json['hadRecentInput'] = self.had_recent_input
+ json['lastInputTime'] = self.last_input_time.to_json()
+ json['sources'] = [i.to_json() for i in self.sources]
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> LayoutShift:
+ return cls(
+ value=float(json['value']),
+ had_recent_input=bool(json['hadRecentInput']),
+ last_input_time=network.TimeSinceEpoch.from_json(json['lastInputTime']),
+ sources=[LayoutShiftAttribution.from_json(i) for i in json['sources']],
+ )
+
+
+@dataclass
+class TimelineEvent:
+ #: Identifies the frame that this event is related to. Empty for non-frame targets.
+ frame_id: page.FrameId
+
+ #: The event type, as specified in https://w3c.github.io/performance-timeline/#dom-performanceentry-entrytype
+ #: This determines which of the optional "details" fields is present.
+ type_: str
+
+ #: Name may be empty depending on the type.
+ name: str
+
+ #: Time in seconds since Epoch, monotonically increasing within document lifetime.
+ time: network.TimeSinceEpoch
+
+ #: Event duration, if applicable.
+ duration: typing.Optional[float] = None
+
+ lcp_details: typing.Optional[LargestContentfulPaint] = None
+
+ layout_shift_details: typing.Optional[LayoutShift] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['frameId'] = self.frame_id.to_json()
+ json['type'] = self.type_
+ json['name'] = self.name
+ json['time'] = self.time.to_json()
+ if self.duration is not None:
+ json['duration'] = self.duration
+ if self.lcp_details is not None:
+ json['lcpDetails'] = self.lcp_details.to_json()
+ if self.layout_shift_details is not None:
+ json['layoutShiftDetails'] = self.layout_shift_details.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> TimelineEvent:
+ return cls(
+ frame_id=page.FrameId.from_json(json['frameId']),
+ type_=str(json['type']),
+ name=str(json['name']),
+ time=network.TimeSinceEpoch.from_json(json['time']),
+ duration=float(json['duration']) if 'duration' in json else None,
+ lcp_details=LargestContentfulPaint.from_json(json['lcpDetails']) if 'lcpDetails' in json else None,
+ layout_shift_details=LayoutShift.from_json(json['layoutShiftDetails']) if 'layoutShiftDetails' in json else None,
+ )
+
+
+def enable(
+ event_types: typing.List[str]
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Previously buffered events would be reported before method returns.
+ See also: timelineEventAdded
+
+ :param event_types: The types of event to report, as specified in https://w3c.github.io/performance-timeline/#dom-performanceentry-entrytype The specified filter overrides any previous filters, passing empty filter disables recording. Note that not all types exposed to the web platform are currently supported.
+ '''
+ params: T_JSON_DICT = dict()
+ params['eventTypes'] = [i for i in event_types]
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'PerformanceTimeline.enable',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+@event_class('PerformanceTimeline.timelineEventAdded')
+@dataclass
+class TimelineEventAdded:
+ '''
+ Sent when a performance timeline event is added. See reportPerformanceTimeline method.
+ '''
+ event: TimelineEvent
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> TimelineEventAdded:
+ return cls(
+ event=TimelineEvent.from_json(json['event'])
+ )
diff --git a/cdp/profiler.py b/cdp/profiler.py
index 3dd748a..d08a84f 100644
--- a/cdp/profiler.py
+++ b/cdp/profiler.py
@@ -355,8 +355,9 @@ def start() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
def start_precise_coverage(
call_count: typing.Optional[bool] = None,
- detailed: typing.Optional[bool] = None
- ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ detailed: typing.Optional[bool] = None,
+ allow_triggered_updates: typing.Optional[bool] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,float]:
'''
Enable precise code coverage. Coverage data for JavaScript executed before enabling precise code
coverage may be incomplete. Enabling prevents running optimized code and resets execution
@@ -364,17 +365,22 @@ def start_precise_coverage(
:param call_count: *(Optional)* Collect accurate call counts beyond simple 'covered' or 'not covered'.
:param detailed: *(Optional)* Collect block-based coverage.
+ :param allow_triggered_updates: *(Optional)* Allow the backend to send updates on its own initiative
+ :returns: Monotonically increasing time (in seconds) when the coverage update was taken in the backend.
'''
params: T_JSON_DICT = dict()
if call_count is not None:
params['callCount'] = call_count
if detailed is not None:
params['detailed'] = detailed
+ if allow_triggered_updates is not None:
+ params['allowTriggeredUpdates'] = allow_triggered_updates
cmd_dict: T_JSON_DICT = {
'method': 'Profiler.startPreciseCoverage',
'params': params,
}
json = yield cmd_dict
+ return float(json['timestamp'])
def start_type_profile() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
@@ -425,18 +431,24 @@ def stop_type_profile() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
json = yield cmd_dict
-def take_precise_coverage() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[ScriptCoverage]]:
+def take_precise_coverage() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.List[ScriptCoverage], float]]:
'''
Collect coverage data for the current isolate, and resets execution counters. Precise code
coverage needs to have started.
- :returns: Coverage data for the current isolate.
+ :returns: A tuple with the following items:
+
+ 0. **result** - Coverage data for the current isolate.
+ 1. **timestamp** - Monotonically increasing time (in seconds) when the coverage update was taken in the backend.
'''
cmd_dict: T_JSON_DICT = {
'method': 'Profiler.takePreciseCoverage',
}
json = yield cmd_dict
- return [ScriptCoverage.from_json(i) for i in json['result']]
+ return (
+ [ScriptCoverage.from_json(i) for i in json['result']],
+ float(json['timestamp'])
+ )
def take_type_profile() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[ScriptTypeProfile]]:
@@ -493,3 +505,30 @@ def from_json(cls, json: T_JSON_DICT) -> ConsoleProfileStarted:
location=debugger.Location.from_json(json['location']),
title=str(json['title']) if 'title' in json else None
)
+
+
+@event_class('Profiler.preciseCoverageDeltaUpdate')
+@dataclass
+class PreciseCoverageDeltaUpdate:
+ '''
+ **EXPERIMENTAL**
+
+    Reports coverage delta since the last poll (either from an event like this, or from
+    ``takePreciseCoverage`` for the current isolate). May only be sent if precise code
+    coverage has been started. This event can be triggered by the embedder to, for example,
+    trigger collection of coverage data immediately at a certain point in time.
+ '''
+ #: Monotonically increasing time (in seconds) when the coverage update was taken in the backend.
+ timestamp: float
+ #: Identifier for distinguishing coverage events.
+ occasion: str
+ #: Coverage data for the current isolate.
+ result: typing.List[ScriptCoverage]
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> PreciseCoverageDeltaUpdate:
+ return cls(
+ timestamp=float(json['timestamp']),
+ occasion=str(json['occasion']),
+ result=[ScriptCoverage.from_json(i) for i in json['result']]
+ )
diff --git a/cdp/runtime.py b/cdp/runtime.py
index 0a22db6..1f833f8 100644
--- a/cdp/runtime.py
+++ b/cdp/runtime.py
@@ -67,6 +67,8 @@ class RemoteObject:
type_: str
#: Object subtype hint. Specified for ``object`` type values only.
+ #: NOTE: If you change anything here, make sure to also update
+ #: ``subtype`` in ``ObjectPreview`` and ``PropertyPreview`` below.
subtype: typing.Optional[str] = None
#: Object class (constructor) name. Specified for ``object`` type values only.
@@ -374,19 +376,34 @@ class PrivatePropertyDescriptor:
name: str
#: The value associated with the private property.
- value: RemoteObject
+ value: typing.Optional[RemoteObject] = None
+
+ #: A function which serves as a getter for the private property,
+ #: or ``undefined`` if there is no getter (accessor descriptors only).
+ get: typing.Optional[RemoteObject] = None
+
+ #: A function which serves as a setter for the private property,
+ #: or ``undefined`` if there is no setter (accessor descriptors only).
+ set_: typing.Optional[RemoteObject] = None
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['name'] = self.name
- json['value'] = self.value.to_json()
+ if self.value is not None:
+ json['value'] = self.value.to_json()
+ if self.get is not None:
+ json['get'] = self.get.to_json()
+ if self.set_ is not None:
+ json['set'] = self.set_.to_json()
return json
@classmethod
def from_json(cls, json: T_JSON_DICT) -> PrivatePropertyDescriptor:
return cls(
name=str(json['name']),
- value=RemoteObject.from_json(json['value']),
+ value=RemoteObject.from_json(json['value']) if 'value' in json else None,
+ get=RemoteObject.from_json(json['get']) if 'get' in json else None,
+ set_=RemoteObject.from_json(json['set']) if 'set' in json else None,
)
@@ -454,6 +471,11 @@ class ExecutionContextDescription:
#: Human readable name describing given context.
name: str
+ #: A system-unique execution context identifier. Unlike the id, this is unique across
+ #: multiple processes, so can be reliably used to identify specific context while backend
+ #: performs a cross-process navigation.
+ unique_id: str
+
#: Embedder-specific auxiliary data.
aux_data: typing.Optional[dict] = None
@@ -462,6 +484,7 @@ def to_json(self) -> T_JSON_DICT:
json['id'] = self.id_.to_json()
json['origin'] = self.origin
json['name'] = self.name
+ json['uniqueId'] = self.unique_id
if self.aux_data is not None:
json['auxData'] = self.aux_data
return json
@@ -472,6 +495,7 @@ def from_json(cls, json: T_JSON_DICT) -> ExecutionContextDescription:
id_=ExecutionContextId.from_json(json['id']),
origin=str(json['origin']),
name=str(json['name']),
+ unique_id=str(json['uniqueId']),
aux_data=dict(json['auxData']) if 'auxData' in json else None,
)
@@ -509,6 +533,11 @@ class ExceptionDetails:
#: Identifier of the context where exception happened.
execution_context_id: typing.Optional[ExecutionContextId] = None
+ #: Dictionary with entries of meta data that the client associated
+ #: with this exception, such as information about associated network
+ #: requests, etc.
+ exception_meta_data: typing.Optional[dict] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['exceptionId'] = self.exception_id
@@ -525,6 +554,8 @@ def to_json(self) -> T_JSON_DICT:
json['exception'] = self.exception.to_json()
if self.execution_context_id is not None:
json['executionContextId'] = self.execution_context_id.to_json()
+ if self.exception_meta_data is not None:
+ json['exceptionMetaData'] = self.exception_meta_data
return json
@classmethod
@@ -539,6 +570,7 @@ def from_json(cls, json: T_JSON_DICT) -> ExceptionDetails:
stack_trace=StackTrace.from_json(json['stackTrace']) if 'stackTrace' in json else None,
exception=RemoteObject.from_json(json['exception']) if 'exception' in json else None,
execution_context_id=ExecutionContextId.from_json(json['executionContextId']) if 'executionContextId' in json else None,
+ exception_meta_data=dict(json['exceptionMetaData']) if 'exceptionMetaData' in json else None,
)
@@ -734,7 +766,8 @@ def call_function_on(
user_gesture: typing.Optional[bool] = None,
await_promise: typing.Optional[bool] = None,
execution_context_id: typing.Optional[ExecutionContextId] = None,
- object_group: typing.Optional[str] = None
+ object_group: typing.Optional[str] = None,
+ throw_on_side_effect: typing.Optional[bool] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[RemoteObject, typing.Optional[ExceptionDetails]]]:
'''
Calls function with given declaration on the given object. Object group of the result is
@@ -750,6 +783,7 @@ def call_function_on(
:param await_promise: *(Optional)* Whether execution should ````await``` for resulting value and return once awaited promise is resolved.
:param execution_context_id: *(Optional)* Specifies execution context which global object will be used to call function on. Either executionContextId or objectId should be specified.
:param object_group: *(Optional)* Symbolic group name that can be used to release multiple objects. If objectGroup is not specified and objectId is, objectGroup will be inherited from object.
+ :param throw_on_side_effect: **(EXPERIMENTAL)** *(Optional)* Whether to throw an exception if side effect cannot be ruled out during evaluation.
:returns: A tuple with the following items:
0. **result** - Call result.
@@ -775,6 +809,8 @@ def call_function_on(
params['executionContextId'] = execution_context_id.to_json()
if object_group is not None:
params['objectGroup'] = object_group
+ if throw_on_side_effect is not None:
+ params['throwOnSideEffect'] = throw_on_side_effect
cmd_dict: T_JSON_DICT = {
'method': 'Runtime.callFunctionOn',
'params': params,
@@ -864,7 +900,11 @@ def evaluate(
user_gesture: typing.Optional[bool] = None,
await_promise: typing.Optional[bool] = None,
throw_on_side_effect: typing.Optional[bool] = None,
- timeout: typing.Optional[TimeDelta] = None
+ timeout: typing.Optional[TimeDelta] = None,
+ disable_breaks: typing.Optional[bool] = None,
+ repl_mode: typing.Optional[bool] = None,
+ allow_unsafe_eval_blocked_by_csp: typing.Optional[bool] = None,
+ unique_context_id: typing.Optional[str] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[RemoteObject, typing.Optional[ExceptionDetails]]]:
'''
Evaluates expression on global object.
@@ -873,13 +913,17 @@ def evaluate(
:param object_group: *(Optional)* Symbolic group name that can be used to release multiple objects.
:param include_command_line_api: *(Optional)* Determines whether Command Line API should be available during the evaluation.
:param silent: *(Optional)* In silent mode exceptions thrown during evaluation are not reported and do not pause execution. Overrides ```setPauseOnException```` state.
- :param context_id: *(Optional)* Specifies in which execution context to perform evaluation. If the parameter is omitted the evaluation will be performed in the context of the inspected page.
+ :param context_id: *(Optional)* Specifies in which execution context to perform evaluation. If the parameter is omitted the evaluation will be performed in the context of the inspected page. This is mutually exclusive with ````uniqueContextId````, which offers an alternative way to identify the execution context that is more reliable in a multi-process environment.
:param return_by_value: *(Optional)* Whether the result is expected to be a JSON object that should be sent by value.
:param generate_preview: **(EXPERIMENTAL)** *(Optional)* Whether preview should be generated for the result.
:param user_gesture: *(Optional)* Whether execution should be treated as initiated by user in the UI.
- :param await_promise: *(Optional)* Whether execution should ````await``` for resulting value and return once awaited promise is resolved.
- :param throw_on_side_effect: **(EXPERIMENTAL)** *(Optional)* Whether to throw an exception if side effect cannot be ruled out during evaluation.
+ :param await_promise: *(Optional)* Whether execution should ````await```` for resulting value and return once awaited promise is resolved.
+ :param throw_on_side_effect: **(EXPERIMENTAL)** *(Optional)* Whether to throw an exception if side effect cannot be ruled out during evaluation. This implies ````disableBreaks```` below.
:param timeout: **(EXPERIMENTAL)** *(Optional)* Terminate execution after timing out (number of milliseconds).
+ :param disable_breaks: **(EXPERIMENTAL)** *(Optional)* Disable breakpoints during execution.
+ :param repl_mode: **(EXPERIMENTAL)** *(Optional)* Setting this flag to true enables ````let```` re-declaration and top-level ````await````. Note that ````let```` variables can only be re-declared if they originate from ````replMode```` themselves.
+ :param allow_unsafe_eval_blocked_by_csp: **(EXPERIMENTAL)** *(Optional)* The Content Security Policy (CSP) for the target might block 'unsafe-eval' which includes eval(), Function(), setTimeout() and setInterval() when called with non-callable arguments. This flag bypasses CSP for this evaluation and allows unsafe-eval. Defaults to true.
+    :param unique_context_id: **(EXPERIMENTAL)** *(Optional)* An alternative way to specify the execution context to evaluate in. Compared to contextId that may be reused across processes, this is guaranteed to be system-unique, so it can be used to prevent accidental evaluation of the expression in context different than intended (e.g. as a result of navigation across process boundaries). This is mutually exclusive with ````contextId````.
:returns: A tuple with the following items:
0. **result** - Evaluation result.
@@ -907,6 +951,14 @@ def evaluate(
params['throwOnSideEffect'] = throw_on_side_effect
if timeout is not None:
params['timeout'] = timeout.to_json()
+ if disable_breaks is not None:
+ params['disableBreaks'] = disable_breaks
+ if repl_mode is not None:
+ params['replMode'] = repl_mode
+ if allow_unsafe_eval_blocked_by_csp is not None:
+ params['allowUnsafeEvalBlockedByCSP'] = allow_unsafe_eval_blocked_by_csp
+ if unique_context_id is not None:
+ params['uniqueContextId'] = unique_context_id
cmd_dict: T_JSON_DICT = {
'method': 'Runtime.evaluate',
'params': params,
@@ -959,7 +1011,8 @@ def get_properties(
object_id: RemoteObjectId,
own_properties: typing.Optional[bool] = None,
accessor_properties_only: typing.Optional[bool] = None,
- generate_preview: typing.Optional[bool] = None
+ generate_preview: typing.Optional[bool] = None,
+ non_indexed_properties_only: typing.Optional[bool] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.List[PropertyDescriptor], typing.Optional[typing.List[InternalPropertyDescriptor]], typing.Optional[typing.List[PrivatePropertyDescriptor]], typing.Optional[ExceptionDetails]]]:
'''
Returns properties of a given object. Object group of the result is inherited from the target
@@ -969,6 +1022,7 @@ def get_properties(
:param own_properties: *(Optional)* If true, returns properties belonging only to the element itself, not to its prototype chain.
:param accessor_properties_only: **(EXPERIMENTAL)** *(Optional)* If true, returns accessor properties (with getter/setter) only; internal properties are not returned either.
:param generate_preview: **(EXPERIMENTAL)** *(Optional)* Whether preview should be generated for the results.
+ :param non_indexed_properties_only: **(EXPERIMENTAL)** *(Optional)* If true, returns non-indexed properties only.
:returns: A tuple with the following items:
0. **result** - Object properties.
@@ -984,6 +1038,8 @@ def get_properties(
params['accessorPropertiesOnly'] = accessor_properties_only
if generate_preview is not None:
params['generatePreview'] = generate_preview
+ if non_indexed_properties_only is not None:
+ params['nonIndexedPropertiesOnly'] = non_indexed_properties_only
cmd_dict: T_JSON_DICT = {
'method': 'Runtime.getProperties',
'params': params,
@@ -1205,14 +1261,13 @@ def terminate_execution() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
def add_binding(
name: str,
- execution_context_id: typing.Optional[ExecutionContextId] = None
+ execution_context_id: typing.Optional[ExecutionContextId] = None,
+ execution_context_name: typing.Optional[str] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
If executionContextId is empty, adds binding with the given name on the
global objects of all inspected contexts, including those created later,
bindings survive reloads.
- If executionContextId is specified, adds binding only on global object of
- given execution context.
Binding function takes exactly one argument, this argument should be string,
in case of any other input, function throws an exception.
Each binding function call produces Runtime.bindingCalled notification.
@@ -1220,12 +1275,15 @@ def add_binding(
**EXPERIMENTAL**
:param name:
- :param execution_context_id: *(Optional)*
+    :param execution_context_id: **(DEPRECATED)** *(Optional)* If specified, the binding would only be exposed to the specified execution context. If omitted and ````executionContextName```` is not set, the binding is exposed to all execution contexts of the target. This parameter is mutually exclusive with ````executionContextName````. Deprecated in favor of ````executionContextName```` due to an unclear use case and bugs in implementation (crbug.com/1169639). ````executionContextId```` will be removed in the future.
+    :param execution_context_name: **(EXPERIMENTAL)** *(Optional)* If specified, the binding is exposed to the executionContext with matching name, even for contexts created after the binding is added. See also ````ExecutionContext.name```` and ````worldName```` parameter to ````Page.addScriptToEvaluateOnNewDocument````. This parameter is mutually exclusive with ````executionContextId````.
'''
params: T_JSON_DICT = dict()
params['name'] = name
if execution_context_id is not None:
params['executionContextId'] = execution_context_id.to_json()
+ if execution_context_name is not None:
+ params['executionContextName'] = execution_context_name
cmd_dict: T_JSON_DICT = {
'method': 'Runtime.addBinding',
'params': params,
@@ -1403,10 +1461,13 @@ class InspectRequested:
'''
object_: RemoteObject
hints: dict
+ #: Identifier of the context where the call was made.
+ execution_context_id: typing.Optional[ExecutionContextId]
@classmethod
def from_json(cls, json: T_JSON_DICT) -> InspectRequested:
return cls(
object_=RemoteObject.from_json(json['object']),
- hints=dict(json['hints'])
+ hints=dict(json['hints']),
+ execution_context_id=ExecutionContextId.from_json(json['executionContextId']) if 'executionContextId' in json else None
)
diff --git a/cdp/security.py b/cdp/security.py
index 4f0e5a0..cbd4583 100644
--- a/cdp/security.py
+++ b/cdp/security.py
@@ -11,7 +11,7 @@
import enum
import typing
-
+from . import network
from deprecated.sphinx import deprecated # type: ignore
@@ -56,6 +56,7 @@ class SecurityState(enum.Enum):
INSECURE = "insecure"
SECURE = "secure"
INFO = "info"
+ INSECURE_BROKEN = "insecure-broken"
def to_json(self) -> str:
return self.value
@@ -65,6 +66,186 @@ def from_json(cls, json: str) -> SecurityState:
return cls(json)
+@dataclass
+class CertificateSecurityState:
+ '''
+ Details about the security state of the page certificate.
+ '''
+ #: Protocol name (e.g. "TLS 1.2" or "QUIC").
+ protocol: str
+
+ #: Key Exchange used by the connection, or the empty string if not applicable.
+ key_exchange: str
+
+ #: Cipher name.
+ cipher: str
+
+ #: Page certificate.
+ certificate: typing.List[str]
+
+ #: Certificate subject name.
+ subject_name: str
+
+ #: Name of the issuing CA.
+ issuer: str
+
+ #: Certificate valid from date.
+ valid_from: network.TimeSinceEpoch
+
+    #: Certificate valid to (expiration) date.
+ valid_to: network.TimeSinceEpoch
+
+    #: True if the certificate uses a weak signature algorithm.
+ certificate_has_weak_signature: bool
+
+ #: True if the certificate has a SHA1 signature in the chain.
+ certificate_has_sha1_signature: bool
+
+ #: True if modern SSL
+ modern_ssl: bool
+
+ #: True if the connection is using an obsolete SSL protocol.
+ obsolete_ssl_protocol: bool
+
+ #: True if the connection is using an obsolete SSL key exchange.
+ obsolete_ssl_key_exchange: bool
+
+ #: True if the connection is using an obsolete SSL cipher.
+ obsolete_ssl_cipher: bool
+
+ #: True if the connection is using an obsolete SSL signature.
+ obsolete_ssl_signature: bool
+
+ #: (EC)DH group used by the connection, if applicable.
+ key_exchange_group: typing.Optional[str] = None
+
+ #: TLS MAC. Note that AEAD ciphers do not have separate MACs.
+ mac: typing.Optional[str] = None
+
+ #: The highest priority network error code, if the certificate has an error.
+ certificate_network_error: typing.Optional[str] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['protocol'] = self.protocol
+ json['keyExchange'] = self.key_exchange
+ json['cipher'] = self.cipher
+ json['certificate'] = [i for i in self.certificate]
+ json['subjectName'] = self.subject_name
+ json['issuer'] = self.issuer
+ json['validFrom'] = self.valid_from.to_json()
+ json['validTo'] = self.valid_to.to_json()
+ json['certificateHasWeakSignature'] = self.certificate_has_weak_signature
+ json['certificateHasSha1Signature'] = self.certificate_has_sha1_signature
+ json['modernSSL'] = self.modern_ssl
+ json['obsoleteSslProtocol'] = self.obsolete_ssl_protocol
+ json['obsoleteSslKeyExchange'] = self.obsolete_ssl_key_exchange
+ json['obsoleteSslCipher'] = self.obsolete_ssl_cipher
+ json['obsoleteSslSignature'] = self.obsolete_ssl_signature
+ if self.key_exchange_group is not None:
+ json['keyExchangeGroup'] = self.key_exchange_group
+ if self.mac is not None:
+ json['mac'] = self.mac
+ if self.certificate_network_error is not None:
+ json['certificateNetworkError'] = self.certificate_network_error
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> CertificateSecurityState:
+ return cls(
+ protocol=str(json['protocol']),
+ key_exchange=str(json['keyExchange']),
+ cipher=str(json['cipher']),
+ certificate=[str(i) for i in json['certificate']],
+ subject_name=str(json['subjectName']),
+ issuer=str(json['issuer']),
+ valid_from=network.TimeSinceEpoch.from_json(json['validFrom']),
+ valid_to=network.TimeSinceEpoch.from_json(json['validTo']),
+ certificate_has_weak_signature=bool(json['certificateHasWeakSignature']),
+ certificate_has_sha1_signature=bool(json['certificateHasSha1Signature']),
+ modern_ssl=bool(json['modernSSL']),
+ obsolete_ssl_protocol=bool(json['obsoleteSslProtocol']),
+ obsolete_ssl_key_exchange=bool(json['obsoleteSslKeyExchange']),
+ obsolete_ssl_cipher=bool(json['obsoleteSslCipher']),
+ obsolete_ssl_signature=bool(json['obsoleteSslSignature']),
+ key_exchange_group=str(json['keyExchangeGroup']) if 'keyExchangeGroup' in json else None,
+ mac=str(json['mac']) if 'mac' in json else None,
+ certificate_network_error=str(json['certificateNetworkError']) if 'certificateNetworkError' in json else None,
+ )
+
+
+class SafetyTipStatus(enum.Enum):
+ BAD_REPUTATION = "badReputation"
+ LOOKALIKE = "lookalike"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> SafetyTipStatus:
+ return cls(json)
+
+
+@dataclass
+class SafetyTipInfo:
+ #: Describes whether the page triggers any safety tips or reputation warnings. Default is unknown.
+ safety_tip_status: SafetyTipStatus
+
+ #: The URL the safety tip suggested ("Did you mean?"). Only filled in for lookalike matches.
+ safe_url: typing.Optional[str] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['safetyTipStatus'] = self.safety_tip_status.to_json()
+ if self.safe_url is not None:
+ json['safeUrl'] = self.safe_url
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> SafetyTipInfo:
+ return cls(
+ safety_tip_status=SafetyTipStatus.from_json(json['safetyTipStatus']),
+ safe_url=str(json['safeUrl']) if 'safeUrl' in json else None,
+ )
+
+
+@dataclass
+class VisibleSecurityState:
+ '''
+ Security state information about the page.
+ '''
+ #: The security level of the page.
+ security_state: SecurityState
+
+ #: Array of security state issues ids.
+ security_state_issue_ids: typing.List[str]
+
+ #: Security state details about the page certificate.
+ certificate_security_state: typing.Optional[CertificateSecurityState] = None
+
+ #: The type of Safety Tip triggered on the page. Note that this field will be set even if the Safety Tip UI was not actually shown.
+ safety_tip_info: typing.Optional[SafetyTipInfo] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['securityState'] = self.security_state.to_json()
+ json['securityStateIssueIds'] = [i for i in self.security_state_issue_ids]
+ if self.certificate_security_state is not None:
+ json['certificateSecurityState'] = self.certificate_security_state.to_json()
+ if self.safety_tip_info is not None:
+ json['safetyTipInfo'] = self.safety_tip_info.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> VisibleSecurityState:
+ return cls(
+ security_state=SecurityState.from_json(json['securityState']),
+ security_state_issue_ids=[str(i) for i in json['securityStateIssueIds']],
+ certificate_security_state=CertificateSecurityState.from_json(json['certificateSecurityState']) if 'certificateSecurityState' in json else None,
+ safety_tip_info=SafetyTipInfo.from_json(json['safetyTipInfo']) if 'safetyTipInfo' in json else None,
+ )
+
+
@dataclass
class SecurityStateExplanation:
'''
@@ -291,22 +472,41 @@ def from_json(cls, json: T_JSON_DICT) -> CertificateError:
)
+@event_class('Security.visibleSecurityStateChanged')
+@dataclass
+class VisibleSecurityStateChanged:
+ '''
+ **EXPERIMENTAL**
+
+ The security state of the page changed.
+ '''
+ #: Security state information about the page.
+ visible_security_state: VisibleSecurityState
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> VisibleSecurityStateChanged:
+ return cls(
+ visible_security_state=VisibleSecurityState.from_json(json['visibleSecurityState'])
+ )
+
+
+@deprecated(version="1.3")
@event_class('Security.securityStateChanged')
@dataclass
class SecurityStateChanged:
'''
- The security state of the page changed.
+ The security state of the page changed. No longer being sent.
'''
#: Security state.
security_state: SecurityState
#: True if the page was loaded over cryptographic transport such as HTTPS.
scheme_is_cryptographic: bool
- #: List of explanations for the security state. If the overall security state is ``insecure`` or
- #: ``warning``, at least one corresponding explanation should be included.
+ #: Previously a list of explanations for the security state. Now always
+ #: empty.
explanations: typing.List[SecurityStateExplanation]
#: Information about insecure content on the page.
insecure_content_status: InsecureContentStatus
- #: Overrides user-visible description of the state.
+ #: Overrides user-visible description of the state. Always omitted.
summary: typing.Optional[str]
@classmethod
diff --git a/cdp/service_worker.py b/cdp/service_worker.py
index c6a8ff8..4bbee18 100644
--- a/cdp/service_worker.py
+++ b/cdp/service_worker.py
@@ -233,6 +233,27 @@ def dispatch_sync_event(
json = yield cmd_dict
+def dispatch_periodic_sync_event(
+ origin: str,
+ registration_id: RegistrationID,
+ tag: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ :param origin:
+ :param registration_id:
+ :param tag:
+ '''
+ params: T_JSON_DICT = dict()
+ params['origin'] = origin
+ params['registrationId'] = registration_id.to_json()
+ params['tag'] = tag
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'ServiceWorker.dispatchPeriodicSyncEvent',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def enable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
cmd_dict: T_JSON_DICT = {
diff --git a/cdp/storage.py b/cdp/storage.py
index 80f4b88..9c0a2ae 100644
--- a/cdp/storage.py
+++ b/cdp/storage.py
@@ -11,6 +11,9 @@
import enum
import typing
+from . import browser
+from . import network
+
class StorageType(enum.Enum):
'''
@@ -25,6 +28,7 @@ class StorageType(enum.Enum):
WEBSQL = "websql"
SERVICE_WORKERS = "service_workers"
CACHE_STORAGE = "cache_storage"
+ INTEREST_GROUPS = "interest_groups"
ALL_ = "all"
OTHER = "other"
@@ -61,6 +65,140 @@ def from_json(cls, json: T_JSON_DICT) -> UsageForType:
)
+@dataclass
+class TrustTokens:
+ '''
+ Pair of issuer origin and number of available (signed, but not used) Trust
+ Tokens from that issuer.
+ '''
+ issuer_origin: str
+
+ count: float
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['issuerOrigin'] = self.issuer_origin
+ json['count'] = self.count
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> TrustTokens:
+ return cls(
+ issuer_origin=str(json['issuerOrigin']),
+ count=float(json['count']),
+ )
+
+
+class InterestGroupAccessType(enum.Enum):
+ '''
+ Enum of interest group access types.
+ '''
+ JOIN = "join"
+ LEAVE = "leave"
+ UPDATE = "update"
+ BID = "bid"
+ WIN = "win"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> InterestGroupAccessType:
+ return cls(json)
+
+
+@dataclass
+class InterestGroupAd:
+ '''
+ Ad advertising element inside an interest group.
+ '''
+ render_url: str
+
+ metadata: typing.Optional[str] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['renderUrl'] = self.render_url
+ if self.metadata is not None:
+ json['metadata'] = self.metadata
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> InterestGroupAd:
+ return cls(
+ render_url=str(json['renderUrl']),
+ metadata=str(json['metadata']) if 'metadata' in json else None,
+ )
+
+
+@dataclass
+class InterestGroupDetails:
+ '''
+ The full details of an interest group.
+ '''
+ owner_origin: str
+
+ name: str
+
+ expiration_time: network.TimeSinceEpoch
+
+ joining_origin: str
+
+ trusted_bidding_signals_keys: typing.List[str]
+
+ ads: typing.List[InterestGroupAd]
+
+ ad_components: typing.List[InterestGroupAd]
+
+ bidding_url: typing.Optional[str] = None
+
+ bidding_wasm_helper_url: typing.Optional[str] = None
+
+ update_url: typing.Optional[str] = None
+
+ trusted_bidding_signals_url: typing.Optional[str] = None
+
+ user_bidding_signals: typing.Optional[str] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['ownerOrigin'] = self.owner_origin
+ json['name'] = self.name
+ json['expirationTime'] = self.expiration_time.to_json()
+ json['joiningOrigin'] = self.joining_origin
+ json['trustedBiddingSignalsKeys'] = [i for i in self.trusted_bidding_signals_keys]
+ json['ads'] = [i.to_json() for i in self.ads]
+ json['adComponents'] = [i.to_json() for i in self.ad_components]
+ if self.bidding_url is not None:
+ json['biddingUrl'] = self.bidding_url
+ if self.bidding_wasm_helper_url is not None:
+ json['biddingWasmHelperUrl'] = self.bidding_wasm_helper_url
+ if self.update_url is not None:
+ json['updateUrl'] = self.update_url
+ if self.trusted_bidding_signals_url is not None:
+ json['trustedBiddingSignalsUrl'] = self.trusted_bidding_signals_url
+ if self.user_bidding_signals is not None:
+ json['userBiddingSignals'] = self.user_bidding_signals
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> InterestGroupDetails:
+ return cls(
+ owner_origin=str(json['ownerOrigin']),
+ name=str(json['name']),
+ expiration_time=network.TimeSinceEpoch.from_json(json['expirationTime']),
+ joining_origin=str(json['joiningOrigin']),
+ trusted_bidding_signals_keys=[str(i) for i in json['trustedBiddingSignalsKeys']],
+ ads=[InterestGroupAd.from_json(i) for i in json['ads']],
+ ad_components=[InterestGroupAd.from_json(i) for i in json['adComponents']],
+ bidding_url=str(json['biddingUrl']) if 'biddingUrl' in json else None,
+ bidding_wasm_helper_url=str(json['biddingWasmHelperUrl']) if 'biddingWasmHelperUrl' in json else None,
+ update_url=str(json['updateUrl']) if 'updateUrl' in json else None,
+ trusted_bidding_signals_url=str(json['trustedBiddingSignalsUrl']) if 'trustedBiddingSignalsUrl' in json else None,
+ user_bidding_signals=str(json['userBiddingSignals']) if 'userBiddingSignals' in json else None,
+ )
+
+
def clear_data_for_origin(
origin: str,
storage_types: str
@@ -81,9 +219,68 @@ def clear_data_for_origin(
json = yield cmd_dict
+def get_cookies(
+ browser_context_id: typing.Optional[browser.BrowserContextID] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[network.Cookie]]:
+ '''
+ Returns all browser cookies.
+
+ :param browser_context_id: *(Optional)* Browser context to use when called on the browser endpoint.
+ :returns: Array of cookie objects.
+ '''
+ params: T_JSON_DICT = dict()
+ if browser_context_id is not None:
+ params['browserContextId'] = browser_context_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.getCookies',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return [network.Cookie.from_json(i) for i in json['cookies']]
+
+
+def set_cookies(
+ cookies: typing.List[network.CookieParam],
+ browser_context_id: typing.Optional[browser.BrowserContextID] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Sets given cookies.
+
+ :param cookies: Cookies to be set.
+ :param browser_context_id: *(Optional)* Browser context to use when called on the browser endpoint.
+ '''
+ params: T_JSON_DICT = dict()
+ params['cookies'] = [i.to_json() for i in cookies]
+ if browser_context_id is not None:
+ params['browserContextId'] = browser_context_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.setCookies',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def clear_cookies(
+ browser_context_id: typing.Optional[browser.BrowserContextID] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Clears cookies.
+
+ :param browser_context_id: *(Optional)* Browser context to use when called on the browser endpoint.
+ '''
+ params: T_JSON_DICT = dict()
+ if browser_context_id is not None:
+ params['browserContextId'] = browser_context_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.clearCookies',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def get_usage_and_quota(
origin: str
- ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[float, float, typing.List[UsageForType]]]:
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[float, float, bool, typing.List[UsageForType]]]:
'''
Returns usage and quota in bytes.
@@ -92,7 +289,8 @@ def get_usage_and_quota(
0. **usage** - Storage usage (bytes).
1. **quota** - Storage quota (bytes).
- 2. **usageBreakdown** - Storage usage per type (bytes).
+ 2. **overrideActive** - Whether or not the origin has an active storage quota override
+ 3. **usageBreakdown** - Storage usage per type (bytes).
'''
params: T_JSON_DICT = dict()
params['origin'] = origin
@@ -104,10 +302,34 @@ def get_usage_and_quota(
return (
float(json['usage']),
float(json['quota']),
+ bool(json['overrideActive']),
[UsageForType.from_json(i) for i in json['usageBreakdown']]
)
+def override_quota_for_origin(
+ origin: str,
+ quota_size: typing.Optional[float] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Override quota for the specified origin
+
+ **EXPERIMENTAL**
+
+ :param origin: Security origin.
+ :param quota_size: *(Optional)* The quota size (in bytes) to override the original quota with. If this is called multiple times, the overridden quota will be equal to the quotaSize provided in the final call. If this is called without specifying a quotaSize, the quota will be reset to the default value for the specified origin. If this is called multiple times with different origins, the override will be maintained for each origin until it is disabled (called without a quotaSize).
+ '''
+ params: T_JSON_DICT = dict()
+ params['origin'] = origin
+ if quota_size is not None:
+ params['quotaSize'] = quota_size
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.overrideQuotaForOrigin',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def track_cache_storage_for_origin(
origin: str
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
@@ -176,6 +398,87 @@ def untrack_indexed_db_for_origin(
json = yield cmd_dict
+def get_trust_tokens() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[TrustTokens]]:
+ '''
+ Returns the number of stored Trust Tokens per issuer for the
+ current browsing context.
+
+ **EXPERIMENTAL**
+
+ :returns:
+ '''
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.getTrustTokens',
+ }
+ json = yield cmd_dict
+ return [TrustTokens.from_json(i) for i in json['tokens']]
+
+
+def clear_trust_tokens(
+ issuer_origin: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,bool]:
+ '''
+ Removes all Trust Tokens issued by the provided issuerOrigin.
+ Leaves other stored data, including the issuer's Redemption Records, intact.
+
+ **EXPERIMENTAL**
+
+ :param issuer_origin:
+ :returns: True if any tokens were deleted, false otherwise.
+ '''
+ params: T_JSON_DICT = dict()
+ params['issuerOrigin'] = issuer_origin
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.clearTrustTokens',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return bool(json['didDeleteTokens'])
+
+
+def get_interest_group_details(
+ owner_origin: str,
+ name: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,InterestGroupDetails]:
+ '''
+ Gets details for a named interest group.
+
+ **EXPERIMENTAL**
+
+ :param owner_origin:
+ :param name:
+ :returns:
+ '''
+ params: T_JSON_DICT = dict()
+ params['ownerOrigin'] = owner_origin
+ params['name'] = name
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.getInterestGroupDetails',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return InterestGroupDetails.from_json(json['details'])
+
+
+def set_interest_group_tracking(
+ enable: bool
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Enables/Disables issuing of interestGroupAccessed events.
+
+ **EXPERIMENTAL**
+
+ :param enable:
+ '''
+ params: T_JSON_DICT = dict()
+ params['enable'] = enable
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.setInterestGroupTracking',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
@event_class('Storage.cacheStorageContentUpdated')
@dataclass
class CacheStorageContentUpdated:
@@ -247,3 +550,24 @@ def from_json(cls, json: T_JSON_DICT) -> IndexedDBListUpdated:
return cls(
origin=str(json['origin'])
)
+
+
+@event_class('Storage.interestGroupAccessed')
+@dataclass
+class InterestGroupAccessed:
+ '''
+ One of the interest groups was accessed by the associated page.
+ '''
+ access_time: network.TimeSinceEpoch
+ type_: InterestGroupAccessType
+ owner_origin: str
+ name: str
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> InterestGroupAccessed:
+ return cls(
+ access_time=network.TimeSinceEpoch.from_json(json['accessTime']),
+ type_=InterestGroupAccessType.from_json(json['type']),
+ owner_origin=str(json['ownerOrigin']),
+ name=str(json['name'])
+ )
diff --git a/cdp/system_info.py b/cdp/system_info.py
index 71cf4d7..a85621f 100644
--- a/cdp/system_info.py
+++ b/cdp/system_info.py
@@ -35,6 +35,12 @@ class GPUDevice:
#: String description of the GPU driver version.
driver_version: str
+ #: Sub sys ID of the GPU, only available on Windows.
+ sub_sys_id: typing.Optional[float] = None
+
+ #: Revision of the GPU, only available on Windows.
+ revision: typing.Optional[float] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['vendorId'] = self.vendor_id
@@ -43,6 +49,10 @@ def to_json(self) -> T_JSON_DICT:
json['deviceString'] = self.device_string
json['driverVendor'] = self.driver_vendor
json['driverVersion'] = self.driver_version
+ if self.sub_sys_id is not None:
+ json['subSysId'] = self.sub_sys_id
+ if self.revision is not None:
+ json['revision'] = self.revision
return json
@classmethod
@@ -54,6 +64,8 @@ def from_json(cls, json: T_JSON_DICT) -> GPUDevice:
device_string=str(json['deviceString']),
driver_vendor=str(json['driverVendor']),
driver_version=str(json['driverVersion']),
+ sub_sys_id=float(json['subSysId']) if 'subSysId' in json else None,
+ revision=float(json['revision']) if 'revision' in json else None,
)
@@ -166,6 +178,22 @@ def from_json(cls, json: str) -> SubsamplingFormat:
return cls(json)
+class ImageType(enum.Enum):
+ '''
+ Image format of a given image.
+ '''
+ JPEG = "jpeg"
+ WEBP = "webp"
+ UNKNOWN = "unknown"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> ImageType:
+ return cls(json)
+
+
@dataclass
class ImageDecodeAcceleratorCapability:
'''
@@ -173,7 +201,7 @@ class ImageDecodeAcceleratorCapability:
maximum resolutions and subsampling.
'''
#: Image coded, e.g. Jpeg.
- image_type: str
+ image_type: ImageType
#: Maximum supported dimensions of the image in pixels.
max_dimensions: Size
@@ -186,7 +214,7 @@ class ImageDecodeAcceleratorCapability:
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
- json['imageType'] = self.image_type
+ json['imageType'] = self.image_type.to_json()
json['maxDimensions'] = self.max_dimensions.to_json()
json['minDimensions'] = self.min_dimensions.to_json()
json['subsamplings'] = [i.to_json() for i in self.subsamplings]
@@ -195,7 +223,7 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> ImageDecodeAcceleratorCapability:
return cls(
- image_type=str(json['imageType']),
+ image_type=ImageType.from_json(json['imageType']),
max_dimensions=Size.from_json(json['maxDimensions']),
min_dimensions=Size.from_json(json['minDimensions']),
subsamplings=[SubsamplingFormat.from_json(i) for i in json['subsamplings']],
diff --git a/cdp/target.py b/cdp/target.py
index 4df4519..5e3d4a2 100644
--- a/cdp/target.py
+++ b/cdp/target.py
@@ -11,6 +11,10 @@
import enum
import typing
+from . import browser
+from . import page
+from deprecated.sphinx import deprecated # type: ignore
+
class TargetID(str):
def to_json(self) -> str:
@@ -39,18 +43,6 @@ def __repr__(self):
return 'SessionID({})'.format(super().__repr__())
-class BrowserContextID(str):
- def to_json(self) -> str:
- return self
-
- @classmethod
- def from_json(cls, json: str) -> BrowserContextID:
- return cls(json)
-
- def __repr__(self):
- return 'BrowserContextID({})'.format(super().__repr__())
-
-
@dataclass
class TargetInfo:
target_id: TargetID
@@ -64,10 +56,16 @@ class TargetInfo:
#: Whether the target has an attached client.
attached: bool
+ #: Whether the target has access to the originating window.
+ can_access_opener: bool
+
#: Opener target Id
opener_id: typing.Optional[TargetID] = None
- browser_context_id: typing.Optional[BrowserContextID] = None
+ #: Frame id of originating window (is only set if target has an opener).
+ opener_frame_id: typing.Optional[page.FrameId] = None
+
+ browser_context_id: typing.Optional[browser.BrowserContextID] = None
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
@@ -76,8 +74,11 @@ def to_json(self) -> T_JSON_DICT:
json['title'] = self.title
json['url'] = self.url
json['attached'] = self.attached
+ json['canAccessOpener'] = self.can_access_opener
if self.opener_id is not None:
json['openerId'] = self.opener_id.to_json()
+ if self.opener_frame_id is not None:
+ json['openerFrameId'] = self.opener_frame_id.to_json()
if self.browser_context_id is not None:
json['browserContextId'] = self.browser_context_id.to_json()
return json
@@ -90,8 +91,10 @@ def from_json(cls, json: T_JSON_DICT) -> TargetInfo:
title=str(json['title']),
url=str(json['url']),
attached=bool(json['attached']),
+ can_access_opener=bool(json['canAccessOpener']),
opener_id=TargetID.from_json(json['openerId']) if 'openerId' in json else None,
- browser_context_id=BrowserContextID.from_json(json['browserContextId']) if 'browserContextId' in json else None,
+ opener_frame_id=page.FrameId.from_json(json['openerFrameId']) if 'openerFrameId' in json else None,
+ browser_context_id=browser.BrowserContextID.from_json(json['browserContextId']) if 'browserContextId' in json else None,
)
@@ -140,7 +143,7 @@ def attach_to_target(
Attaches to the target with given id.
:param target_id:
- :param flatten: **(EXPERIMENTAL)** *(Optional)* Enables "flat" access to the session via specifying sessionId attribute in the commands.
+ :param flatten: *(Optional)* Enables "flat" access to the session via specifying sessionId attribute in the commands. We plan to make this the default, deprecate non-flattened mode, and eventually retire it. See crbug.com/991325.
:returns: Id assigned to the session.
'''
params: T_JSON_DICT = dict()
@@ -177,7 +180,7 @@ def close_target(
Closes the target. If the target is a page that gets closed too.
:param target_id:
- :returns:
+ :returns: Always set to true. If an error occurs, the response indicates protocol error.
'''
params: T_JSON_DICT = dict()
params['targetId'] = target_id.to_json()
@@ -219,23 +222,42 @@ def expose_dev_tools_protocol(
json = yield cmd_dict
-def create_browser_context() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,BrowserContextID]:
+def create_browser_context(
+ dispose_on_detach: typing.Optional[bool] = None,
+ proxy_server: typing.Optional[str] = None,
+ proxy_bypass_list: typing.Optional[str] = None,
+ origins_with_universal_network_access: typing.Optional[typing.List[str]] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,browser.BrowserContextID]:
'''
Creates a new empty BrowserContext. Similar to an incognito profile but you can have more than
one.
**EXPERIMENTAL**
+ :param dispose_on_detach: *(Optional)* If specified, disposes this context when debugging session disconnects.
+ :param proxy_server: *(Optional)* Proxy server, similar to the one passed to --proxy-server
+ :param proxy_bypass_list: *(Optional)* Proxy bypass list, similar to the one passed to --proxy-bypass-list
+ :param origins_with_universal_network_access: *(Optional)* An optional list of origins to grant unlimited cross-origin access to. Parts of the URL other than those constituting origin are ignored.
:returns: The id of the context created.
'''
+ params: T_JSON_DICT = dict()
+ if dispose_on_detach is not None:
+ params['disposeOnDetach'] = dispose_on_detach
+ if proxy_server is not None:
+ params['proxyServer'] = proxy_server
+ if proxy_bypass_list is not None:
+ params['proxyBypassList'] = proxy_bypass_list
+ if origins_with_universal_network_access is not None:
+ params['originsWithUniversalNetworkAccess'] = [i for i in origins_with_universal_network_access]
cmd_dict: T_JSON_DICT = {
'method': 'Target.createBrowserContext',
+ 'params': params,
}
json = yield cmd_dict
- return BrowserContextID.from_json(json['browserContextId'])
+ return browser.BrowserContextID.from_json(json['browserContextId'])
-def get_browser_contexts() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[BrowserContextID]]:
+def get_browser_contexts() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[browser.BrowserContextID]]:
'''
Returns all browser contexts created with ``Target.createBrowserContext`` method.
@@ -247,14 +269,14 @@ def get_browser_contexts() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Li
'method': 'Target.getBrowserContexts',
}
json = yield cmd_dict
- return [BrowserContextID.from_json(i) for i in json['browserContextIds']]
+ return [browser.BrowserContextID.from_json(i) for i in json['browserContextIds']]
def create_target(
url: str,
width: typing.Optional[int] = None,
height: typing.Optional[int] = None,
- browser_context_id: typing.Optional[BrowserContextID] = None,
+ browser_context_id: typing.Optional[browser.BrowserContextID] = None,
enable_begin_frame_control: typing.Optional[bool] = None,
new_window: typing.Optional[bool] = None,
background: typing.Optional[bool] = None
@@ -262,10 +284,10 @@ def create_target(
'''
Creates a new page.
- :param url: The initial URL the page will be navigated to.
+ :param url: The initial URL the page will be navigated to. An empty string indicates about:blank.
:param width: *(Optional)* Frame width in DIP (headless chrome only).
:param height: *(Optional)* Frame height in DIP (headless chrome only).
- :param browser_context_id: *(Optional)* The browser context to create the page in.
+ :param browser_context_id: **(EXPERIMENTAL)** *(Optional)* The browser context to create the page in.
:param enable_begin_frame_control: **(EXPERIMENTAL)** *(Optional)* Whether BeginFrames for this target will be controlled via DevTools (headless chrome only, not supported on MacOS yet, false by default).
:param new_window: *(Optional)* Whether to create a new Window or Tab (chrome-only, false by default).
:param background: *(Optional)* Whether to create the target in background or foreground (chrome-only, false by default).
@@ -316,7 +338,7 @@ def detach_from_target(
def dispose_browser_context(
- browser_context_id: BrowserContextID
+ browser_context_id: browser.BrowserContextID
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Deletes a BrowserContext. All the belonging pages will be closed without calling their
@@ -370,6 +392,7 @@ def get_targets() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[Target
return [TargetInfo.from_json(i) for i in json['targetInfos']]
+@deprecated(version="1.3")
def send_message_to_target(
message: str,
session_id: typing.Optional[SessionID] = None,
@@ -377,6 +400,10 @@ def send_message_to_target(
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Sends protocol message over session with given id.
+ Consider using flat mode instead; see commands attachToTarget, setAutoAttach,
+ and crbug.com/991325.
+
+ .. deprecated:: 1.3
:param message:
:param session_id: *(Optional)* Identifier of the session.
@@ -404,12 +431,14 @@ def set_auto_attach(
Controls whether to automatically attach to new targets which are considered to be related to
this one. When turned on, attaches to all existing related targets as well. When turned off,
automatically detaches from all currently attached targets.
+ This also clears all targets added by ``autoAttachRelated`` from the list of targets to watch
+ for creation of related targets.
**EXPERIMENTAL**
:param auto_attach: Whether to auto-attach to related targets.
:param wait_for_debugger_on_start: Whether to pause new targets when attaching to them. Use ```Runtime.runIfWaitingForDebugger``` to run paused targets.
- :param flatten: **(EXPERIMENTAL)** *(Optional)* Enables "flat" access to the session via specifying sessionId attribute in the commands.
+ :param flatten: *(Optional)* Enables "flat" access to the session via specifying sessionId attribute in the commands. We plan to make this the default, deprecate non-flattened mode, and eventually retire it. See crbug.com/991325.
'''
params: T_JSON_DICT = dict()
params['autoAttach'] = auto_attach
@@ -423,6 +452,32 @@ def set_auto_attach(
json = yield cmd_dict
+def auto_attach_related(
+ target_id: TargetID,
+ wait_for_debugger_on_start: bool
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Adds the specified target to the list of targets that will be monitored for any related target
+ creation (such as child frames, child workers and new versions of service worker) and reported
+ through ``attachedToTarget``. The specified target is also auto-attached.
+ This cancels the effect of any previous ``setAutoAttach`` and is also cancelled by subsequent
+ ``setAutoAttach``. Only available at the Browser target.
+
+ **EXPERIMENTAL**
+
+ :param target_id:
+ :param wait_for_debugger_on_start: Whether to pause new targets when attaching to them. Use ```Runtime.runIfWaitingForDebugger``` to run paused targets.
+ '''
+ params: T_JSON_DICT = dict()
+ params['targetId'] = target_id.to_json()
+ params['waitForDebuggerOnStart'] = wait_for_debugger_on_start
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Target.autoAttachRelated',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def set_discover_targets(
discover: bool
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
diff --git a/cdp/tracing.py b/cdp/tracing.py
index 475751b..e301790 100644
--- a/cdp/tracing.py
+++ b/cdp/tracing.py
@@ -120,6 +120,44 @@ def from_json(cls, json: str) -> StreamCompression:
return cls(json)
+class MemoryDumpLevelOfDetail(enum.Enum):
+ '''
+ Details exposed when memory request explicitly declared.
+ Keep consistent with memory_dump_request_args.h and
+ memory_instrumentation.mojom
+ '''
+ BACKGROUND = "background"
+ LIGHT = "light"
+ DETAILED = "detailed"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> MemoryDumpLevelOfDetail:
+ return cls(json)
+
+
+class TracingBackend(enum.Enum):
+ '''
+ Backend type to use for tracing. ``chrome`` uses the Chrome-integrated
+ tracing service and is supported on all platforms. ``system`` is only
+ supported on Chrome OS and uses the Perfetto system tracing service.
+ ``auto`` chooses ``system`` when the perfettoConfig provided to Tracing.start
+ specifies at least one non-Chrome data source; otherwise uses ``chrome``.
+ '''
+ AUTO = "auto"
+ CHROME = "chrome"
+ SYSTEM = "system"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> TracingBackend:
+ return cls(json)
+
+
def end() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Stop trace events collection.
@@ -160,17 +198,28 @@ def record_clock_sync_marker(
json = yield cmd_dict
-def request_memory_dump() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[str, bool]]:
+def request_memory_dump(
+ deterministic: typing.Optional[bool] = None,
+ level_of_detail: typing.Optional[MemoryDumpLevelOfDetail] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[str, bool]]:
'''
Request a global memory dump.
+ :param deterministic: *(Optional)* Enables more deterministic results by forcing garbage collection
+ :param level_of_detail: *(Optional)* Specifies level of details in memory dump. Defaults to "detailed".
:returns: A tuple with the following items:
0. **dumpGuid** - GUID of the resulting global memory dump.
1. **success** - True iff the global memory dump succeeded.
'''
+ params: T_JSON_DICT = dict()
+ if deterministic is not None:
+ params['deterministic'] = deterministic
+ if level_of_detail is not None:
+ params['levelOfDetail'] = level_of_detail.to_json()
cmd_dict: T_JSON_DICT = {
'method': 'Tracing.requestMemoryDump',
+ 'params': params,
}
json = yield cmd_dict
return (
@@ -186,7 +235,9 @@ def start(
transfer_mode: typing.Optional[str] = None,
stream_format: typing.Optional[StreamFormat] = None,
stream_compression: typing.Optional[StreamCompression] = None,
- trace_config: typing.Optional[TraceConfig] = None
+ trace_config: typing.Optional[TraceConfig] = None,
+ perfetto_config: typing.Optional[str] = None,
+ tracing_backend: typing.Optional[TracingBackend] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Start trace events collection.
@@ -196,8 +247,10 @@ def start(
:param buffer_usage_reporting_interval: *(Optional)* If set, the agent will issue bufferUsage events at this interval, specified in milliseconds
:param transfer_mode: *(Optional)* Whether to report trace events as series of dataCollected events or to save trace to a stream (defaults to ```ReportEvents````).
:param stream_format: *(Optional)* Trace data format to use. This only applies when using ````ReturnAsStream```` transfer mode (defaults to ````json````).
- :param stream_compression: *(Optional)* Compression format to use. This only applies when using ````ReturnAsStream```` transfer mode (defaults to ````none```)
+ :param stream_compression: *(Optional)* Compression format to use. This only applies when using ````ReturnAsStream```` transfer mode (defaults to ````none````)
:param trace_config: *(Optional)*
+ :param perfetto_config: *(Optional)* Base64-encoded serialized perfetto.protos.TraceConfig protobuf message When specified, the parameters ````categories````, ````options````, ````traceConfig```` are ignored. (Encoded as a base64 string when passed over JSON)
+    :param tracing_backend: *(Optional)* Backend type (defaults to ````auto````)
'''
params: T_JSON_DICT = dict()
if categories is not None:
@@ -214,6 +267,10 @@ def start(
params['streamCompression'] = stream_compression.to_json()
if trace_config is not None:
params['traceConfig'] = trace_config.to_json()
+ if perfetto_config is not None:
+ params['perfettoConfig'] = perfetto_config
+ if tracing_backend is not None:
+ params['tracingBackend'] = tracing_backend.to_json()
cmd_dict: T_JSON_DICT = {
'method': 'Tracing.start',
'params': params,
diff --git a/cdp/web_audio.py b/cdp/web_audio.py
index 1daee6d..fb89592 100644
--- a/cdp/web_audio.py
+++ b/cdp/web_audio.py
@@ -12,19 +12,19 @@
import typing
-class ContextId(str):
+class GraphObjectId(str):
'''
- Context's UUID in string
+    A unique ID for a graph object (AudioContext, AudioNode, AudioParam) in Web Audio API
'''
def to_json(self) -> str:
return self
@classmethod
- def from_json(cls, json: str) -> ContextId:
+ def from_json(cls, json: str) -> GraphObjectId:
return cls(json)
def __repr__(self):
- return 'ContextId({})'.format(super().__repr__())
+ return 'GraphObjectId({})'.format(super().__repr__())
class ContextType(enum.Enum):
@@ -58,6 +58,82 @@ def from_json(cls, json: str) -> ContextState:
return cls(json)
+class NodeType(str):
+ '''
+ Enum of AudioNode types
+ '''
+ def to_json(self) -> str:
+ return self
+
+ @classmethod
+ def from_json(cls, json: str) -> NodeType:
+ return cls(json)
+
+ def __repr__(self):
+ return 'NodeType({})'.format(super().__repr__())
+
+
+class ChannelCountMode(enum.Enum):
+ '''
+ Enum of AudioNode::ChannelCountMode from the spec
+ '''
+ CLAMPED_MAX = "clamped-max"
+ EXPLICIT = "explicit"
+ MAX_ = "max"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> ChannelCountMode:
+ return cls(json)
+
+
+class ChannelInterpretation(enum.Enum):
+ '''
+ Enum of AudioNode::ChannelInterpretation from the spec
+ '''
+ DISCRETE = "discrete"
+ SPEAKERS = "speakers"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> ChannelInterpretation:
+ return cls(json)
+
+
+class ParamType(str):
+ '''
+ Enum of AudioParam types
+ '''
+ def to_json(self) -> str:
+ return self
+
+ @classmethod
+ def from_json(cls, json: str) -> ParamType:
+ return cls(json)
+
+ def __repr__(self):
+ return 'ParamType({})'.format(super().__repr__())
+
+
+class AutomationRate(enum.Enum):
+ '''
+ Enum of AudioParam::AutomationRate from the spec
+ '''
+ A_RATE = "a-rate"
+ K_RATE = "k-rate"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> AutomationRate:
+ return cls(json)
+
+
@dataclass
class ContextRealtimeData:
'''
@@ -66,7 +142,7 @@ class ContextRealtimeData:
#: The current context time in second in BaseAudioContext.
current_time: float
- #: The time spent on rendering graph divided by render qunatum duration,
+ #: The time spent on rendering graph divided by render quantum duration,
#: and multiplied by 100. 100 means the audio renderer reached the full
#: capacity and glitch may occur.
render_capacity: float
@@ -100,7 +176,7 @@ class BaseAudioContext:
'''
Protocol object for BaseAudioContext
'''
- context_id: ContextId
+ context_id: GraphObjectId
context_type: ContextType
@@ -132,7 +208,7 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> BaseAudioContext:
return cls(
- context_id=ContextId.from_json(json['contextId']),
+ context_id=GraphObjectId.from_json(json['contextId']),
context_type=ContextType.from_json(json['contextType']),
context_state=ContextState.from_json(json['contextState']),
callback_buffer_size=float(json['callbackBufferSize']),
@@ -142,6 +218,123 @@ def from_json(cls, json: T_JSON_DICT) -> BaseAudioContext:
)
+@dataclass
+class AudioListener:
+ '''
+ Protocol object for AudioListener
+ '''
+ listener_id: GraphObjectId
+
+ context_id: GraphObjectId
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['listenerId'] = self.listener_id.to_json()
+ json['contextId'] = self.context_id.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> AudioListener:
+ return cls(
+ listener_id=GraphObjectId.from_json(json['listenerId']),
+ context_id=GraphObjectId.from_json(json['contextId']),
+ )
+
+
+@dataclass
+class AudioNode:
+ '''
+ Protocol object for AudioNode
+ '''
+ node_id: GraphObjectId
+
+ context_id: GraphObjectId
+
+ node_type: NodeType
+
+ number_of_inputs: float
+
+ number_of_outputs: float
+
+ channel_count: float
+
+ channel_count_mode: ChannelCountMode
+
+ channel_interpretation: ChannelInterpretation
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['nodeId'] = self.node_id.to_json()
+ json['contextId'] = self.context_id.to_json()
+ json['nodeType'] = self.node_type.to_json()
+ json['numberOfInputs'] = self.number_of_inputs
+ json['numberOfOutputs'] = self.number_of_outputs
+ json['channelCount'] = self.channel_count
+ json['channelCountMode'] = self.channel_count_mode.to_json()
+ json['channelInterpretation'] = self.channel_interpretation.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> AudioNode:
+ return cls(
+ node_id=GraphObjectId.from_json(json['nodeId']),
+ context_id=GraphObjectId.from_json(json['contextId']),
+ node_type=NodeType.from_json(json['nodeType']),
+ number_of_inputs=float(json['numberOfInputs']),
+ number_of_outputs=float(json['numberOfOutputs']),
+ channel_count=float(json['channelCount']),
+ channel_count_mode=ChannelCountMode.from_json(json['channelCountMode']),
+ channel_interpretation=ChannelInterpretation.from_json(json['channelInterpretation']),
+ )
+
+
+@dataclass
+class AudioParam:
+ '''
+ Protocol object for AudioParam
+ '''
+ param_id: GraphObjectId
+
+ node_id: GraphObjectId
+
+ context_id: GraphObjectId
+
+ param_type: ParamType
+
+ rate: AutomationRate
+
+ default_value: float
+
+ min_value: float
+
+ max_value: float
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['paramId'] = self.param_id.to_json()
+ json['nodeId'] = self.node_id.to_json()
+ json['contextId'] = self.context_id.to_json()
+ json['paramType'] = self.param_type.to_json()
+ json['rate'] = self.rate.to_json()
+ json['defaultValue'] = self.default_value
+ json['minValue'] = self.min_value
+ json['maxValue'] = self.max_value
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> AudioParam:
+ return cls(
+ param_id=GraphObjectId.from_json(json['paramId']),
+ node_id=GraphObjectId.from_json(json['nodeId']),
+ context_id=GraphObjectId.from_json(json['contextId']),
+ param_type=ParamType.from_json(json['paramType']),
+ rate=AutomationRate.from_json(json['rate']),
+ default_value=float(json['defaultValue']),
+ min_value=float(json['minValue']),
+ max_value=float(json['maxValue']),
+ )
+
+
def enable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Enables the WebAudio domain and starts sending context lifetime events.
@@ -163,7 +356,7 @@ def disable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
def get_realtime_data(
- context_id: ContextId
+ context_id: GraphObjectId
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,ContextRealtimeData]:
'''
Fetch the realtime data from the registered contexts.
@@ -196,18 +389,18 @@ def from_json(cls, json: T_JSON_DICT) -> ContextCreated:
)
-@event_class('WebAudio.contextDestroyed')
+@event_class('WebAudio.contextWillBeDestroyed')
@dataclass
-class ContextDestroyed:
+class ContextWillBeDestroyed:
'''
- Notifies that existing BaseAudioContext has been destroyed.
+ Notifies that an existing BaseAudioContext will be destroyed.
'''
- context_id: ContextId
+ context_id: GraphObjectId
@classmethod
- def from_json(cls, json: T_JSON_DICT) -> ContextDestroyed:
+ def from_json(cls, json: T_JSON_DICT) -> ContextWillBeDestroyed:
return cls(
- context_id=ContextId.from_json(json['contextId'])
+ context_id=GraphObjectId.from_json(json['contextId'])
)
@@ -224,3 +417,189 @@ def from_json(cls, json: T_JSON_DICT) -> ContextChanged:
return cls(
context=BaseAudioContext.from_json(json['context'])
)
+
+
+@event_class('WebAudio.audioListenerCreated')
+@dataclass
+class AudioListenerCreated:
+ '''
+ Notifies that the construction of an AudioListener has finished.
+ '''
+ listener: AudioListener
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> AudioListenerCreated:
+ return cls(
+ listener=AudioListener.from_json(json['listener'])
+ )
+
+
+@event_class('WebAudio.audioListenerWillBeDestroyed')
+@dataclass
+class AudioListenerWillBeDestroyed:
+ '''
+    Notifies that an existing AudioListener has been destroyed.
+ '''
+ context_id: GraphObjectId
+ listener_id: GraphObjectId
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> AudioListenerWillBeDestroyed:
+ return cls(
+ context_id=GraphObjectId.from_json(json['contextId']),
+ listener_id=GraphObjectId.from_json(json['listenerId'])
+ )
+
+
+@event_class('WebAudio.audioNodeCreated')
+@dataclass
+class AudioNodeCreated:
+ '''
+ Notifies that a new AudioNode has been created.
+ '''
+ node: AudioNode
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> AudioNodeCreated:
+ return cls(
+ node=AudioNode.from_json(json['node'])
+ )
+
+
+@event_class('WebAudio.audioNodeWillBeDestroyed')
+@dataclass
+class AudioNodeWillBeDestroyed:
+ '''
+ Notifies that an existing AudioNode has been destroyed.
+ '''
+ context_id: GraphObjectId
+ node_id: GraphObjectId
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> AudioNodeWillBeDestroyed:
+ return cls(
+ context_id=GraphObjectId.from_json(json['contextId']),
+ node_id=GraphObjectId.from_json(json['nodeId'])
+ )
+
+
+@event_class('WebAudio.audioParamCreated')
+@dataclass
+class AudioParamCreated:
+ '''
+ Notifies that a new AudioParam has been created.
+ '''
+ param: AudioParam
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> AudioParamCreated:
+ return cls(
+ param=AudioParam.from_json(json['param'])
+ )
+
+
+@event_class('WebAudio.audioParamWillBeDestroyed')
+@dataclass
+class AudioParamWillBeDestroyed:
+ '''
+ Notifies that an existing AudioParam has been destroyed.
+ '''
+ context_id: GraphObjectId
+ node_id: GraphObjectId
+ param_id: GraphObjectId
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> AudioParamWillBeDestroyed:
+ return cls(
+ context_id=GraphObjectId.from_json(json['contextId']),
+ node_id=GraphObjectId.from_json(json['nodeId']),
+ param_id=GraphObjectId.from_json(json['paramId'])
+ )
+
+
+@event_class('WebAudio.nodesConnected')
+@dataclass
+class NodesConnected:
+ '''
+ Notifies that two AudioNodes are connected.
+ '''
+ context_id: GraphObjectId
+ source_id: GraphObjectId
+ destination_id: GraphObjectId
+ source_output_index: typing.Optional[float]
+ destination_input_index: typing.Optional[float]
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> NodesConnected:
+ return cls(
+ context_id=GraphObjectId.from_json(json['contextId']),
+ source_id=GraphObjectId.from_json(json['sourceId']),
+ destination_id=GraphObjectId.from_json(json['destinationId']),
+ source_output_index=float(json['sourceOutputIndex']) if 'sourceOutputIndex' in json else None,
+ destination_input_index=float(json['destinationInputIndex']) if 'destinationInputIndex' in json else None
+ )
+
+
+@event_class('WebAudio.nodesDisconnected')
+@dataclass
+class NodesDisconnected:
+ '''
+ Notifies that AudioNodes are disconnected. The destination can be null, and it means all the outgoing connections from the source are disconnected.
+ '''
+ context_id: GraphObjectId
+ source_id: GraphObjectId
+ destination_id: GraphObjectId
+ source_output_index: typing.Optional[float]
+ destination_input_index: typing.Optional[float]
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> NodesDisconnected:
+ return cls(
+ context_id=GraphObjectId.from_json(json['contextId']),
+ source_id=GraphObjectId.from_json(json['sourceId']),
+ destination_id=GraphObjectId.from_json(json['destinationId']),
+ source_output_index=float(json['sourceOutputIndex']) if 'sourceOutputIndex' in json else None,
+ destination_input_index=float(json['destinationInputIndex']) if 'destinationInputIndex' in json else None
+ )
+
+
+@event_class('WebAudio.nodeParamConnected')
+@dataclass
+class NodeParamConnected:
+ '''
+ Notifies that an AudioNode is connected to an AudioParam.
+ '''
+ context_id: GraphObjectId
+ source_id: GraphObjectId
+ destination_id: GraphObjectId
+ source_output_index: typing.Optional[float]
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> NodeParamConnected:
+ return cls(
+ context_id=GraphObjectId.from_json(json['contextId']),
+ source_id=GraphObjectId.from_json(json['sourceId']),
+ destination_id=GraphObjectId.from_json(json['destinationId']),
+ source_output_index=float(json['sourceOutputIndex']) if 'sourceOutputIndex' in json else None
+ )
+
+
+@event_class('WebAudio.nodeParamDisconnected')
+@dataclass
+class NodeParamDisconnected:
+ '''
+    Notifies that an AudioNode is disconnected from an AudioParam.
+ '''
+ context_id: GraphObjectId
+ source_id: GraphObjectId
+ destination_id: GraphObjectId
+ source_output_index: typing.Optional[float]
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> NodeParamDisconnected:
+ return cls(
+ context_id=GraphObjectId.from_json(json['contextId']),
+ source_id=GraphObjectId.from_json(json['sourceId']),
+ destination_id=GraphObjectId.from_json(json['destinationId']),
+ source_output_index=float(json['sourceOutputIndex']) if 'sourceOutputIndex' in json else None
+ )
diff --git a/cdp/web_authn.py b/cdp/web_authn.py
index 595b865..727d36a 100644
--- a/cdp/web_authn.py
+++ b/cdp/web_authn.py
@@ -36,6 +36,18 @@ def from_json(cls, json: str) -> AuthenticatorProtocol:
return cls(json)
+class Ctap2Version(enum.Enum):
+ CTAP2_0 = "ctap2_0"
+ CTAP2_1 = "ctap2_1"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> Ctap2Version:
+ return cls(json)
+
+
class AuthenticatorTransport(enum.Enum):
USB = "usb"
NFC = "nfc"
@@ -57,22 +69,58 @@ class VirtualAuthenticatorOptions:
transport: AuthenticatorTransport
- has_resident_key: bool
+ #: Defaults to ctap2_0. Ignored if ``protocol`` == u2f.
+ ctap2_version: typing.Optional[Ctap2Version] = None
- has_user_verification: bool
+ #: Defaults to false.
+ has_resident_key: typing.Optional[bool] = None
+
+ #: Defaults to false.
+ has_user_verification: typing.Optional[bool] = None
+
+ #: If set to true, the authenticator will support the largeBlob extension.
+ #: https://w3c.github.io/webauthn#largeBlob
+ #: Defaults to false.
+ has_large_blob: typing.Optional[bool] = None
+
+ #: If set to true, the authenticator will support the credBlob extension.
+ #: https://fidoalliance.org/specs/fido-v2.1-rd-20201208/fido-client-to-authenticator-protocol-v2.1-rd-20201208.html#sctn-credBlob-extension
+ #: Defaults to false.
+ has_cred_blob: typing.Optional[bool] = None
+
+ #: If set to true, the authenticator will support the minPinLength extension.
+ #: https://fidoalliance.org/specs/fido-v2.1-ps-20210615/fido-client-to-authenticator-protocol-v2.1-ps-20210615.html#sctn-minpinlength-extension
+ #: Defaults to false.
+ has_min_pin_length: typing.Optional[bool] = None
#: If set to true, tests of user presence will succeed immediately.
#: Otherwise, they will not be resolved. Defaults to true.
automatic_presence_simulation: typing.Optional[bool] = None
+ #: Sets whether User Verification succeeds or fails for an authenticator.
+ #: Defaults to false.
+ is_user_verified: typing.Optional[bool] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['protocol'] = self.protocol.to_json()
json['transport'] = self.transport.to_json()
- json['hasResidentKey'] = self.has_resident_key
- json['hasUserVerification'] = self.has_user_verification
+ if self.ctap2_version is not None:
+ json['ctap2Version'] = self.ctap2_version.to_json()
+ if self.has_resident_key is not None:
+ json['hasResidentKey'] = self.has_resident_key
+ if self.has_user_verification is not None:
+ json['hasUserVerification'] = self.has_user_verification
+ if self.has_large_blob is not None:
+ json['hasLargeBlob'] = self.has_large_blob
+ if self.has_cred_blob is not None:
+ json['hasCredBlob'] = self.has_cred_blob
+ if self.has_min_pin_length is not None:
+ json['hasMinPinLength'] = self.has_min_pin_length
if self.automatic_presence_simulation is not None:
json['automaticPresenceSimulation'] = self.automatic_presence_simulation
+ if self.is_user_verified is not None:
+ json['isUserVerified'] = self.is_user_verified
return json
@classmethod
@@ -80,9 +128,14 @@ def from_json(cls, json: T_JSON_DICT) -> VirtualAuthenticatorOptions:
return cls(
protocol=AuthenticatorProtocol.from_json(json['protocol']),
transport=AuthenticatorTransport.from_json(json['transport']),
- has_resident_key=bool(json['hasResidentKey']),
- has_user_verification=bool(json['hasUserVerification']),
+ ctap2_version=Ctap2Version.from_json(json['ctap2Version']) if 'ctap2Version' in json else None,
+ has_resident_key=bool(json['hasResidentKey']) if 'hasResidentKey' in json else None,
+ has_user_verification=bool(json['hasUserVerification']) if 'hasUserVerification' in json else None,
+ has_large_blob=bool(json['hasLargeBlob']) if 'hasLargeBlob' in json else None,
+ has_cred_blob=bool(json['hasCredBlob']) if 'hasCredBlob' in json else None,
+ has_min_pin_length=bool(json['hasMinPinLength']) if 'hasMinPinLength' in json else None,
automatic_presence_simulation=bool(json['automaticPresenceSimulation']) if 'automaticPresenceSimulation' in json else None,
+ is_user_verified=bool(json['isUserVerified']) if 'isUserVerified' in json else None,
)
@@ -90,12 +143,9 @@ def from_json(cls, json: T_JSON_DICT) -> VirtualAuthenticatorOptions:
class Credential:
credential_id: str
- #: SHA-256 hash of the Relying Party ID the credential is scoped to. Must
- #: be 32 bytes long.
- #: See https://w3c.github.io/webauthn/#rpidhash
- rp_id_hash: str
+ is_resident_credential: bool
- #: The private key in PKCS#8 format.
+ #: The ECDSA P-256 private key in PKCS#8 format. (Encoded as a base64 string when passed over JSON)
private_key: str
#: Signature counter. This is incremented by one for each successful
@@ -103,21 +153,42 @@ class Credential:
#: See https://w3c.github.io/webauthn/#signature-counter
sign_count: int
+ #: Relying Party ID the credential is scoped to. Must be set when adding a
+ #: credential.
+ rp_id: typing.Optional[str] = None
+
+ #: An opaque byte sequence with a maximum size of 64 bytes mapping the
+ #: credential to a specific user. (Encoded as a base64 string when passed over JSON)
+ user_handle: typing.Optional[str] = None
+
+ #: The large blob associated with the credential.
+ #: See https://w3c.github.io/webauthn/#sctn-large-blob-extension (Encoded as a base64 string when passed over JSON)
+ large_blob: typing.Optional[str] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['credentialId'] = self.credential_id
- json['rpIdHash'] = self.rp_id_hash
+ json['isResidentCredential'] = self.is_resident_credential
json['privateKey'] = self.private_key
json['signCount'] = self.sign_count
+ if self.rp_id is not None:
+ json['rpId'] = self.rp_id
+ if self.user_handle is not None:
+ json['userHandle'] = self.user_handle
+ if self.large_blob is not None:
+ json['largeBlob'] = self.large_blob
return json
@classmethod
def from_json(cls, json: T_JSON_DICT) -> Credential:
return cls(
credential_id=str(json['credentialId']),
- rp_id_hash=str(json['rpIdHash']),
+ is_resident_credential=bool(json['isResidentCredential']),
private_key=str(json['privateKey']),
sign_count=int(json['signCount']),
+ rp_id=str(json['rpId']) if 'rpId' in json else None,
+ user_handle=str(json['userHandle']) if 'userHandle' in json else None,
+ large_blob=str(json['largeBlob']) if 'largeBlob' in json else None,
)
@@ -198,6 +269,29 @@ def add_credential(
json = yield cmd_dict
+def get_credential(
+ authenticator_id: AuthenticatorId,
+ credential_id: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,Credential]:
+ '''
+ Returns a single credential stored in the given virtual authenticator that
+ matches the credential ID.
+
+ :param authenticator_id:
+ :param credential_id:
+ :returns:
+ '''
+ params: T_JSON_DICT = dict()
+ params['authenticatorId'] = authenticator_id.to_json()
+ params['credentialId'] = credential_id
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'WebAuthn.getCredential',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return Credential.from_json(json['credential'])
+
+
def get_credentials(
authenticator_id: AuthenticatorId
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[Credential]]:
@@ -217,6 +311,26 @@ def get_credentials(
return [Credential.from_json(i) for i in json['credentials']]
+def remove_credential(
+ authenticator_id: AuthenticatorId,
+ credential_id: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Removes a credential from the authenticator.
+
+ :param authenticator_id:
+ :param credential_id:
+ '''
+ params: T_JSON_DICT = dict()
+ params['authenticatorId'] = authenticator_id.to_json()
+ params['credentialId'] = credential_id
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'WebAuthn.removeCredential',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def clear_credentials(
authenticator_id: AuthenticatorId
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
@@ -253,3 +367,24 @@ def set_user_verified(
'params': params,
}
json = yield cmd_dict
+
+
+def set_automatic_presence_simulation(
+ authenticator_id: AuthenticatorId,
+ enabled: bool
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Sets whether tests of user presence will succeed immediately (if true) or fail to resolve (if false) for an authenticator.
+ The default is true.
+
+ :param authenticator_id:
+ :param enabled:
+ '''
+ params: T_JSON_DICT = dict()
+ params['authenticatorId'] = authenticator_id.to_json()
+ params['enabled'] = enabled
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'WebAuthn.setAutomaticPresenceSimulation',
+ 'params': params,
+ }
+ json = yield cmd_dict
diff --git a/docs/api/accessibility.rst b/docs/api/accessibility.rst
index 078bd80..6a8a0b3 100644
--- a/docs/api/accessibility.rst
+++ b/docs/api/accessibility.rst
@@ -83,11 +83,31 @@ to. For more information, see
.. autofunction:: enable
+.. autofunction:: get_ax_node_and_ancestors
+
+.. autofunction:: get_child_ax_nodes
+
.. autofunction:: get_full_ax_tree
.. autofunction:: get_partial_ax_tree
+.. autofunction:: get_root_ax_node
+
+.. autofunction:: query_ax_tree
+
Events
------
-*There are no events in this module.*
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
+.. autoclass:: LoadComplete
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: NodesUpdated
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
diff --git a/docs/api/audits.rst b/docs/api/audits.rst
index d375a7a..0454630 100644
--- a/docs/api/audits.rst
+++ b/docs/api/audits.rst
@@ -14,7 +14,205 @@ Audits domain allows investigation of page violations and possible improvements.
Types
-----
-*There are no types in this module.*
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
+
+.. autoclass:: AffectedCookie
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: AffectedRequest
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: AffectedFrame
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: SameSiteCookieExclusionReason
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: SameSiteCookieWarningReason
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: SameSiteCookieOperation
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: SameSiteCookieIssueDetails
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: MixedContentResolutionStatus
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: MixedContentResourceType
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: MixedContentIssueDetails
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: BlockedByResponseReason
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: BlockedByResponseIssueDetails
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: HeavyAdResolutionStatus
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: HeavyAdReason
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: HeavyAdIssueDetails
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ContentSecurityPolicyViolationType
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: SourceCodeLocation
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ContentSecurityPolicyIssueDetails
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: SharedArrayBufferIssueType
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: SharedArrayBufferIssueDetails
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: TwaQualityEnforcementViolationType
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: TrustedWebActivityIssueDetails
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: LowTextContrastIssueDetails
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: CorsIssueDetails
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: AttributionReportingIssueType
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: AttributionReportingIssueDetails
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: QuirksModeIssueDetails
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: NavigatorUserAgentIssueDetails
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: GenericIssueErrorType
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: GenericIssueDetails
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: DeprecationIssueDetails
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ClientHintIssueReason
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: FederatedAuthRequestIssueDetails
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: FederatedAuthRequestIssueReason
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ClientHintIssueDetails
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: InspectorIssueCode
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: InspectorIssueDetails
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: IssueId
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: InspectorIssue
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
Commands
--------
@@ -28,9 +226,22 @@ commands, and ``z`` is the return type you should pay attention
to. For more information, see
:ref:`Getting Started: Commands `.
+.. autofunction:: check_contrast
+
+.. autofunction:: disable
+
+.. autofunction:: enable
+
.. autofunction:: get_encoded_response
Events
------
-*There are no events in this module.*
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
+.. autoclass:: IssueAdded
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
diff --git a/docs/api/browser.rst b/docs/api/browser.rst
index ba64fe9..2257b14 100644
--- a/docs/api/browser.rst
+++ b/docs/api/browser.rst
@@ -17,6 +17,11 @@ yourself. Instead, the API creates objects for you as return
values from commands, and then you can use those objects as
arguments to other commands.
+.. autoclass:: BrowserContextID
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: WindowID
:members:
:undoc-members:
@@ -37,6 +42,21 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: PermissionSetting
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: PermissionDescriptor
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: BrowserCommandId
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: Bucket
:members:
:undoc-members:
@@ -59,12 +79,16 @@ commands, and ``z`` is the return type you should pay attention
to. For more information, see
:ref:`Getting Started: Commands `.
+.. autofunction:: cancel_download
+
.. autofunction:: close
.. autofunction:: crash
.. autofunction:: crash_gpu_process
+.. autofunction:: execute_browser_command
+
.. autofunction:: get_browser_command_line
.. autofunction:: get_histogram
@@ -83,9 +107,25 @@ to. For more information, see
.. autofunction:: set_dock_tile
+.. autofunction:: set_download_behavior
+
+.. autofunction:: set_permission
+
.. autofunction:: set_window_bounds
Events
------
-*There are no events in this module.*
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
+.. autoclass:: DownloadWillBegin
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: DownloadProgress
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
diff --git a/docs/api/cast.rst b/docs/api/cast.rst
index fba003b..e0e050e 100644
--- a/docs/api/cast.rst
+++ b/docs/api/cast.rst
@@ -43,6 +43,8 @@ to. For more information, see
.. autofunction:: set_sink_to_use
+.. autofunction:: start_desktop_mirroring
+
.. autofunction:: start_tab_mirroring
.. autofunction:: stop_casting
diff --git a/docs/api/css.rst b/docs/api/css.rst
index f170a16..c68e7fe 100644
--- a/docs/api/css.rst
+++ b/docs/api/css.rst
@@ -114,11 +114,26 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: CSSContainerQuery
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: CSSSupports
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: PlatformFontUsage
:members:
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: FontVariationAxis
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: FontFace
:members:
:undoc-members:
@@ -177,10 +192,14 @@ to. For more information, see
.. autofunction:: get_style_sheet_text
+.. autofunction:: set_container_query_text
+
.. autofunction:: set_effective_property_value_for_node
.. autofunction:: set_keyframe_key
+.. autofunction:: set_local_fonts_enabled
+
.. autofunction:: set_media_text
.. autofunction:: set_rule_selector
@@ -189,12 +208,18 @@ to. For more information, see
.. autofunction:: set_style_texts
+.. autofunction:: set_supports_text
+
.. autofunction:: start_rule_usage_tracking
.. autofunction:: stop_rule_usage_tracking
+.. autofunction:: take_computed_style_updates
+
.. autofunction:: take_coverage_delta
+.. autofunction:: track_computed_style_updates
+
Events
------
diff --git a/docs/api/debugger.rst b/docs/api/debugger.rst
index 611f717..caa81c3 100644
--- a/docs/api/debugger.rst
+++ b/docs/api/debugger.rst
@@ -38,6 +38,11 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: LocationRange
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: CallFrame
:members:
:undoc-members:
@@ -58,6 +63,16 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: ScriptLanguage
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: DebugSymbols
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
Commands
--------
@@ -84,6 +99,8 @@ to. For more information, see
.. autofunction:: get_stack_trace
+.. autofunction:: get_wasm_bytecode
+
.. autofunction:: pause
.. autofunction:: pause_on_async_call
diff --git a/docs/api/dom.rst b/docs/api/dom.rst
index c677ffb..d629423 100644
--- a/docs/api/dom.rst
+++ b/docs/api/dom.rst
@@ -48,6 +48,11 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: CompatibilityMode
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: Node
:members:
:undoc-members:
@@ -78,6 +83,11 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: CSSComputedStyleProperty
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
Commands
--------
@@ -108,6 +118,8 @@ to. For more information, see
.. autofunction:: get_box_model
+.. autofunction:: get_container_for_node
+
.. autofunction:: get_content_quads
.. autofunction:: get_document
@@ -120,8 +132,14 @@ to. For more information, see
.. autofunction:: get_node_for_location
+.. autofunction:: get_node_stack_traces
+
+.. autofunction:: get_nodes_for_subtree_by_style
+
.. autofunction:: get_outer_html
+.. autofunction:: get_querying_descendants_for_container
+
.. autofunction:: get_relayout_boundary
.. autofunction:: get_search_results
@@ -158,6 +176,8 @@ to. For more information, see
.. autofunction:: resolve_node
+.. autofunction:: scroll_into_view_if_needed
+
.. autofunction:: set_attribute_value
.. autofunction:: set_attributes_as_text
@@ -168,6 +188,8 @@ to. For more information, see
.. autofunction:: set_node_name
+.. autofunction:: set_node_stack_traces_enabled
+
.. autofunction:: set_node_value
.. autofunction:: set_outer_html
diff --git a/docs/api/dom_debugger.rst b/docs/api/dom_debugger.rst
index 49a152c..4c5ec66 100644
--- a/docs/api/dom_debugger.rst
+++ b/docs/api/dom_debugger.rst
@@ -23,6 +23,11 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: CSPViolationType
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: EventListener
:members:
:undoc-members:
@@ -50,6 +55,8 @@ to. For more information, see
.. autofunction:: remove_xhr_breakpoint
+.. autofunction:: set_break_on_csp_violation
+
.. autofunction:: set_dom_breakpoint
.. autofunction:: set_event_listener_breakpoint
diff --git a/docs/api/emulation.rst b/docs/api/emulation.rst
index f66bea9..5abce21 100644
--- a/docs/api/emulation.rst
+++ b/docs/api/emulation.rst
@@ -22,11 +22,36 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: DisplayFeature
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: MediaFeature
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: VirtualTimePolicy
:members:
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: UserAgentBrandVersion
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: UserAgentMetadata
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: DisabledImageType
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
Commands
--------
@@ -45,24 +70,36 @@ to. For more information, see
.. autofunction:: clear_geolocation_override
+.. autofunction:: clear_idle_override
+
.. autofunction:: reset_page_scale_factor
+.. autofunction:: set_auto_dark_mode_override
+
.. autofunction:: set_cpu_throttling_rate
.. autofunction:: set_default_background_color_override
.. autofunction:: set_device_metrics_override
+.. autofunction:: set_disabled_image_types
+
.. autofunction:: set_document_cookie_disabled
.. autofunction:: set_emit_touch_events_for_mouse
.. autofunction:: set_emulated_media
+.. autofunction:: set_emulated_vision_deficiency
+
.. autofunction:: set_focus_emulation_enabled
.. autofunction:: set_geolocation_override
+.. autofunction:: set_idle_override
+
+.. autofunction:: set_locale_override
+
.. autofunction:: set_navigator_overrides
.. autofunction:: set_page_scale_factor
diff --git a/docs/api/event_breakpoints.rst b/docs/api/event_breakpoints.rst
new file mode 100644
index 0000000..af20be4
--- /dev/null
+++ b/docs/api/event_breakpoints.rst
@@ -0,0 +1,41 @@
+EventBreakpoints
+================
+
+EventBreakpoints permits setting breakpoints on particular operations and
+events in targets that run JavaScript but do not have a DOM.
+JavaScript execution will stop on these operations as if there was a regular
+breakpoint set.
+
+*This CDP domain is experimental.*
+
+.. module:: cdp.event_breakpoints
+
+* Types_
+* Commands_
+* Events_
+
+Types
+-----
+
+*There are no types in this module.*
+
+Commands
+--------
+
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands `.
+
+.. autofunction:: remove_instrumentation_breakpoint
+
+.. autofunction:: set_instrumentation_breakpoint
+
+Events
+------
+
+*There are no events in this module.*
diff --git a/docs/api/fetch.rst b/docs/api/fetch.rst
index beabe3a..b21b2d1 100644
--- a/docs/api/fetch.rst
+++ b/docs/api/fetch.rst
@@ -3,8 +3,6 @@ Fetch
A domain for letting clients substitute browser's network layer with client code.
-*This CDP domain is experimental.*
-
.. module:: cdp.fetch
* Types_
@@ -63,6 +61,8 @@ to. For more information, see
.. autofunction:: continue_request
+.. autofunction:: continue_response
+
.. autofunction:: continue_with_auth
.. autofunction:: disable
diff --git a/docs/api/input_.rst b/docs/api/input_.rst
index 8914793..cf8dff6 100644
--- a/docs/api/input_.rst
+++ b/docs/api/input_.rst
@@ -25,11 +25,26 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: MouseButton
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: TimeSinceEpoch
:members:
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: DragDataItem
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: DragData
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
Commands
--------
@@ -42,6 +57,8 @@ commands, and ``z`` is the return type you should pay attention
to. For more information, see
:ref:`Getting Started: Commands `.
+.. autofunction:: dispatch_drag_event
+
.. autofunction:: dispatch_key_event
.. autofunction:: dispatch_mouse_event
@@ -50,10 +67,14 @@ to. For more information, see
.. autofunction:: emulate_touch_from_mouse_event
+.. autofunction:: ime_set_composition
+
.. autofunction:: insert_text
.. autofunction:: set_ignore_input_events
+.. autofunction:: set_intercept_drags
+
.. autofunction:: synthesize_pinch_gesture
.. autofunction:: synthesize_scroll_gesture
@@ -63,4 +84,11 @@ to. For more information, see
Events
------
-*There are no events in this module.*
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
+.. autoclass:: DragIntercepted
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
diff --git a/docs/api/media.rst b/docs/api/media.rst
new file mode 100644
index 0000000..a167f4e
--- /dev/null
+++ b/docs/api/media.rst
@@ -0,0 +1,98 @@
+Media
+=====
+
+This domain allows detailed inspection of media elements
+
+*This CDP domain is experimental.*
+
+.. module:: cdp.media
+
+* Types_
+* Commands_
+* Events_
+
+Types
+-----
+
+Generally, you do not need to instantiate CDP types
+yourself. Instead, the API creates objects for you as return
+values from commands, and then you can use those objects as
+arguments to other commands.
+
+.. autoclass:: PlayerId
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: Timestamp
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: PlayerMessage
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: PlayerProperty
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: PlayerEvent
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: PlayerError
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+Commands
+--------
+
+Each command is a generator function. The return
+type ``Generator[x, y, z]`` indicates that the generator
+*yields* arguments of type ``x``, it must be resumed with
+an argument of type ``y``, and it returns type ``z``. In
+this library, types ``x`` and ``y`` are the same for all
+commands, and ``z`` is the return type you should pay attention
+to. For more information, see
+:ref:`Getting Started: Commands `.
+
+.. autofunction:: disable
+
+.. autofunction:: enable
+
+Events
+------
+
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
+.. autoclass:: PlayerPropertiesChanged
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: PlayerEventsAdded
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: PlayerMessagesLogged
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: PlayerErrorsRaised
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: PlayersCreated
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
diff --git a/docs/api/network.rst b/docs/api/network.rst
index b76bde4..025d6a3 100644
--- a/docs/api/network.rst
+++ b/docs/api/network.rst
@@ -68,6 +68,16 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: CookiePriority
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: CookieSourceScheme
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: ResourceTiming
:members:
:undoc-members:
@@ -78,6 +88,11 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: PostDataEntry
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: Request
:members:
:undoc-members:
@@ -103,6 +118,31 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: CorsError
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: CorsErrorStatus
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ServiceWorkerResponseSource
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: TrustTokenParams
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: TrustTokenOperationType
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: Response
:members:
:undoc-members:
@@ -208,6 +248,86 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: ContentEncoding
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: PrivateNetworkRequestPolicy
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: IPAddressSpace
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ConnectTiming
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ClientSecurityState
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: CrossOriginOpenerPolicyValue
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: CrossOriginOpenerPolicyStatus
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: CrossOriginEmbedderPolicyValue
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: CrossOriginEmbedderPolicyStatus
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: SecurityIsolationStatus
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ReportStatus
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ReportId
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ReportingApiReport
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ReportingApiEndpoint
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: LoadNetworkResourcePageResult
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: LoadNetworkResourceOptions
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
Commands
--------
@@ -226,6 +346,8 @@ to. For more information, see
.. autofunction:: can_emulate_network_conditions
+.. autofunction:: clear_accepted_encodings_override
+
.. autofunction:: clear_browser_cache
.. autofunction:: clear_browser_cookies
@@ -240,6 +362,8 @@ to. For more information, see
.. autofunction:: enable
+.. autofunction:: enable_reporting_api
+
.. autofunction:: get_all_cookies
.. autofunction:: get_certificate
@@ -252,10 +376,18 @@ to. For more information, see
.. autofunction:: get_response_body_for_interception
+.. autofunction:: get_security_isolation_status
+
+.. autofunction:: load_network_resource
+
.. autofunction:: replay_xhr
.. autofunction:: search_in_response_body
+.. autofunction:: set_accepted_encodings
+
+.. autofunction:: set_attach_debug_stack
+
.. autofunction:: set_blocked_ur_ls
.. autofunction:: set_bypass_service_worker
@@ -266,8 +398,6 @@ to. For more information, see
.. autofunction:: set_cookies
-.. autofunction:: set_data_size_limits_for_test
-
.. autofunction:: set_extra_http_headers
.. autofunction:: set_request_interception
@@ -368,6 +498,21 @@ you use the event's attributes.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: WebTransportCreated
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: WebTransportConnectionEstablished
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: WebTransportClosed
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: RequestWillBeSentExtraInfo
:members:
:undoc-members:
@@ -377,3 +522,43 @@ you use the event's attributes.
:members:
:undoc-members:
:exclude-members: from_json, to_json
+
+.. autoclass:: TrustTokenOperationDone
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: SubresourceWebBundleMetadataReceived
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: SubresourceWebBundleMetadataError
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: SubresourceWebBundleInnerResponseParsed
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: SubresourceWebBundleInnerResponseError
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ReportingApiReportAdded
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ReportingApiReportUpdated
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ReportingApiEndpointsChangedForOrigin
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
diff --git a/docs/api/overlay.rst b/docs/api/overlay.rst
index 7792e15..2e89767 100644
--- a/docs/api/overlay.rst
+++ b/docs/api/overlay.rst
@@ -19,11 +19,96 @@ yourself. Instead, the API creates objects for you as return
values from commands, and then you can use those objects as
arguments to other commands.
+.. autoclass:: SourceOrderConfig
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: GridHighlightConfig
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: FlexContainerHighlightConfig
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: FlexItemHighlightConfig
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: LineStyle
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: BoxStyle
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ContrastAlgorithm
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: HighlightConfig
:members:
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: ColorFormat
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: GridNodeHighlightConfig
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: FlexNodeHighlightConfig
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ScrollSnapContainerHighlightConfig
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ScrollSnapHighlightConfig
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: HingeConfig
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ContainerQueryHighlightConfig
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ContainerQueryContainerHighlightConfig
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: IsolatedElementHighlightConfig
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: IsolationModeHighlightConfig
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: InspectMode
:members:
:undoc-members:
@@ -45,8 +130,12 @@ to. For more information, see
.. autofunction:: enable
+.. autofunction:: get_grid_highlight_objects_for_test
+
.. autofunction:: get_highlight_object_for_test
+.. autofunction:: get_source_order_highlight_object_for_test
+
.. autofunction:: hide_highlight
.. autofunction:: highlight_frame
@@ -57,26 +146,42 @@ to. For more information, see
.. autofunction:: highlight_rect
+.. autofunction:: highlight_source_order
+
.. autofunction:: set_inspect_mode
.. autofunction:: set_paused_in_debugger_message
.. autofunction:: set_show_ad_highlights
+.. autofunction:: set_show_container_query_overlays
+
.. autofunction:: set_show_debug_borders
+.. autofunction:: set_show_flex_overlays
+
.. autofunction:: set_show_fps_counter
+.. autofunction:: set_show_grid_overlays
+
+.. autofunction:: set_show_hinge
+
.. autofunction:: set_show_hit_test_borders
+.. autofunction:: set_show_isolated_elements
+
.. autofunction:: set_show_layout_shift_regions
.. autofunction:: set_show_paint_rects
.. autofunction:: set_show_scroll_bottleneck_rects
+.. autofunction:: set_show_scroll_snap_overlays
+
.. autofunction:: set_show_viewport_size_on_resize
+.. autofunction:: set_show_web_vitals
+
Events
------
diff --git a/docs/api/page.rst b/docs/api/page.rst
index 043a880..3a8cf5d 100644
--- a/docs/api/page.rst
+++ b/docs/api/page.rst
@@ -22,6 +22,86 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: AdFrameType
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: AdFrameExplanation
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: AdFrameStatus
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: SecureContextType
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: CrossOriginIsolatedContextType
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: GatedAPIFeatures
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: PermissionsPolicyFeature
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: PermissionsPolicyBlockReason
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: PermissionsPolicyBlockLocator
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: PermissionsPolicyFeatureState
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: OriginTrialTokenStatus
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: OriginTrialStatus
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: OriginTrialUsageRestriction
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: OriginTrialToken
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: OriginTrialTokenWithStatus
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: OriginTrial
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: Frame
:members:
:undoc-members:
@@ -72,6 +152,11 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: AppManifestParsedProperties
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: LayoutViewport
:members:
:undoc-members:
@@ -92,6 +177,11 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: ScriptFontFamilies
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: FontSizes
:members:
:undoc-members:
@@ -102,6 +192,56 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: ClientNavigationDisposition
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: InstallabilityErrorArgument
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: InstallabilityError
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ReferrerPolicy
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: CompilationCacheParams
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: NavigationType
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: BackForwardCacheNotRestoredReason
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: BackForwardCacheNotRestoredReasonType
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: BackForwardCacheNotRestoredExplanation
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: BackForwardCacheNotRestoredExplanationTree
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
Commands
--------
@@ -148,6 +288,8 @@ to. For more information, see
.. autofunction:: generate_test_report
+.. autofunction:: get_app_id
+
.. autofunction:: get_app_manifest
.. autofunction:: get_cookies
@@ -158,14 +300,18 @@ to. For more information, see
.. autofunction:: get_layout_metrics
+.. autofunction:: get_manifest_icons
+
.. autofunction:: get_navigation_history
+.. autofunction:: get_origin_trials
+
+.. autofunction:: get_permissions_policy_state
+
.. autofunction:: get_resource_content
.. autofunction:: get_resource_tree
-.. autofunction:: handle_file_chooser
-
.. autofunction:: handle_java_script_dialog
.. autofunction:: navigate
@@ -174,6 +320,8 @@ to. For more information, see
.. autofunction:: print_to_pdf
+.. autofunction:: produce_compilation_cache
+
.. autofunction:: reload
.. autofunction:: remove_script_to_evaluate_on_load
@@ -208,7 +356,7 @@ to. For more information, see
.. autofunction:: set_lifecycle_events_enabled
-.. autofunction:: set_produce_compilation_cache
+.. autofunction:: set_spc_transaction_mode
.. autofunction:: set_touch_emulation_enabled
@@ -259,6 +407,11 @@ you use the event's attributes.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: DocumentOpened
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: FrameResized
:members:
:undoc-members:
@@ -289,6 +442,11 @@ you use the event's attributes.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: DownloadProgress
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: InterstitialHidden
:members:
:undoc-members:
@@ -314,6 +472,11 @@ you use the event's attributes.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: BackForwardCacheNotUsed
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: LoadEventFired
:members:
:undoc-members:
diff --git a/docs/api/application_cache.rst b/docs/api/performance_timeline.rst
similarity index 77%
rename from docs/api/application_cache.rst
rename to docs/api/performance_timeline.rst
index 5e3964a..9c3d509 100644
--- a/docs/api/application_cache.rst
+++ b/docs/api/performance_timeline.rst
@@ -1,9 +1,12 @@
-ApplicationCache
-================
+PerformanceTimeline
+===================
+
+Reporting of performance timeline events, as specified in
+https://w3c.github.io/performance-timeline/#dom-performanceobserver.
*This CDP domain is experimental.*
-.. module:: cdp.application_cache
+.. module:: cdp.performance_timeline
* Types_
* Commands_
@@ -17,17 +20,22 @@ yourself. Instead, the API creates objects for you as return
values from commands, and then you can use those objects as
arguments to other commands.
-.. autoclass:: ApplicationCacheResource
+.. autoclass:: LargestContentfulPaint
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: LayoutShiftAttribution
:members:
:undoc-members:
:exclude-members: from_json, to_json
-.. autoclass:: ApplicationCache
+.. autoclass:: LayoutShift
:members:
:undoc-members:
:exclude-members: from_json, to_json
-.. autoclass:: FrameWithManifest
+.. autoclass:: TimelineEvent
:members:
:undoc-members:
:exclude-members: from_json, to_json
@@ -46,12 +54,6 @@ to. For more information, see
.. autofunction:: enable
-.. autofunction:: get_application_cache_for_frame
-
-.. autofunction:: get_frames_with_manifests
-
-.. autofunction:: get_manifest_for_frame
-
Events
------
@@ -59,12 +61,7 @@ Generally, you do not need to instantiate CDP events
yourself. Instead, the API creates events for you and then
you use the event's attributes.
-.. autoclass:: ApplicationCacheStatusUpdated
- :members:
- :undoc-members:
- :exclude-members: from_json, to_json
-
-.. autoclass:: NetworkStateUpdated
+.. autoclass:: TimelineEventAdded
:members:
:undoc-members:
:exclude-members: from_json, to_json
diff --git a/docs/api/profiler.rst b/docs/api/profiler.rst
index 4019b39..02c25d6 100644
--- a/docs/api/profiler.rst
+++ b/docs/api/profiler.rst
@@ -112,3 +112,8 @@ you use the event's attributes.
:members:
:undoc-members:
:exclude-members: from_json, to_json
+
+.. autoclass:: PreciseCoverageDeltaUpdate
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
diff --git a/docs/api/security.rst b/docs/api/security.rst
index d111318..d1c3e74 100644
--- a/docs/api/security.rst
+++ b/docs/api/security.rst
@@ -32,6 +32,26 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: CertificateSecurityState
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: SafetyTipStatus
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: SafetyTipInfo
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: VisibleSecurityState
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: SecurityStateExplanation
:members:
:undoc-members:
@@ -81,6 +101,11 @@ you use the event's attributes.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: VisibleSecurityStateChanged
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: SecurityStateChanged
:members:
:undoc-members:
diff --git a/docs/api/service_worker.rst b/docs/api/service_worker.rst
index 86e7322..3ec58a0 100644
--- a/docs/api/service_worker.rst
+++ b/docs/api/service_worker.rst
@@ -63,6 +63,8 @@ to. For more information, see
.. autofunction:: disable
+.. autofunction:: dispatch_periodic_sync_event
+
.. autofunction:: dispatch_sync_event
.. autofunction:: enable
diff --git a/docs/api/storage.rst b/docs/api/storage.rst
index 4eb90b7..4989fb9 100644
--- a/docs/api/storage.rst
+++ b/docs/api/storage.rst
@@ -27,6 +27,26 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: TrustTokens
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: InterestGroupAccessType
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: InterestGroupAd
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: InterestGroupDetails
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
Commands
--------
@@ -39,10 +59,26 @@ commands, and ``z`` is the return type you should pay attention
to. For more information, see
:ref:`Getting Started: Commands `.
+.. autofunction:: clear_cookies
+
.. autofunction:: clear_data_for_origin
+.. autofunction:: clear_trust_tokens
+
+.. autofunction:: get_cookies
+
+.. autofunction:: get_interest_group_details
+
+.. autofunction:: get_trust_tokens
+
.. autofunction:: get_usage_and_quota
+.. autofunction:: override_quota_for_origin
+
+.. autofunction:: set_cookies
+
+.. autofunction:: set_interest_group_tracking
+
.. autofunction:: track_cache_storage_for_origin
.. autofunction:: track_indexed_db_for_origin
@@ -77,3 +113,8 @@ you use the event's attributes.
:members:
:undoc-members:
:exclude-members: from_json, to_json
+
+.. autoclass:: InterestGroupAccessed
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
diff --git a/docs/api/system_info.rst b/docs/api/system_info.rst
index 4e704f5..1b595c7 100644
--- a/docs/api/system_info.rst
+++ b/docs/api/system_info.rst
@@ -44,6 +44,11 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: ImageType
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: ImageDecodeAcceleratorCapability
:members:
:undoc-members:
diff --git a/docs/api/target.rst b/docs/api/target.rst
index 323cb37..263ae8a 100644
--- a/docs/api/target.rst
+++ b/docs/api/target.rst
@@ -27,11 +27,6 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
-.. autoclass:: BrowserContextID
- :members:
- :undoc-members:
- :exclude-members: from_json, to_json
-
.. autoclass:: TargetInfo
:members:
:undoc-members:
@@ -60,6 +55,8 @@ to. For more information, see
.. autofunction:: attach_to_target
+.. autofunction:: auto_attach_related
+
.. autofunction:: close_target
.. autofunction:: create_browser_context
diff --git a/docs/api/tracing.rst b/docs/api/tracing.rst
index 4898f92..fd134ba 100644
--- a/docs/api/tracing.rst
+++ b/docs/api/tracing.rst
@@ -37,6 +37,16 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: MemoryDumpLevelOfDetail
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: TracingBackend
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
Commands
--------
diff --git a/docs/api/web_audio.rst b/docs/api/web_audio.rst
index 4e5233c..4000aab 100644
--- a/docs/api/web_audio.rst
+++ b/docs/api/web_audio.rst
@@ -20,7 +20,7 @@ yourself. Instead, the API creates objects for you as return
values from commands, and then you can use those objects as
arguments to other commands.
-.. autoclass:: ContextId
+.. autoclass:: GraphObjectId
:members:
:undoc-members:
:exclude-members: from_json, to_json
@@ -35,6 +35,31 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: NodeType
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ChannelCountMode
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ChannelInterpretation
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: ParamType
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: AutomationRate
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: ContextRealtimeData
:members:
:undoc-members:
@@ -45,6 +70,21 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: AudioListener
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: AudioNode
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: AudioParam
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
Commands
--------
@@ -75,7 +115,7 @@ you use the event's attributes.
:undoc-members:
:exclude-members: from_json, to_json
-.. autoclass:: ContextDestroyed
+.. autoclass:: ContextWillBeDestroyed
:members:
:undoc-members:
:exclude-members: from_json, to_json
@@ -84,3 +124,53 @@ you use the event's attributes.
:members:
:undoc-members:
:exclude-members: from_json, to_json
+
+.. autoclass:: AudioListenerCreated
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: AudioListenerWillBeDestroyed
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: AudioNodeCreated
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: AudioNodeWillBeDestroyed
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: AudioParamCreated
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: AudioParamWillBeDestroyed
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: NodesConnected
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: NodesDisconnected
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: NodeParamConnected
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: NodeParamDisconnected
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
diff --git a/docs/api/web_authn.rst b/docs/api/web_authn.rst
index 8bf7d2a..8a1a0b9 100644
--- a/docs/api/web_authn.rst
+++ b/docs/api/web_authn.rst
@@ -30,6 +30,11 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: Ctap2Version
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: AuthenticatorTransport
:members:
:undoc-members:
@@ -67,10 +72,16 @@ to. For more information, see
.. autofunction:: enable
+.. autofunction:: get_credential
+
.. autofunction:: get_credentials
+.. autofunction:: remove_credential
+
.. autofunction:: remove_virtual_authenticator
+.. autofunction:: set_automatic_presence_simulation
+
.. autofunction:: set_user_verified
Events
diff --git a/generator/browser_protocol.json b/generator/browser_protocol.json
index d9a2683..9a80ae0 100644
--- a/generator/browser_protocol.json
+++ b/generator/browser_protocol.json
@@ -58,11 +58,13 @@
"description": "Enum of possible native property sources (as a subtype of a particular AXValueSourceType).",
"type": "string",
"enum": [
+ "description",
"figcaption",
"label",
"labelfor",
"labelwrapped",
"legend",
+ "rubyannotation",
"tablecaption",
"title",
"other"
@@ -306,6 +308,12 @@
"$ref": "AXProperty"
}
},
+ {
+ "name": "parentId",
+ "description": "ID for this node's parent.",
+ "optional": true,
+ "$ref": "AXNodeId"
+ },
{
"name": "childIds",
"description": "IDs for each of this node's child nodes.",
@@ -320,6 +328,12 @@
"description": "The backend ID for the associated DOM node, if any.",
"optional": true,
"$ref": "DOM.BackendNodeId"
+ },
+ {
+ "name": "frameId",
+          "description": "The frame ID for the frame associated with this node's document.",
+ "optional": true,
+ "$ref": "Page.FrameId"
}
]
}
@@ -376,11 +390,187 @@
},
{
"name": "getFullAXTree",
- "description": "Fetches the entire accessibility tree",
+ "description": "Fetches the entire accessibility tree for the root Document",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "depth",
+ "description": "The maximum depth at which descendants of the root node should be retrieved.\nIf omitted, the full tree is returned.",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "max_depth",
+ "description": "Deprecated. This parameter has been renamed to `depth`. If depth is not provided, max_depth will be used.",
+ "deprecated": true,
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "frameId",
+            "description": "The frame for whose document the AX tree should be retrieved.\nIf omitted, the root frame is used.",
+ "optional": true,
+ "$ref": "Page.FrameId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "nodes",
+ "type": "array",
+ "items": {
+ "$ref": "AXNode"
+ }
+ }
+ ]
+ },
+ {
+ "name": "getRootAXNode",
+ "description": "Fetches the root node.\nRequires `enable()` to have been called previously.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "frameId",
+ "description": "The frame in whose document the node resides.\nIf omitted, the root frame is used.",
+ "optional": true,
+ "$ref": "Page.FrameId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "node",
+ "$ref": "AXNode"
+ }
+ ]
+ },
+ {
+ "name": "getAXNodeAndAncestors",
+ "description": "Fetches a node and all ancestors up to and including the root.\nRequires `enable()` to have been called previously.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "nodeId",
+ "description": "Identifier of the node to get.",
+ "optional": true,
+ "$ref": "DOM.NodeId"
+ },
+ {
+ "name": "backendNodeId",
+ "description": "Identifier of the backend node to get.",
+ "optional": true,
+ "$ref": "DOM.BackendNodeId"
+ },
+ {
+ "name": "objectId",
+ "description": "JavaScript object id of the node wrapper to get.",
+ "optional": true,
+ "$ref": "Runtime.RemoteObjectId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "nodes",
+ "type": "array",
+ "items": {
+ "$ref": "AXNode"
+ }
+ }
+ ]
+ },
+ {
+ "name": "getChildAXNodes",
+ "description": "Fetches a particular accessibility node by AXNodeId.\nRequires `enable()` to have been called previously.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "id",
+ "$ref": "AXNodeId"
+ },
+ {
+ "name": "frameId",
+ "description": "The frame in whose document the node resides.\nIf omitted, the root frame is used.",
+ "optional": true,
+ "$ref": "Page.FrameId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "nodes",
+ "type": "array",
+ "items": {
+ "$ref": "AXNode"
+ }
+ }
+ ]
+ },
+ {
+ "name": "queryAXTree",
+          "description": "Query a DOM node's accessibility subtree for accessible name and role.\nThis command computes the name and role for all nodes in the subtree, including those that are\nignored for accessibility, and returns those that match the specified name and role. If no DOM\nnode is specified, or the DOM node does not exist, the command returns an error. If neither\n`accessibleName` nor `role` is specified, it returns all the accessibility nodes in the subtree.",
"experimental": true,
+ "parameters": [
+ {
+ "name": "nodeId",
+ "description": "Identifier of the node for the root to query.",
+ "optional": true,
+ "$ref": "DOM.NodeId"
+ },
+ {
+ "name": "backendNodeId",
+ "description": "Identifier of the backend node for the root to query.",
+ "optional": true,
+ "$ref": "DOM.BackendNodeId"
+ },
+ {
+ "name": "objectId",
+ "description": "JavaScript object id of the node wrapper for the root to query.",
+ "optional": true,
+ "$ref": "Runtime.RemoteObjectId"
+ },
+ {
+ "name": "accessibleName",
+ "description": "Find nodes with this computed name.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "role",
+ "description": "Find nodes with this computed role.",
+ "optional": true,
+ "type": "string"
+ }
+ ],
"returns": [
{
"name": "nodes",
+ "description": "A list of `Accessibility.AXNode` matching the specified attributes,\nincluding nodes that are ignored for accessibility.",
+ "type": "array",
+ "items": {
+ "$ref": "AXNode"
+ }
+ }
+ ]
+ }
+ ],
+ "events": [
+ {
+ "name": "loadComplete",
+ "description": "The loadComplete event mirrors the load complete event sent by the browser to assistive\ntechnology when the web page has finished loading.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "root",
+ "description": "New document root node.",
+ "$ref": "AXNode"
+ }
+ ]
+ },
+ {
+ "name": "nodesUpdated",
+          "description": "The nodesUpdated event is sent every time a previously requested node has changed in the tree.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "nodes",
+ "description": "Updated node data.",
"type": "array",
"items": {
"$ref": "AXNode"
@@ -738,2047 +928,2486 @@
]
},
{
- "domain": "ApplicationCache",
+ "domain": "Audits",
+ "description": "Audits domain allows investigation of page violations and possible improvements.",
"experimental": true,
+ "dependencies": [
+ "Network"
+ ],
"types": [
{
- "id": "ApplicationCacheResource",
- "description": "Detailed application cache resource information.",
+ "id": "AffectedCookie",
+ "description": "Information about a cookie that is affected by an inspector issue.",
"type": "object",
"properties": [
{
- "name": "url",
- "description": "Resource url.",
+ "name": "name",
+ "description": "The following three properties uniquely identify a cookie",
"type": "string"
},
{
- "name": "size",
- "description": "Resource size.",
- "type": "integer"
+ "name": "path",
+ "type": "string"
},
{
- "name": "type",
- "description": "Resource type.",
+ "name": "domain",
"type": "string"
}
]
},
{
- "id": "ApplicationCache",
- "description": "Detailed application cache information.",
+ "id": "AffectedRequest",
+ "description": "Information about a request that is affected by an inspector issue.",
"type": "object",
"properties": [
{
- "name": "manifestURL",
- "description": "Manifest URL.",
- "type": "string"
- },
- {
- "name": "size",
- "description": "Application cache size.",
- "type": "number"
- },
- {
- "name": "creationTime",
- "description": "Application cache creation time.",
- "type": "number"
- },
- {
- "name": "updateTime",
- "description": "Application cache update time.",
- "type": "number"
+ "name": "requestId",
+ "description": "The unique request id.",
+ "$ref": "Network.RequestId"
},
{
- "name": "resources",
- "description": "Application cache resources.",
- "type": "array",
- "items": {
- "$ref": "ApplicationCacheResource"
- }
+ "name": "url",
+ "optional": true,
+ "type": "string"
}
]
},
{
- "id": "FrameWithManifest",
- "description": "Frame identifier - manifest URL pair.",
+ "id": "AffectedFrame",
+ "description": "Information about the frame affected by an inspector issue.",
"type": "object",
"properties": [
{
"name": "frameId",
- "description": "Frame identifier.",
"$ref": "Page.FrameId"
- },
- {
- "name": "manifestURL",
- "description": "Manifest URL.",
- "type": "string"
- },
- {
- "name": "status",
- "description": "Application cache status.",
- "type": "integer"
}
]
- }
- ],
- "commands": [
+ },
{
- "name": "enable",
- "description": "Enables application cache domain notifications."
+ "id": "SameSiteCookieExclusionReason",
+ "type": "string",
+ "enum": [
+ "ExcludeSameSiteUnspecifiedTreatedAsLax",
+ "ExcludeSameSiteNoneInsecure",
+ "ExcludeSameSiteLax",
+ "ExcludeSameSiteStrict",
+ "ExcludeInvalidSameParty",
+ "ExcludeSamePartyCrossPartyContext"
+ ]
},
{
- "name": "getApplicationCacheForFrame",
- "description": "Returns relevant application cache data for the document in given frame.",
- "parameters": [
- {
- "name": "frameId",
- "description": "Identifier of the frame containing document whose application cache is retrieved.",
- "$ref": "Page.FrameId"
- }
- ],
- "returns": [
- {
- "name": "applicationCache",
- "description": "Relevant application cache data for the document in given frame.",
- "$ref": "ApplicationCache"
- }
+ "id": "SameSiteCookieWarningReason",
+ "type": "string",
+ "enum": [
+ "WarnSameSiteUnspecifiedCrossSiteContext",
+ "WarnSameSiteNoneInsecure",
+ "WarnSameSiteUnspecifiedLaxAllowUnsafe",
+ "WarnSameSiteStrictLaxDowngradeStrict",
+ "WarnSameSiteStrictCrossDowngradeStrict",
+ "WarnSameSiteStrictCrossDowngradeLax",
+ "WarnSameSiteLaxCrossDowngradeStrict",
+ "WarnSameSiteLaxCrossDowngradeLax"
]
},
{
- "name": "getFramesWithManifests",
- "description": "Returns array of frame identifiers with manifest urls for each frame containing a document\nassociated with some application cache.",
- "returns": [
- {
- "name": "frameIds",
- "description": "Array of frame identifiers with manifest urls for each frame containing a document\nassociated with some application cache.",
- "type": "array",
- "items": {
- "$ref": "FrameWithManifest"
- }
- }
+ "id": "SameSiteCookieOperation",
+ "type": "string",
+ "enum": [
+ "SetCookie",
+ "ReadCookie"
]
},
{
- "name": "getManifestForFrame",
- "description": "Returns manifest URL for document in the given frame.",
- "parameters": [
+ "id": "SameSiteCookieIssueDetails",
+ "description": "This information is currently necessary, as the front-end has a difficult\ntime finding a specific cookie. With this, we can convey specific error\ninformation without the cookie.",
+ "type": "object",
+ "properties": [
{
- "name": "frameId",
- "description": "Identifier of the frame containing document whose manifest is retrieved.",
- "$ref": "Page.FrameId"
- }
- ],
- "returns": [
+ "name": "cookie",
+ "description": "If AffectedCookie is not set then rawCookieLine contains the raw\nSet-Cookie header string. This hints at a problem where the\ncookie line is syntactically or semantically malformed in a way\nthat no valid cookie could be created.",
+ "optional": true,
+ "$ref": "AffectedCookie"
+ },
{
- "name": "manifestURL",
- "description": "Manifest URL for document in the given frame.",
+ "name": "rawCookieLine",
+ "optional": true,
"type": "string"
- }
- ]
- }
- ],
- "events": [
- {
- "name": "applicationCacheStatusUpdated",
- "parameters": [
+ },
{
- "name": "frameId",
- "description": "Identifier of the frame containing document whose application cache updated status.",
- "$ref": "Page.FrameId"
+ "name": "cookieWarningReasons",
+ "type": "array",
+ "items": {
+ "$ref": "SameSiteCookieWarningReason"
+ }
},
{
- "name": "manifestURL",
- "description": "Manifest URL.",
+ "name": "cookieExclusionReasons",
+ "type": "array",
+ "items": {
+ "$ref": "SameSiteCookieExclusionReason"
+ }
+ },
+ {
+ "name": "operation",
+ "description": "Optionally identifies the site-for-cookies and the cookie url, which\nmay be used by the front-end as additional context.",
+ "$ref": "SameSiteCookieOperation"
+ },
+ {
+ "name": "siteForCookies",
+ "optional": true,
"type": "string"
},
{
- "name": "status",
- "description": "Updated application cache status.",
- "type": "integer"
+ "name": "cookieUrl",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "request",
+ "optional": true,
+ "$ref": "AffectedRequest"
}
]
},
{
- "name": "networkStateUpdated",
- "parameters": [
- {
- "name": "isNowOnline",
- "type": "boolean"
- }
+ "id": "MixedContentResolutionStatus",
+ "type": "string",
+ "enum": [
+ "MixedContentBlocked",
+ "MixedContentAutomaticallyUpgraded",
+ "MixedContentWarning"
]
- }
- ]
- },
- {
- "domain": "Audits",
- "description": "Audits domain allows investigation of page violations and possible improvements.",
- "experimental": true,
- "dependencies": [
- "Network"
- ],
- "commands": [
+ },
{
- "name": "getEncodedResponse",
- "description": "Returns the response body and size if it were re-encoded with the specified settings. Only\napplies to images.",
- "parameters": [
+ "id": "MixedContentResourceType",
+ "type": "string",
+ "enum": [
+ "Audio",
+ "Beacon",
+ "CSPReport",
+ "Download",
+ "EventSource",
+ "Favicon",
+ "Font",
+ "Form",
+ "Frame",
+ "Image",
+ "Import",
+ "Manifest",
+ "Ping",
+ "PluginData",
+ "PluginResource",
+ "Prefetch",
+ "Resource",
+ "Script",
+ "ServiceWorker",
+ "SharedWorker",
+ "Stylesheet",
+ "Track",
+ "Video",
+ "Worker",
+ "XMLHttpRequest",
+ "XSLT"
+ ]
+ },
+ {
+ "id": "MixedContentIssueDetails",
+ "type": "object",
+ "properties": [
{
- "name": "requestId",
- "description": "Identifier of the network request to get content for.",
- "$ref": "Network.RequestId"
+ "name": "resourceType",
+ "description": "The type of resource causing the mixed content issue (css, js, iframe,\nform,...). Marked as optional because it is mapped to from\nblink::mojom::RequestContextType, which will be replaced\nby network::mojom::RequestDestination",
+ "optional": true,
+ "$ref": "MixedContentResourceType"
},
{
- "name": "encoding",
- "description": "The encoding to use.",
- "type": "string",
- "enum": [
- "webp",
- "jpeg",
- "png"
- ]
+ "name": "resolutionStatus",
+ "description": "The way the mixed content issue is being resolved.",
+ "$ref": "MixedContentResolutionStatus"
},
{
- "name": "quality",
- "description": "The quality of the encoding (0-1). (defaults to 1)",
+ "name": "insecureURL",
+ "description": "The unsafe http url causing the mixed content issue.",
+ "type": "string"
+ },
+ {
+ "name": "mainResourceURL",
+ "description": "The url responsible for the call to an unsafe url.",
+ "type": "string"
+ },
+ {
+ "name": "request",
+ "description": "The mixed content request.\nDoes not always exist (e.g. for unsafe form submission urls).",
"optional": true,
- "type": "number"
+ "$ref": "AffectedRequest"
},
{
- "name": "sizeOnly",
- "description": "Whether to only return the size information (defaults to false).",
+ "name": "frame",
+ "description": "Optional because not every mixed content issue is necessarily linked to a frame.",
"optional": true,
- "type": "boolean"
+ "$ref": "AffectedFrame"
}
- ],
- "returns": [
+ ]
+ },
+ {
+ "id": "BlockedByResponseReason",
+ "description": "Enum indicating the reason a response has been blocked. These reasons are\nrefinements of the net error BLOCKED_BY_RESPONSE.",
+ "type": "string",
+ "enum": [
+ "CoepFrameResourceNeedsCoepHeader",
+ "CoopSandboxedIFrameCannotNavigateToCoopPage",
+ "CorpNotSameOrigin",
+ "CorpNotSameOriginAfterDefaultedToSameOriginByCoep",
+ "CorpNotSameSite"
+ ]
+ },
+ {
+ "id": "BlockedByResponseIssueDetails",
+ "description": "Details for a request that has been blocked with the BLOCKED_BY_RESPONSE\ncode. Currently only used for COEP/COOP, but may be extended to include\nsome CSP errors in the future.",
+ "type": "object",
+ "properties": [
{
- "name": "body",
- "description": "The encoded body as a base64 string. Omitted if sizeOnly is true.",
+ "name": "request",
+ "$ref": "AffectedRequest"
+ },
+ {
+ "name": "parentFrame",
"optional": true,
- "type": "string"
+ "$ref": "AffectedFrame"
},
{
- "name": "originalSize",
- "description": "Size before re-encoding.",
- "type": "integer"
+ "name": "blockedFrame",
+ "optional": true,
+ "$ref": "AffectedFrame"
},
{
- "name": "encodedSize",
- "description": "Size after re-encoding.",
- "type": "integer"
+ "name": "reason",
+ "$ref": "BlockedByResponseReason"
}
]
- }
- ]
- },
- {
- "domain": "BackgroundService",
- "description": "Defines events for background web platform features.",
- "experimental": true,
- "types": [
+ },
{
- "id": "ServiceName",
- "description": "The Background Service that will be associated with the commands/events.\nEvery Background Service operates independently, but they share the same\nAPI.",
+ "id": "HeavyAdResolutionStatus",
"type": "string",
"enum": [
- "backgroundFetch",
- "backgroundSync",
- "pushMessaging",
- "notifications",
- "paymentHandler"
+ "HeavyAdBlocked",
+ "HeavyAdWarning"
]
},
{
- "id": "EventMetadata",
- "description": "A key-value pair for additional event information to pass along.",
+ "id": "HeavyAdReason",
+ "type": "string",
+ "enum": [
+ "NetworkTotalLimit",
+ "CpuTotalLimit",
+ "CpuPeakLimit"
+ ]
+ },
+ {
+ "id": "HeavyAdIssueDetails",
"type": "object",
"properties": [
{
- "name": "key",
- "type": "string"
+ "name": "resolution",
+ "description": "The resolution status, either blocking the content or warning.",
+ "$ref": "HeavyAdResolutionStatus"
},
{
- "name": "value",
- "type": "string"
+ "name": "reason",
+ "description": "The reason the ad was blocked, total network or cpu or peak cpu.",
+ "$ref": "HeavyAdReason"
+ },
+ {
+ "name": "frame",
+ "description": "The frame that was blocked.",
+ "$ref": "AffectedFrame"
}
]
},
{
- "id": "BackgroundServiceEvent",
+ "id": "ContentSecurityPolicyViolationType",
+ "type": "string",
+ "enum": [
+ "kInlineViolation",
+ "kEvalViolation",
+ "kURLViolation",
+ "kTrustedTypesSinkViolation",
+ "kTrustedTypesPolicyViolation",
+ "kWasmEvalViolation"
+ ]
+ },
+ {
+ "id": "SourceCodeLocation",
"type": "object",
"properties": [
{
- "name": "timestamp",
- "description": "Timestamp of the event (in seconds).",
- "$ref": "Network.TimeSinceEpoch"
+ "name": "scriptId",
+ "optional": true,
+ "$ref": "Runtime.ScriptId"
},
{
- "name": "origin",
- "description": "The origin this event belongs to.",
+ "name": "url",
"type": "string"
},
{
- "name": "serviceWorkerRegistrationId",
- "description": "The Service Worker ID that initiated the event.",
- "$ref": "ServiceWorker.RegistrationID"
+ "name": "lineNumber",
+ "type": "integer"
},
{
- "name": "service",
- "description": "The Background Service this event belongs to.",
- "$ref": "ServiceName"
- },
+ "name": "columnNumber",
+ "type": "integer"
+ }
+ ]
+ },
+ {
+ "id": "ContentSecurityPolicyIssueDetails",
+ "type": "object",
+ "properties": [
{
- "name": "eventName",
- "description": "A description of the event.",
+ "name": "blockedURL",
+ "description": "The url not included in allowed sources.",
+ "optional": true,
"type": "string"
},
{
- "name": "instanceId",
- "description": "An identifier that groups related events together.",
+ "name": "violatedDirective",
+ "description": "Specific directive that is violated, causing the CSP issue.",
"type": "string"
},
{
- "name": "eventMetadata",
- "description": "A list of event-specific information.",
- "type": "array",
- "items": {
- "$ref": "EventMetadata"
- }
- }
- ]
- }
- ],
- "commands": [
- {
- "name": "startObserving",
- "description": "Enables event updates for the service.",
- "parameters": [
+ "name": "isReportOnly",
+ "type": "boolean"
+ },
{
- "name": "service",
- "$ref": "ServiceName"
+ "name": "contentSecurityPolicyViolationType",
+ "$ref": "ContentSecurityPolicyViolationType"
+ },
+ {
+ "name": "frameAncestor",
+ "optional": true,
+ "$ref": "AffectedFrame"
+ },
+ {
+ "name": "sourceCodeLocation",
+ "optional": true,
+ "$ref": "SourceCodeLocation"
+ },
+ {
+ "name": "violatingNodeId",
+ "optional": true,
+ "$ref": "DOM.BackendNodeId"
}
]
},
{
- "name": "stopObserving",
- "description": "Disables event updates for the service.",
- "parameters": [
- {
- "name": "service",
- "$ref": "ServiceName"
- }
+ "id": "SharedArrayBufferIssueType",
+ "type": "string",
+ "enum": [
+ "TransferIssue",
+ "CreationIssue"
]
},
{
- "name": "setRecording",
- "description": "Set the recording state for the service.",
- "parameters": [
+ "id": "SharedArrayBufferIssueDetails",
+ "description": "Details for a issue arising from an SAB being instantiated in, or\ntransferred to a context that is not cross-origin isolated.",
+ "type": "object",
+ "properties": [
{
- "name": "shouldRecord",
+ "name": "sourceCodeLocation",
+ "$ref": "SourceCodeLocation"
+ },
+ {
+ "name": "isWarning",
"type": "boolean"
},
{
- "name": "service",
- "$ref": "ServiceName"
+ "name": "type",
+ "$ref": "SharedArrayBufferIssueType"
}
]
},
{
- "name": "clearEvents",
- "description": "Clears all stored data for the service.",
- "parameters": [
- {
- "name": "service",
- "$ref": "ServiceName"
- }
+ "id": "TwaQualityEnforcementViolationType",
+ "type": "string",
+ "enum": [
+ "kHttpError",
+ "kUnavailableOffline",
+ "kDigitalAssetLinks"
]
- }
- ],
- "events": [
+ },
{
- "name": "recordingStateChanged",
- "description": "Called when the recording state for the service has been updated.",
- "parameters": [
+ "id": "TrustedWebActivityIssueDetails",
+ "type": "object",
+ "properties": [
{
- "name": "isRecording",
- "type": "boolean"
+ "name": "url",
+ "description": "The url that triggers the violation.",
+ "type": "string"
},
{
- "name": "service",
- "$ref": "ServiceName"
+ "name": "violationType",
+ "$ref": "TwaQualityEnforcementViolationType"
+ },
+ {
+ "name": "httpStatusCode",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "packageName",
+ "description": "The package name of the Trusted Web Activity client app. This field is\nonly used when violation type is kDigitalAssetLinks.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "signature",
+ "description": "The signature of the Trusted Web Activity client app. This field is only\nused when violation type is kDigitalAssetLinks.",
+ "optional": true,
+ "type": "string"
}
]
},
{
- "name": "backgroundServiceEventReceived",
- "description": "Called with all existing backgroundServiceEvents when enabled, and all new\nevents afterwards if enabled and recording.",
- "parameters": [
+ "id": "LowTextContrastIssueDetails",
+ "type": "object",
+ "properties": [
{
- "name": "backgroundServiceEvent",
- "$ref": "BackgroundServiceEvent"
- }
- ]
- }
- ]
- },
- {
- "domain": "Browser",
- "description": "The Browser domain defines methods and events for browser managing.",
- "types": [
- {
- "id": "WindowID",
- "experimental": true,
- "type": "integer"
- },
- {
- "id": "WindowState",
- "description": "The state of the browser window.",
- "experimental": true,
- "type": "string",
- "enum": [
- "normal",
- "minimized",
- "maximized",
- "fullscreen"
+ "name": "violatingNodeId",
+ "$ref": "DOM.BackendNodeId"
+ },
+ {
+ "name": "violatingNodeSelector",
+ "type": "string"
+ },
+ {
+ "name": "contrastRatio",
+ "type": "number"
+ },
+ {
+ "name": "thresholdAA",
+ "type": "number"
+ },
+ {
+ "name": "thresholdAAA",
+ "type": "number"
+ },
+ {
+ "name": "fontSize",
+ "type": "string"
+ },
+ {
+ "name": "fontWeight",
+ "type": "string"
+ }
]
},
{
- "id": "Bounds",
- "description": "Browser window bounds information",
- "experimental": true,
+ "id": "CorsIssueDetails",
+ "description": "Details for a CORS related issue, e.g. a warning or error related to\nCORS RFC1918 enforcement.",
"type": "object",
"properties": [
{
- "name": "left",
- "description": "The offset from the left edge of the screen to the window in pixels.",
- "optional": true,
- "type": "integer"
+ "name": "corsErrorStatus",
+ "$ref": "Network.CorsErrorStatus"
},
{
- "name": "top",
- "description": "The offset from the top edge of the screen to the window in pixels.",
+ "name": "isWarning",
+ "type": "boolean"
+ },
+ {
+ "name": "request",
+ "$ref": "AffectedRequest"
+ },
+ {
+ "name": "location",
"optional": true,
- "type": "integer"
+ "$ref": "SourceCodeLocation"
},
{
- "name": "width",
- "description": "The window width in pixels.",
+ "name": "initiatorOrigin",
"optional": true,
- "type": "integer"
+ "type": "string"
},
{
- "name": "height",
- "description": "The window height in pixels.",
+ "name": "resourceIPAddressSpace",
"optional": true,
- "type": "integer"
+ "$ref": "Network.IPAddressSpace"
},
{
- "name": "windowState",
- "description": "The window state. Default to normal.",
+ "name": "clientSecurityState",
"optional": true,
- "$ref": "WindowState"
+ "$ref": "Network.ClientSecurityState"
}
]
},
{
- "id": "PermissionType",
- "experimental": true,
+ "id": "AttributionReportingIssueType",
"type": "string",
"enum": [
- "accessibilityEvents",
- "audioCapture",
- "backgroundSync",
- "backgroundFetch",
- "clipboardRead",
- "clipboardWrite",
- "durableStorage",
- "flash",
- "geolocation",
- "midi",
- "midiSysex",
- "notifications",
- "paymentHandler",
- "periodicBackgroundSync",
- "protectedMediaIdentifier",
- "sensors",
- "videoCapture",
- "idleDetection",
- "wakeLockScreen",
- "wakeLockSystem"
- ]
- },
- {
- "id": "Bucket",
- "description": "Chrome histogram bucket.",
- "experimental": true,
+ "PermissionPolicyDisabled",
+ "InvalidAttributionSourceEventId",
+ "InvalidAttributionData",
+ "AttributionSourceUntrustworthyOrigin",
+ "AttributionUntrustworthyOrigin",
+ "AttributionTriggerDataTooLarge",
+ "AttributionEventSourceTriggerDataTooLarge",
+ "InvalidAttributionSourceExpiry",
+ "InvalidAttributionSourcePriority",
+ "InvalidEventSourceTriggerData",
+ "InvalidTriggerPriority",
+ "InvalidTriggerDedupKey"
+ ]
+ },
+ {
+ "id": "AttributionReportingIssueDetails",
+ "description": "Details for issues around \"Attribution Reporting API\" usage.\nExplainer: https://github.com/WICG/conversion-measurement-api",
"type": "object",
"properties": [
{
- "name": "low",
- "description": "Minimum value (inclusive).",
- "type": "integer"
+ "name": "violationType",
+ "$ref": "AttributionReportingIssueType"
},
{
- "name": "high",
- "description": "Maximum value (exclusive).",
- "type": "integer"
+ "name": "frame",
+ "optional": true,
+ "$ref": "AffectedFrame"
},
{
- "name": "count",
- "description": "Number of samples.",
- "type": "integer"
+ "name": "request",
+ "optional": true,
+ "$ref": "AffectedRequest"
+ },
+ {
+ "name": "violatingNodeId",
+ "optional": true,
+ "$ref": "DOM.BackendNodeId"
+ },
+ {
+ "name": "invalidParameter",
+ "optional": true,
+ "type": "string"
}
]
},
{
- "id": "Histogram",
- "description": "Chrome histogram.",
- "experimental": true,
+ "id": "QuirksModeIssueDetails",
+ "description": "Details for issues about documents in Quirks Mode\nor Limited Quirks Mode that affects page layouting.",
"type": "object",
"properties": [
{
- "name": "name",
- "description": "Name.",
- "type": "string"
+ "name": "isLimitedQuirksMode",
+ "description": "If false, it means the document's mode is \"quirks\"\ninstead of \"limited-quirks\".",
+ "type": "boolean"
},
{
- "name": "sum",
- "description": "Sum of sample values.",
- "type": "integer"
+ "name": "documentNodeId",
+ "$ref": "DOM.BackendNodeId"
},
{
- "name": "count",
- "description": "Total number of samples.",
- "type": "integer"
+ "name": "url",
+ "type": "string"
},
{
- "name": "buckets",
- "description": "Buckets.",
- "type": "array",
- "items": {
- "$ref": "Bucket"
- }
+ "name": "frameId",
+ "$ref": "Page.FrameId"
+ },
+ {
+ "name": "loaderId",
+ "$ref": "Network.LoaderId"
}
]
- }
- ],
- "commands": [
+ },
{
- "name": "grantPermissions",
- "description": "Grant specific permissions to the given origin and reject all others.",
- "experimental": true,
- "parameters": [
+ "id": "NavigatorUserAgentIssueDetails",
+ "type": "object",
+ "properties": [
{
- "name": "origin",
+ "name": "url",
"type": "string"
},
{
- "name": "permissions",
- "type": "array",
- "items": {
- "$ref": "PermissionType"
- }
- },
- {
- "name": "browserContextId",
- "description": "BrowserContext to override permissions. When omitted, default browser context is used.",
+ "name": "location",
"optional": true,
- "$ref": "Target.BrowserContextID"
+ "$ref": "SourceCodeLocation"
}
]
},
{
- "name": "resetPermissions",
- "description": "Reset all permission management for all origins.",
- "experimental": true,
- "parameters": [
+ "id": "GenericIssueErrorType",
+ "type": "string",
+ "enum": [
+ "CrossOriginPortalPostMessageError"
+ ]
+ },
+ {
+ "id": "GenericIssueDetails",
+ "description": "Depending on the concrete errorType, different properties are set.",
+ "type": "object",
+ "properties": [
{
- "name": "browserContextId",
- "description": "BrowserContext to reset permissions. When omitted, default browser context is used.",
+ "name": "errorType",
+ "description": "Issues with the same errorType are aggregated in the frontend.",
+ "$ref": "GenericIssueErrorType"
+ },
+ {
+ "name": "frameId",
"optional": true,
- "$ref": "Target.BrowserContextID"
+ "$ref": "Page.FrameId"
}
]
},
{
- "name": "close",
- "description": "Close browser gracefully."
- },
- {
- "name": "crash",
- "description": "Crashes browser on the main thread.",
- "experimental": true
- },
- {
- "name": "crashGpuProcess",
- "description": "Crashes GPU process.",
- "experimental": true
- },
- {
- "name": "getVersion",
- "description": "Returns version information.",
- "returns": [
- {
- "name": "protocolVersion",
- "description": "Protocol version.",
- "type": "string"
- },
+ "id": "DeprecationIssueDetails",
+ "description": "This issue tracks information needed to print a deprecation message.\nThe formatting is inherited from the old console.log version, see more at:\nhttps://source.chromium.org/chromium/chromium/src/+/main:third_party/blink/renderer/core/frame/deprecation.cc\nTODO(crbug.com/1264960): Re-work format to add i18n support per:\nhttps://source.chromium.org/chromium/chromium/src/+/main:third_party/blink/public/devtools_protocol/README.md",
+ "type": "object",
+ "properties": [
{
- "name": "product",
- "description": "Product name.",
- "type": "string"
+ "name": "affectedFrame",
+ "optional": true,
+ "$ref": "AffectedFrame"
},
{
- "name": "revision",
- "description": "Product revision.",
- "type": "string"
+ "name": "sourceCodeLocation",
+ "$ref": "SourceCodeLocation"
},
{
- "name": "userAgent",
- "description": "User-Agent.",
+ "name": "message",
+ "description": "The content of the deprecation issue (this won't be translated),\ne.g. \"window.inefficientLegacyStorageMethod will be removed in M97,\naround January 2022. Please use Web Storage or Indexed Database\ninstead. This standard was abandoned in January, 1970. See\nhttps://www.chromestatus.com/feature/5684870116278272 for more details.\"",
+ "deprecated": true,
+ "optional": true,
"type": "string"
},
{
- "name": "jsVersion",
- "description": "V8 version.",
+ "name": "deprecationType",
"type": "string"
}
]
},
{
- "name": "getBrowserCommandLine",
- "description": "Returns the command line switches for the browser process if, and only if\n--enable-automation is on the commandline.",
- "experimental": true,
- "returns": [
+ "id": "ClientHintIssueReason",
+ "type": "string",
+ "enum": [
+ "MetaTagAllowListInvalidOrigin",
+ "MetaTagModifiedHTML"
+ ]
+ },
+ {
+ "id": "FederatedAuthRequestIssueDetails",
+ "type": "object",
+ "properties": [
{
- "name": "arguments",
- "description": "Commandline parameters",
- "type": "array",
- "items": {
- "type": "string"
- }
+ "name": "federatedAuthRequestIssueReason",
+ "$ref": "FederatedAuthRequestIssueReason"
}
]
},
{
- "name": "getHistograms",
- "description": "Get Chrome histograms.",
- "experimental": true,
- "parameters": [
+ "id": "FederatedAuthRequestIssueReason",
+ "description": "Represents the failure reason when a federated authentication reason fails.\nShould be updated alongside RequestIdTokenStatus in\nthird_party/blink/public/mojom/webid/federated_auth_request.mojom to include\nall cases except for success.",
+ "type": "string",
+ "enum": [
+ "ApprovalDeclined",
+ "TooManyRequests",
+ "ManifestHttpNotFound",
+ "ManifestNoResponse",
+ "ManifestInvalidResponse",
+ "ClientMetadataHttpNotFound",
+ "ClientMetadataNoResponse",
+ "ClientMetadataInvalidResponse",
+ "ErrorFetchingSignin",
+ "InvalidSigninResponse",
+ "AccountsHttpNotFound",
+ "AccountsNoResponse",
+ "AccountsInvalidResponse",
+ "IdTokenHttpNotFound",
+ "IdTokenNoResponse",
+ "IdTokenInvalidResponse",
+ "IdTokenInvalidRequest",
+ "ErrorIdToken",
+ "Canceled"
+ ]
+ },
+ {
+ "id": "ClientHintIssueDetails",
+ "description": "This issue tracks client hints related issues. It's used to deprecate old\nfeatures, encourage the use of new ones, and provide general guidance.",
+ "type": "object",
+ "properties": [
{
- "name": "query",
- "description": "Requested substring in name. Only histograms which have query as a\nsubstring in their name are extracted. An empty or absent query returns\nall histograms.",
- "optional": true,
- "type": "string"
+ "name": "sourceCodeLocation",
+ "$ref": "SourceCodeLocation"
},
{
- "name": "delta",
- "description": "If true, retrieve delta since last call.",
- "optional": true,
- "type": "boolean"
- }
- ],
- "returns": [
- {
- "name": "histograms",
- "description": "Histograms.",
- "type": "array",
- "items": {
- "$ref": "Histogram"
- }
+ "name": "clientHintIssueReason",
+ "$ref": "ClientHintIssueReason"
}
]
},
{
- "name": "getHistogram",
- "description": "Get a Chrome histogram by name.",
- "experimental": true,
- "parameters": [
+ "id": "InspectorIssueCode",
+ "description": "A unique identifier for the type of issue. Each type may use one of the\noptional fields in InspectorIssueDetails to convey more specific\ninformation about the kind of issue.",
+ "type": "string",
+ "enum": [
+ "SameSiteCookieIssue",
+ "MixedContentIssue",
+ "BlockedByResponseIssue",
+ "HeavyAdIssue",
+ "ContentSecurityPolicyIssue",
+ "SharedArrayBufferIssue",
+ "TrustedWebActivityIssue",
+ "LowTextContrastIssue",
+ "CorsIssue",
+ "AttributionReportingIssue",
+ "QuirksModeIssue",
+ "NavigatorUserAgentIssue",
+ "GenericIssue",
+ "DeprecationIssue",
+ "ClientHintIssue",
+ "FederatedAuthRequestIssue"
+ ]
+ },
+ {
+ "id": "InspectorIssueDetails",
+ "description": "This struct holds a list of optional fields with additional information\nspecific to the kind of issue. When adding a new issue code, please also\nadd a new optional field to this type.",
+ "type": "object",
+ "properties": [
{
- "name": "name",
- "description": "Requested histogram name.",
- "type": "string"
+ "name": "sameSiteCookieIssueDetails",
+ "optional": true,
+ "$ref": "SameSiteCookieIssueDetails"
},
{
- "name": "delta",
- "description": "If true, retrieve delta since last call.",
+ "name": "mixedContentIssueDetails",
"optional": true,
- "type": "boolean"
- }
- ],
- "returns": [
+ "$ref": "MixedContentIssueDetails"
+ },
{
- "name": "histogram",
- "description": "Histogram.",
- "$ref": "Histogram"
+ "name": "blockedByResponseIssueDetails",
+ "optional": true,
+ "$ref": "BlockedByResponseIssueDetails"
+ },
+ {
+ "name": "heavyAdIssueDetails",
+ "optional": true,
+ "$ref": "HeavyAdIssueDetails"
+ },
+ {
+ "name": "contentSecurityPolicyIssueDetails",
+ "optional": true,
+ "$ref": "ContentSecurityPolicyIssueDetails"
+ },
+ {
+ "name": "sharedArrayBufferIssueDetails",
+ "optional": true,
+ "$ref": "SharedArrayBufferIssueDetails"
+ },
+ {
+ "name": "twaQualityEnforcementDetails",
+ "optional": true,
+ "$ref": "TrustedWebActivityIssueDetails"
+ },
+ {
+ "name": "lowTextContrastIssueDetails",
+ "optional": true,
+ "$ref": "LowTextContrastIssueDetails"
+ },
+ {
+ "name": "corsIssueDetails",
+ "optional": true,
+ "$ref": "CorsIssueDetails"
+ },
+ {
+ "name": "attributionReportingIssueDetails",
+ "optional": true,
+ "$ref": "AttributionReportingIssueDetails"
+ },
+ {
+ "name": "quirksModeIssueDetails",
+ "optional": true,
+ "$ref": "QuirksModeIssueDetails"
+ },
+ {
+ "name": "navigatorUserAgentIssueDetails",
+ "optional": true,
+ "$ref": "NavigatorUserAgentIssueDetails"
+ },
+ {
+ "name": "genericIssueDetails",
+ "optional": true,
+ "$ref": "GenericIssueDetails"
+ },
+ {
+ "name": "deprecationIssueDetails",
+ "optional": true,
+ "$ref": "DeprecationIssueDetails"
+ },
+ {
+ "name": "clientHintIssueDetails",
+ "optional": true,
+ "$ref": "ClientHintIssueDetails"
+ },
+ {
+ "name": "federatedAuthRequestIssueDetails",
+ "optional": true,
+ "$ref": "FederatedAuthRequestIssueDetails"
}
]
},
{
- "name": "getWindowBounds",
- "description": "Get position and size of the browser window.",
- "experimental": true,
- "parameters": [
+ "id": "IssueId",
+ "description": "A unique id for a DevTools inspector issue. Allows other entities (e.g.\nexceptions, CDP message, console messages, etc.) to reference an issue.",
+ "type": "string"
+ },
+ {
+ "id": "InspectorIssue",
+ "description": "An inspector issue reported from the back-end.",
+ "type": "object",
+ "properties": [
{
- "name": "windowId",
- "description": "Browser window id.",
- "$ref": "WindowID"
- }
- ],
- "returns": [
+ "name": "code",
+ "$ref": "InspectorIssueCode"
+ },
{
- "name": "bounds",
- "description": "Bounds information of the window. When window state is 'minimized', the restored window\nposition and size are returned.",
- "$ref": "Bounds"
+ "name": "details",
+ "$ref": "InspectorIssueDetails"
+ },
+ {
+ "name": "issueId",
+ "description": "A unique id for this issue. May be omitted if no other entity (e.g.\nexception, CDP message, etc.) is referencing this issue.",
+ "optional": true,
+ "$ref": "IssueId"
}
]
- },
+ }
+ ],
+ "commands": [
{
- "name": "getWindowForTarget",
- "description": "Get the browser window that contains the devtools target.",
- "experimental": true,
+ "name": "getEncodedResponse",
+ "description": "Returns the response body and size if it were re-encoded with the specified settings. Only\napplies to images.",
"parameters": [
{
- "name": "targetId",
- "description": "Devtools agent host id. If called as a part of the session, associated targetId is used.",
+ "name": "requestId",
+ "description": "Identifier of the network request to get content for.",
+ "$ref": "Network.RequestId"
+ },
+ {
+ "name": "encoding",
+ "description": "The encoding to use.",
+ "type": "string",
+ "enum": [
+ "webp",
+ "jpeg",
+ "png"
+ ]
+ },
+ {
+ "name": "quality",
+ "description": "The quality of the encoding (0-1). (defaults to 1)",
"optional": true,
- "$ref": "Target.TargetID"
+ "type": "number"
+ },
+ {
+ "name": "sizeOnly",
+ "description": "Whether to only return the size information (defaults to false).",
+ "optional": true,
+ "type": "boolean"
}
],
"returns": [
{
- "name": "windowId",
- "description": "Browser window id.",
- "$ref": "WindowID"
+ "name": "body",
+ "description": "The encoded body as a base64 string. Omitted if sizeOnly is true. (Encoded as a base64 string when passed over JSON)",
+ "optional": true,
+ "type": "string"
},
{
- "name": "bounds",
- "description": "Bounds information of the window. When window state is 'minimized', the restored window\nposition and size are returned.",
- "$ref": "Bounds"
+ "name": "originalSize",
+ "description": "Size before re-encoding.",
+ "type": "integer"
+ },
+ {
+ "name": "encodedSize",
+ "description": "Size after re-encoding.",
+ "type": "integer"
}
]
},
{
- "name": "setWindowBounds",
- "description": "Set position and/or size of the browser window.",
- "experimental": true,
+ "name": "disable",
+ "description": "Disables issues domain, prevents further issues from being reported to the client."
+ },
+ {
+ "name": "enable",
+ "description": "Enables issues domain, sends the issues collected so far to the client by means of the\n`issueAdded` event."
+ },
+ {
+ "name": "checkContrast",
+ "description": "Runs the contrast check for the target page. Found issues are reported\nusing Audits.issueAdded event.",
"parameters": [
{
- "name": "windowId",
- "description": "Browser window id.",
- "$ref": "WindowID"
- },
- {
- "name": "bounds",
- "description": "New window bounds. The 'minimized', 'maximized' and 'fullscreen' states cannot be combined\nwith 'left', 'top', 'width' or 'height'. Leaves unspecified fields unchanged.",
- "$ref": "Bounds"
+ "name": "reportAAA",
+ "description": "Whether to report WCAG AAA level issues. Default is false.",
+ "optional": true,
+ "type": "boolean"
}
]
- },
+ }
+ ],
+ "events": [
{
- "name": "setDockTile",
- "description": "Set dock tile details, platform-specific.",
- "experimental": true,
+ "name": "issueAdded",
"parameters": [
{
- "name": "badgeLabel",
- "optional": true,
- "type": "string"
- },
- {
- "name": "image",
- "description": "Png encoded image.",
- "optional": true,
- "type": "string"
+ "name": "issue",
+ "$ref": "InspectorIssue"
}
]
}
]
},
{
- "domain": "CSS",
- "description": "This domain exposes CSS read/write operations. All CSS objects (stylesheets, rules, and styles)\nhave an associated `id` used in subsequent operations on the related object. Each object type has\na specific `id` structure, and those are not interchangeable between objects of different kinds.\nCSS objects can be loaded using the `get*ForNode()` calls (which accept a DOM node id). A client\ncan also keep track of stylesheets via the `styleSheetAdded`/`styleSheetRemoved` events and\nsubsequently load the required stylesheet contents using the `getStyleSheet[Text]()` methods.",
+ "domain": "BackgroundService",
+ "description": "Defines events for background web platform features.",
"experimental": true,
- "dependencies": [
- "DOM"
- ],
"types": [
{
- "id": "StyleSheetId",
- "type": "string"
- },
- {
- "id": "StyleSheetOrigin",
- "description": "Stylesheet type: \"injected\" for stylesheets injected via extension, \"user-agent\" for user-agent\nstylesheets, \"inspector\" for stylesheets created by the inspector (i.e. those holding the \"via\ninspector\" rules), \"regular\" for regular stylesheets.",
+ "id": "ServiceName",
+ "description": "The Background Service that will be associated with the commands/events.\nEvery Background Service operates independently, but they share the same\nAPI.",
"type": "string",
"enum": [
- "injected",
- "user-agent",
- "inspector",
- "regular"
+ "backgroundFetch",
+ "backgroundSync",
+ "pushMessaging",
+ "notifications",
+ "paymentHandler",
+ "periodicBackgroundSync"
]
},
{
- "id": "PseudoElementMatches",
- "description": "CSS rule collection for a single pseudo style.",
+ "id": "EventMetadata",
+ "description": "A key-value pair for additional event information to pass along.",
"type": "object",
"properties": [
{
- "name": "pseudoType",
- "description": "Pseudo element type.",
- "$ref": "DOM.PseudoType"
+ "name": "key",
+ "type": "string"
},
{
- "name": "matches",
- "description": "Matches of CSS rules applicable to the pseudo style.",
- "type": "array",
- "items": {
- "$ref": "RuleMatch"
- }
+ "name": "value",
+ "type": "string"
}
]
},
{
- "id": "InheritedStyleEntry",
- "description": "Inherited CSS rule collection from ancestor node.",
+ "id": "BackgroundServiceEvent",
"type": "object",
"properties": [
{
- "name": "inlineStyle",
- "description": "The ancestor node's inline style, if any, in the style inheritance chain.",
- "optional": true,
- "$ref": "CSSStyle"
+ "name": "timestamp",
+ "description": "Timestamp of the event (in seconds).",
+ "$ref": "Network.TimeSinceEpoch"
},
{
- "name": "matchedCSSRules",
- "description": "Matches of CSS rules matching the ancestor node in the style inheritance chain.",
- "type": "array",
- "items": {
- "$ref": "RuleMatch"
- }
- }
- ]
- },
- {
- "id": "RuleMatch",
- "description": "Match data for a CSS rule.",
- "type": "object",
- "properties": [
- {
- "name": "rule",
- "description": "CSS rule in the match.",
- "$ref": "CSSRule"
+ "name": "origin",
+ "description": "The origin this event belongs to.",
+ "type": "string"
},
{
- "name": "matchingSelectors",
- "description": "Matching selector indices in the rule's selectorList selectors (0-based).",
+ "name": "serviceWorkerRegistrationId",
+ "description": "The Service Worker ID that initiated the event.",
+ "$ref": "ServiceWorker.RegistrationID"
+ },
+ {
+ "name": "service",
+ "description": "The Background Service this event belongs to.",
+ "$ref": "ServiceName"
+ },
+ {
+ "name": "eventName",
+ "description": "A description of the event.",
+ "type": "string"
+ },
+ {
+ "name": "instanceId",
+ "description": "An identifier that groups related events together.",
+ "type": "string"
+ },
+ {
+ "name": "eventMetadata",
+ "description": "A list of event-specific information.",
"type": "array",
"items": {
- "type": "integer"
+ "$ref": "EventMetadata"
}
}
]
+ }
+ ],
+ "commands": [
+ {
+ "name": "startObserving",
+ "description": "Enables event updates for the service.",
+ "parameters": [
+ {
+ "name": "service",
+ "$ref": "ServiceName"
+ }
+ ]
},
{
- "id": "Value",
- "description": "Data for a simple selector (these are delimited by commas in a selector list).",
- "type": "object",
- "properties": [
+ "name": "stopObserving",
+ "description": "Disables event updates for the service.",
+ "parameters": [
{
- "name": "text",
- "description": "Value text.",
- "type": "string"
+ "name": "service",
+ "$ref": "ServiceName"
+ }
+ ]
+ },
+ {
+ "name": "setRecording",
+ "description": "Set the recording state for the service.",
+ "parameters": [
+ {
+ "name": "shouldRecord",
+ "type": "boolean"
},
{
- "name": "range",
- "description": "Value range in the underlying resource (if available).",
- "optional": true,
- "$ref": "SourceRange"
+ "name": "service",
+ "$ref": "ServiceName"
}
]
},
{
- "id": "SelectorList",
- "description": "Selector list data.",
- "type": "object",
- "properties": [
+ "name": "clearEvents",
+ "description": "Clears all stored data for the service.",
+ "parameters": [
{
- "name": "selectors",
- "description": "Selectors in the list.",
- "type": "array",
- "items": {
- "$ref": "Value"
- }
+ "name": "service",
+ "$ref": "ServiceName"
+ }
+ ]
+ }
+ ],
+ "events": [
+ {
+ "name": "recordingStateChanged",
+ "description": "Called when the recording state for the service has been updated.",
+ "parameters": [
+ {
+ "name": "isRecording",
+ "type": "boolean"
},
{
- "name": "text",
- "description": "Rule selector text.",
- "type": "string"
+ "name": "service",
+ "$ref": "ServiceName"
}
]
},
{
- "id": "CSSStyleSheetHeader",
- "description": "CSS stylesheet metainformation.",
+ "name": "backgroundServiceEventReceived",
+ "description": "Called with all existing backgroundServiceEvents when enabled, and all new\nevents afterwards if enabled and recording.",
+ "parameters": [
+ {
+ "name": "backgroundServiceEvent",
+ "$ref": "BackgroundServiceEvent"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "domain": "Browser",
+ "description": "The Browser domain defines methods and events for browser managing.",
+ "types": [
+ {
+ "id": "BrowserContextID",
+ "experimental": true,
+ "type": "string"
+ },
+ {
+ "id": "WindowID",
+ "experimental": true,
+ "type": "integer"
+ },
+ {
+ "id": "WindowState",
+ "description": "The state of the browser window.",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "normal",
+ "minimized",
+ "maximized",
+ "fullscreen"
+ ]
+ },
+ {
+ "id": "Bounds",
+ "description": "Browser window bounds information",
+ "experimental": true,
"type": "object",
"properties": [
{
- "name": "styleSheetId",
- "description": "The stylesheet identifier.",
- "$ref": "StyleSheetId"
+ "name": "left",
+ "description": "The offset from the left edge of the screen to the window in pixels.",
+ "optional": true,
+ "type": "integer"
},
{
- "name": "frameId",
- "description": "Owner frame identifier.",
- "$ref": "Page.FrameId"
+ "name": "top",
+ "description": "The offset from the top edge of the screen to the window in pixels.",
+ "optional": true,
+ "type": "integer"
},
{
- "name": "sourceURL",
- "description": "Stylesheet resource URL.",
- "type": "string"
+ "name": "width",
+ "description": "The window width in pixels.",
+ "optional": true,
+ "type": "integer"
},
{
- "name": "sourceMapURL",
- "description": "URL of source map associated with the stylesheet (if any).",
+ "name": "height",
+ "description": "The window height in pixels.",
"optional": true,
- "type": "string"
+ "type": "integer"
},
{
- "name": "origin",
- "description": "Stylesheet origin.",
- "$ref": "StyleSheetOrigin"
- },
+ "name": "windowState",
+ "description": "The window state. Default to normal.",
+ "optional": true,
+ "$ref": "WindowState"
+ }
+ ]
+ },
+ {
+ "id": "PermissionType",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "accessibilityEvents",
+ "audioCapture",
+ "backgroundSync",
+ "backgroundFetch",
+ "clipboardReadWrite",
+ "clipboardSanitizedWrite",
+ "displayCapture",
+ "durableStorage",
+ "flash",
+ "geolocation",
+ "midi",
+ "midiSysex",
+ "nfc",
+ "notifications",
+ "paymentHandler",
+ "periodicBackgroundSync",
+ "protectedMediaIdentifier",
+ "sensors",
+ "videoCapture",
+ "videoCapturePanTiltZoom",
+ "idleDetection",
+ "wakeLockScreen",
+ "wakeLockSystem"
+ ]
+ },
+ {
+ "id": "PermissionSetting",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "granted",
+ "denied",
+ "prompt"
+ ]
+ },
+ {
+ "id": "PermissionDescriptor",
+ "description": "Definition of PermissionDescriptor defined in the Permissions API:\nhttps://w3c.github.io/permissions/#dictdef-permissiondescriptor.",
+ "experimental": true,
+ "type": "object",
+ "properties": [
{
- "name": "title",
- "description": "Stylesheet title.",
+ "name": "name",
+ "description": "Name of permission.\nSee https://cs.chromium.org/chromium/src/third_party/blink/renderer/modules/permissions/permission_descriptor.idl for valid permission names.",
"type": "string"
},
{
- "name": "ownerNode",
- "description": "The backend id for the owner node of the stylesheet.",
+ "name": "sysex",
+ "description": "For \"midi\" permission, may also specify sysex control.",
"optional": true,
- "$ref": "DOM.BackendNodeId"
- },
- {
- "name": "disabled",
- "description": "Denotes whether the stylesheet is disabled.",
"type": "boolean"
},
{
- "name": "hasSourceURL",
- "description": "Whether the sourceURL field value comes from the sourceURL comment.",
+ "name": "userVisibleOnly",
+ "description": "For \"push\" permission, may specify userVisibleOnly.\nNote that userVisibleOnly = true is the only currently supported type.",
"optional": true,
"type": "boolean"
},
{
- "name": "isInline",
- "description": "Whether this stylesheet is created for STYLE tag by parser. This flag is not set for\ndocument.written STYLE tags.",
+ "name": "allowWithoutSanitization",
+ "description": "For \"clipboard\" permission, may specify allowWithoutSanitization.",
+ "optional": true,
"type": "boolean"
},
{
- "name": "startLine",
- "description": "Line offset of the stylesheet within the resource (zero based).",
- "type": "number"
- },
- {
- "name": "startColumn",
- "description": "Column offset of the stylesheet within the resource (zero based).",
- "type": "number"
- },
- {
- "name": "length",
- "description": "Size of the content (in characters).",
- "type": "number"
+ "name": "panTiltZoom",
+ "description": "For \"camera\" permission, may specify panTiltZoom.",
+ "optional": true,
+ "type": "boolean"
}
]
},
{
- "id": "CSSRule",
- "description": "CSS rule representation.",
+ "id": "BrowserCommandId",
+ "description": "Browser command ids used by executeBrowserCommand.",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "openTabSearch",
+ "closeTabSearch"
+ ]
+ },
+ {
+ "id": "Bucket",
+ "description": "Chrome histogram bucket.",
+ "experimental": true,
"type": "object",
"properties": [
{
- "name": "styleSheetId",
- "description": "The css style sheet identifier (absent for user agent stylesheet and user-specified\nstylesheet rules) this rule came from.",
- "optional": true,
- "$ref": "StyleSheetId"
- },
- {
- "name": "selectorList",
- "description": "Rule selector data.",
- "$ref": "SelectorList"
- },
- {
- "name": "origin",
- "description": "Parent stylesheet's origin.",
- "$ref": "StyleSheetOrigin"
+ "name": "low",
+ "description": "Minimum value (inclusive).",
+ "type": "integer"
},
{
- "name": "style",
- "description": "Associated style declaration.",
- "$ref": "CSSStyle"
+ "name": "high",
+ "description": "Maximum value (exclusive).",
+ "type": "integer"
},
{
- "name": "media",
- "description": "Media list array (for rules involving media queries). The array enumerates media queries\nstarting with the innermost one, going outwards.",
- "optional": true,
- "type": "array",
- "items": {
- "$ref": "CSSMedia"
- }
+ "name": "count",
+ "description": "Number of samples.",
+ "type": "integer"
}
]
},
{
- "id": "RuleUsage",
- "description": "CSS coverage information.",
+ "id": "Histogram",
+ "description": "Chrome histogram.",
+ "experimental": true,
"type": "object",
"properties": [
{
- "name": "styleSheetId",
- "description": "The css style sheet identifier (absent for user agent stylesheet and user-specified\nstylesheet rules) this rule came from.",
- "$ref": "StyleSheetId"
+ "name": "name",
+ "description": "Name.",
+ "type": "string"
},
{
- "name": "startOffset",
- "description": "Offset of the start of the rule (including selector) from the beginning of the stylesheet.",
- "type": "number"
+ "name": "sum",
+ "description": "Sum of sample values.",
+ "type": "integer"
},
{
- "name": "endOffset",
- "description": "Offset of the end of the rule body from the beginning of the stylesheet.",
- "type": "number"
+ "name": "count",
+ "description": "Total number of samples.",
+ "type": "integer"
},
{
- "name": "used",
- "description": "Indicates whether the rule was actually used by some element in the page.",
- "type": "boolean"
+ "name": "buckets",
+ "description": "Buckets.",
+ "type": "array",
+ "items": {
+ "$ref": "Bucket"
+ }
}
]
- },
+ }
+ ],
+ "commands": [
{
- "id": "SourceRange",
- "description": "Text range within a resource. All numbers are zero-based.",
- "type": "object",
- "properties": [
+ "name": "setPermission",
+ "description": "Set permission settings for given origin.",
+ "experimental": true,
+ "parameters": [
{
- "name": "startLine",
- "description": "Start line of range.",
- "type": "integer"
+ "name": "permission",
+ "description": "Descriptor of permission to override.",
+ "$ref": "PermissionDescriptor"
},
{
- "name": "startColumn",
- "description": "Start column of range (inclusive).",
- "type": "integer"
+ "name": "setting",
+ "description": "Setting of the permission.",
+ "$ref": "PermissionSetting"
},
{
- "name": "endLine",
- "description": "End line of range",
- "type": "integer"
+ "name": "origin",
+ "description": "Origin the permission applies to, all origins if not specified.",
+ "optional": true,
+ "type": "string"
},
{
- "name": "endColumn",
- "description": "End column of range (exclusive).",
- "type": "integer"
+ "name": "browserContextId",
+ "description": "Context to override. When omitted, default browser context is used.",
+ "optional": true,
+ "$ref": "BrowserContextID"
}
]
},
{
- "id": "ShorthandEntry",
- "type": "object",
- "properties": [
+ "name": "grantPermissions",
+ "description": "Grant specific permissions to the given origin and reject all others.",
+ "experimental": true,
+ "parameters": [
{
- "name": "name",
- "description": "Shorthand name.",
- "type": "string"
+ "name": "permissions",
+ "type": "array",
+ "items": {
+ "$ref": "PermissionType"
+ }
},
{
- "name": "value",
- "description": "Shorthand value.",
+ "name": "origin",
+ "description": "Origin the permission applies to, all origins if not specified.",
+ "optional": true,
"type": "string"
},
{
- "name": "important",
- "description": "Whether the property has \"!important\" annotation (implies `false` if absent).",
+ "name": "browserContextId",
+ "description": "BrowserContext to override permissions. When omitted, default browser context is used.",
"optional": true,
- "type": "boolean"
+ "$ref": "BrowserContextID"
}
]
},
{
- "id": "CSSComputedStyleProperty",
- "type": "object",
- "properties": [
- {
- "name": "name",
- "description": "Computed style property name.",
- "type": "string"
- },
+ "name": "resetPermissions",
+ "description": "Reset all permission management for all origins.",
+ "experimental": true,
+ "parameters": [
{
- "name": "value",
- "description": "Computed style property value.",
- "type": "string"
+ "name": "browserContextId",
+ "description": "BrowserContext to reset permissions. When omitted, default browser context is used.",
+ "optional": true,
+ "$ref": "BrowserContextID"
}
]
},
{
- "id": "CSSStyle",
- "description": "CSS style representation.",
- "type": "object",
- "properties": [
- {
- "name": "styleSheetId",
- "description": "The css style sheet identifier (absent for user agent stylesheet and user-specified\nstylesheet rules) this rule came from.",
- "optional": true,
- "$ref": "StyleSheetId"
- },
+ "name": "setDownloadBehavior",
+ "description": "Set the behavior when downloading a file.",
+ "experimental": true,
+ "parameters": [
{
- "name": "cssProperties",
- "description": "CSS properties in the style.",
- "type": "array",
- "items": {
- "$ref": "CSSProperty"
- }
+ "name": "behavior",
+ "description": "Whether to allow all or deny all download requests, or use default Chrome behavior if\navailable (otherwise deny). |allowAndName| allows download and names files according to\ntheir dowmload guids.",
+ "type": "string",
+ "enum": [
+ "deny",
+ "allow",
+ "allowAndName",
+ "default"
+ ]
},
{
- "name": "shorthandEntries",
- "description": "Computed values for all shorthands found in the style.",
- "type": "array",
- "items": {
- "$ref": "ShorthandEntry"
- }
+ "name": "browserContextId",
+ "description": "BrowserContext to set download behavior. When omitted, default browser context is used.",
+ "optional": true,
+ "$ref": "BrowserContextID"
},
{
- "name": "cssText",
- "description": "Style declaration text (if available).",
+ "name": "downloadPath",
+ "description": "The default path to save downloaded files to. This is required if behavior is set to 'allow'\nor 'allowAndName'.",
"optional": true,
"type": "string"
},
{
- "name": "range",
- "description": "Style declaration range in the enclosing stylesheet (if available).",
+ "name": "eventsEnabled",
+ "description": "Whether to emit download events (defaults to false).",
"optional": true,
- "$ref": "SourceRange"
+ "type": "boolean"
}
]
},
{
- "id": "CSSProperty",
- "description": "CSS property declaration data.",
- "type": "object",
- "properties": [
- {
- "name": "name",
- "description": "The property name.",
- "type": "string"
- },
+ "name": "cancelDownload",
+ "description": "Cancel a download if in progress",
+ "experimental": true,
+ "parameters": [
{
- "name": "value",
- "description": "The property value.",
+ "name": "guid",
+ "description": "Global unique identifier of the download.",
"type": "string"
},
{
- "name": "important",
- "description": "Whether the property has \"!important\" annotation (implies `false` if absent).",
+ "name": "browserContextId",
+ "description": "BrowserContext to perform the action in. When omitted, default browser context is used.",
"optional": true,
- "type": "boolean"
- },
+ "$ref": "BrowserContextID"
+ }
+ ]
+ },
+ {
+ "name": "close",
+ "description": "Close browser gracefully."
+ },
+ {
+ "name": "crash",
+ "description": "Crashes browser on the main thread.",
+ "experimental": true
+ },
+ {
+ "name": "crashGpuProcess",
+ "description": "Crashes GPU process.",
+ "experimental": true
+ },
+ {
+ "name": "getVersion",
+ "description": "Returns version information.",
+ "returns": [
{
- "name": "implicit",
- "description": "Whether the property is implicit (implies `false` if absent).",
- "optional": true,
- "type": "boolean"
+ "name": "protocolVersion",
+ "description": "Protocol version.",
+ "type": "string"
},
{
- "name": "text",
- "description": "The full property text as specified in the style.",
- "optional": true,
+ "name": "product",
+ "description": "Product name.",
"type": "string"
},
{
- "name": "parsedOk",
- "description": "Whether the property is understood by the browser (implies `true` if absent).",
- "optional": true,
- "type": "boolean"
+ "name": "revision",
+ "description": "Product revision.",
+ "type": "string"
},
{
- "name": "disabled",
- "description": "Whether the property is disabled by the user (present for source-based properties only).",
- "optional": true,
- "type": "boolean"
+ "name": "userAgent",
+ "description": "User-Agent.",
+ "type": "string"
},
{
- "name": "range",
- "description": "The entire property range in the enclosing style declaration (if available).",
- "optional": true,
- "$ref": "SourceRange"
+ "name": "jsVersion",
+ "description": "V8 version.",
+ "type": "string"
}
]
},
{
- "id": "CSSMedia",
- "description": "CSS media rule descriptor.",
- "type": "object",
- "properties": [
- {
- "name": "text",
- "description": "Media query text.",
- "type": "string"
- },
+ "name": "getBrowserCommandLine",
+ "description": "Returns the command line switches for the browser process if, and only if\n--enable-automation is on the commandline.",
+ "experimental": true,
+ "returns": [
{
- "name": "source",
- "description": "Source of the media query: \"mediaRule\" if specified by a @media rule, \"importRule\" if\nspecified by an @import rule, \"linkedSheet\" if specified by a \"media\" attribute in a linked\nstylesheet's LINK tag, \"inlineSheet\" if specified by a \"media\" attribute in an inline\nstylesheet's STYLE tag.",
- "type": "string",
- "enum": [
- "mediaRule",
- "importRule",
- "linkedSheet",
- "inlineSheet"
- ]
- },
+ "name": "arguments",
+ "description": "Commandline parameters",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ ]
+ },
+ {
+ "name": "getHistograms",
+ "description": "Get Chrome histograms.",
+ "experimental": true,
+ "parameters": [
{
- "name": "sourceURL",
- "description": "URL of the document containing the media query description.",
+ "name": "query",
+ "description": "Requested substring in name. Only histograms which have query as a\nsubstring in their name are extracted. An empty or absent query returns\nall histograms.",
"optional": true,
"type": "string"
},
{
- "name": "range",
- "description": "The associated rule (@media or @import) header range in the enclosing stylesheet (if\navailable).",
- "optional": true,
- "$ref": "SourceRange"
- },
- {
- "name": "styleSheetId",
- "description": "Identifier of the stylesheet containing this object (if exists).",
+ "name": "delta",
+ "description": "If true, retrieve delta since last call.",
"optional": true,
- "$ref": "StyleSheetId"
- },
+ "type": "boolean"
+ }
+ ],
+ "returns": [
{
- "name": "mediaList",
- "description": "Array of media queries.",
- "optional": true,
+ "name": "histograms",
+ "description": "Histograms.",
"type": "array",
"items": {
- "$ref": "MediaQuery"
+ "$ref": "Histogram"
}
}
]
},
{
- "id": "MediaQuery",
- "description": "Media query descriptor.",
- "type": "object",
- "properties": [
+ "name": "getHistogram",
+ "description": "Get a Chrome histogram by name.",
+ "experimental": true,
+ "parameters": [
{
- "name": "expressions",
- "description": "Array of media query expressions.",
- "type": "array",
- "items": {
- "$ref": "MediaQueryExpression"
- }
+ "name": "name",
+ "description": "Requested histogram name.",
+ "type": "string"
},
{
- "name": "active",
- "description": "Whether the media query condition is satisfied.",
+ "name": "delta",
+ "description": "If true, retrieve delta since last call.",
+ "optional": true,
"type": "boolean"
}
+ ],
+ "returns": [
+ {
+ "name": "histogram",
+ "description": "Histogram.",
+ "$ref": "Histogram"
+ }
]
},
{
- "id": "MediaQueryExpression",
- "description": "Media query expression descriptor.",
- "type": "object",
- "properties": [
- {
- "name": "value",
- "description": "Media query expression value.",
- "type": "number"
- },
+ "name": "getWindowBounds",
+ "description": "Get position and size of the browser window.",
+ "experimental": true,
+ "parameters": [
{
- "name": "unit",
- "description": "Media query expression units.",
- "type": "string"
- },
+ "name": "windowId",
+ "description": "Browser window id.",
+ "$ref": "WindowID"
+ }
+ ],
+ "returns": [
{
- "name": "feature",
- "description": "Media query expression feature.",
- "type": "string"
- },
+ "name": "bounds",
+ "description": "Bounds information of the window. When window state is 'minimized', the restored window\nposition and size are returned.",
+ "$ref": "Bounds"
+ }
+ ]
+ },
+ {
+ "name": "getWindowForTarget",
+ "description": "Get the browser window that contains the devtools target.",
+ "experimental": true,
+ "parameters": [
{
- "name": "valueRange",
- "description": "The associated range of the value text in the enclosing stylesheet (if available).",
+ "name": "targetId",
+ "description": "Devtools agent host id. If called as a part of the session, associated targetId is used.",
"optional": true,
- "$ref": "SourceRange"
+ "$ref": "Target.TargetID"
+ }
+ ],
+ "returns": [
+ {
+ "name": "windowId",
+ "description": "Browser window id.",
+ "$ref": "WindowID"
},
{
- "name": "computedLength",
- "description": "Computed length of media query expression (if applicable).",
- "optional": true,
- "type": "number"
+ "name": "bounds",
+ "description": "Bounds information of the window. When window state is 'minimized', the restored window\nposition and size are returned.",
+ "$ref": "Bounds"
}
]
},
{
- "id": "PlatformFontUsage",
- "description": "Information about amount of glyphs that were rendered with given font.",
- "type": "object",
- "properties": [
- {
- "name": "familyName",
- "description": "Font's family name reported by platform.",
- "type": "string"
- },
+ "name": "setWindowBounds",
+ "description": "Set position and/or size of the browser window.",
+ "experimental": true,
+ "parameters": [
{
- "name": "isCustomFont",
- "description": "Indicates if the font was downloaded or resolved locally.",
- "type": "boolean"
+ "name": "windowId",
+ "description": "Browser window id.",
+ "$ref": "WindowID"
},
{
- "name": "glyphCount",
- "description": "Amount of glyphs that were rendered with this font.",
- "type": "number"
+ "name": "bounds",
+ "description": "New window bounds. The 'minimized', 'maximized' and 'fullscreen' states cannot be combined\nwith 'left', 'top', 'width' or 'height'. Leaves unspecified fields unchanged.",
+ "$ref": "Bounds"
}
]
},
{
- "id": "FontFace",
- "description": "Properties of a web font: https://www.w3.org/TR/2008/REC-CSS2-20080411/fonts.html#font-descriptions",
- "type": "object",
- "properties": [
- {
- "name": "fontFamily",
- "description": "The font-family.",
- "type": "string"
- },
+ "name": "setDockTile",
+ "description": "Set dock tile details, platform-specific.",
+ "experimental": true,
+ "parameters": [
{
- "name": "fontStyle",
- "description": "The font-style.",
+ "name": "badgeLabel",
+ "optional": true,
"type": "string"
},
{
- "name": "fontVariant",
- "description": "The font-variant.",
+ "name": "image",
+ "description": "Png encoded image. (Encoded as a base64 string when passed over JSON)",
+ "optional": true,
"type": "string"
- },
+ }
+ ]
+ },
+ {
+ "name": "executeBrowserCommand",
+ "description": "Invoke custom browser commands used by telemetry.",
+ "experimental": true,
+ "parameters": [
{
- "name": "fontWeight",
- "description": "The font-weight.",
- "type": "string"
- },
+ "name": "commandId",
+ "$ref": "BrowserCommandId"
+ }
+ ]
+ }
+ ],
+ "events": [
+ {
+ "name": "downloadWillBegin",
+ "description": "Fired when page is about to start a download.",
+ "experimental": true,
+ "parameters": [
{
- "name": "fontStretch",
- "description": "The font-stretch.",
- "type": "string"
+ "name": "frameId",
+ "description": "Id of the frame that caused the download to begin.",
+ "$ref": "Page.FrameId"
},
{
- "name": "unicodeRange",
- "description": "The unicode-range.",
+ "name": "guid",
+ "description": "Global unique identifier of the download.",
"type": "string"
},
{
- "name": "src",
- "description": "The src.",
+ "name": "url",
+ "description": "URL of the resource being downloaded.",
"type": "string"
},
{
- "name": "platformFontFamily",
- "description": "The resolved platform font family",
+ "name": "suggestedFilename",
+ "description": "Suggested file name of the resource (the actual name of the file saved on disk may differ).",
"type": "string"
}
]
},
{
- "id": "CSSKeyframesRule",
- "description": "CSS keyframes rule representation.",
- "type": "object",
- "properties": [
+ "name": "downloadProgress",
+ "description": "Fired when download makes progress. Last call has |done| == true.",
+ "experimental": true,
+ "parameters": [
{
- "name": "animationName",
- "description": "Animation name.",
- "$ref": "Value"
+ "name": "guid",
+ "description": "Global unique identifier of the download.",
+ "type": "string"
},
{
- "name": "keyframes",
- "description": "List of keyframes.",
- "type": "array",
- "items": {
- "$ref": "CSSKeyframeRule"
- }
- }
- ]
- },
- {
- "id": "CSSKeyframeRule",
- "description": "CSS keyframe rule representation.",
- "type": "object",
- "properties": [
+ "name": "totalBytes",
+ "description": "Total expected bytes to download.",
+ "type": "number"
+ },
{
- "name": "styleSheetId",
- "description": "The css style sheet identifier (absent for user agent stylesheet and user-specified\nstylesheet rules) this rule came from.",
- "optional": true,
- "$ref": "StyleSheetId"
+ "name": "receivedBytes",
+ "description": "Total bytes received.",
+ "type": "number"
},
{
- "name": "origin",
- "description": "Parent stylesheet's origin.",
- "$ref": "StyleSheetOrigin"
- },
- {
- "name": "keyText",
- "description": "Associated key text.",
- "$ref": "Value"
- },
- {
- "name": "style",
- "description": "Associated style declaration.",
- "$ref": "CSSStyle"
- }
- ]
- },
- {
- "id": "StyleDeclarationEdit",
- "description": "A descriptor of operation to mutate style declaration text.",
- "type": "object",
- "properties": [
- {
- "name": "styleSheetId",
- "description": "The css style sheet identifier.",
- "$ref": "StyleSheetId"
- },
- {
- "name": "range",
- "description": "The range of the style text in the enclosing stylesheet.",
- "$ref": "SourceRange"
- },
- {
- "name": "text",
- "description": "New style text.",
- "type": "string"
+ "name": "state",
+ "description": "Download status.",
+ "type": "string",
+ "enum": [
+ "inProgress",
+ "completed",
+ "canceled"
+ ]
}
]
}
+ ]
+ },
+ {
+ "domain": "CSS",
+ "description": "This domain exposes CSS read/write operations. All CSS objects (stylesheets, rules, and styles)\nhave an associated `id` used in subsequent operations on the related object. Each object type has\na specific `id` structure, and those are not interchangeable between objects of different kinds.\nCSS objects can be loaded using the `get*ForNode()` calls (which accept a DOM node id). A client\ncan also keep track of stylesheets via the `styleSheetAdded`/`styleSheetRemoved` events and\nsubsequently load the required stylesheet contents using the `getStyleSheet[Text]()` methods.",
+ "experimental": true,
+ "dependencies": [
+ "DOM",
+ "Page"
],
- "commands": [
+ "types": [
{
- "name": "addRule",
- "description": "Inserts a new rule with the given `ruleText` in a stylesheet with given `styleSheetId`, at the\nposition specified by `location`.",
- "parameters": [
- {
- "name": "styleSheetId",
- "description": "The css style sheet identifier where a new rule should be inserted.",
- "$ref": "StyleSheetId"
- },
- {
- "name": "ruleText",
- "description": "The text of a new rule.",
- "type": "string"
- },
- {
- "name": "location",
- "description": "Text position of a new rule in the target style sheet.",
- "$ref": "SourceRange"
- }
- ],
- "returns": [
- {
- "name": "rule",
- "description": "The newly created rule.",
- "$ref": "CSSRule"
- }
+ "id": "StyleSheetId",
+ "type": "string"
+ },
+ {
+ "id": "StyleSheetOrigin",
+ "description": "Stylesheet type: \"injected\" for stylesheets injected via extension, \"user-agent\" for user-agent\nstylesheets, \"inspector\" for stylesheets created by the inspector (i.e. those holding the \"via\ninspector\" rules), \"regular\" for regular stylesheets.",
+ "type": "string",
+ "enum": [
+ "injected",
+ "user-agent",
+ "inspector",
+ "regular"
]
},
{
- "name": "collectClassNames",
- "description": "Returns all class names from specified stylesheet.",
- "parameters": [
+ "id": "PseudoElementMatches",
+ "description": "CSS rule collection for a single pseudo style.",
+ "type": "object",
+ "properties": [
{
- "name": "styleSheetId",
- "$ref": "StyleSheetId"
- }
- ],
- "returns": [
+ "name": "pseudoType",
+ "description": "Pseudo element type.",
+ "$ref": "DOM.PseudoType"
+ },
{
- "name": "classNames",
- "description": "Class name list.",
+ "name": "matches",
+ "description": "Matches of CSS rules applicable to the pseudo style.",
"type": "array",
"items": {
- "type": "string"
+ "$ref": "RuleMatch"
}
}
]
},
{
- "name": "createStyleSheet",
- "description": "Creates a new special \"via-inspector\" stylesheet in the frame with given `frameId`.",
- "parameters": [
+ "id": "InheritedStyleEntry",
+ "description": "Inherited CSS rule collection from ancestor node.",
+ "type": "object",
+ "properties": [
{
- "name": "frameId",
- "description": "Identifier of the frame where \"via-inspector\" stylesheet should be created.",
- "$ref": "Page.FrameId"
- }
- ],
- "returns": [
+ "name": "inlineStyle",
+ "description": "The ancestor node's inline style, if any, in the style inheritance chain.",
+ "optional": true,
+ "$ref": "CSSStyle"
+ },
{
- "name": "styleSheetId",
- "description": "Identifier of the created \"via-inspector\" stylesheet.",
- "$ref": "StyleSheetId"
+ "name": "matchedCSSRules",
+ "description": "Matches of CSS rules matching the ancestor node in the style inheritance chain.",
+ "type": "array",
+ "items": {
+ "$ref": "RuleMatch"
+ }
}
]
},
{
- "name": "disable",
- "description": "Disables the CSS agent for the given page."
- },
- {
- "name": "enable",
- "description": "Enables the CSS agent for the given page. Clients should not assume that the CSS agent has been\nenabled until the result of this command is received."
- },
- {
- "name": "forcePseudoState",
- "description": "Ensures that the given node will have specified pseudo-classes whenever its style is computed by\nthe browser.",
- "parameters": [
+ "id": "RuleMatch",
+ "description": "Match data for a CSS rule.",
+ "type": "object",
+ "properties": [
{
- "name": "nodeId",
- "description": "The element id for which to force the pseudo state.",
- "$ref": "DOM.NodeId"
+ "name": "rule",
+ "description": "CSS rule in the match.",
+ "$ref": "CSSRule"
},
{
- "name": "forcedPseudoClasses",
- "description": "Element pseudo classes to force when computing the element's style.",
+ "name": "matchingSelectors",
+ "description": "Matching selector indices in the rule's selectorList selectors (0-based).",
"type": "array",
"items": {
- "type": "string"
+ "type": "integer"
}
}
]
},
{
- "name": "getBackgroundColors",
- "parameters": [
- {
- "name": "nodeId",
- "description": "Id of the node to get background colors for.",
- "$ref": "DOM.NodeId"
- }
- ],
- "returns": [
- {
- "name": "backgroundColors",
- "description": "The range of background colors behind this element, if it contains any visible text. If no\nvisible text is present, this will be undefined. In the case of a flat background color,\nthis will consist of simply that color. In the case of a gradient, this will consist of each\nof the color stops. For anything more complicated, this will be an empty array. Images will\nbe ignored (as if the image had failed to load).",
- "optional": true,
- "type": "array",
- "items": {
- "type": "string"
- }
- },
+ "id": "Value",
+ "description": "Data for a simple selector (these are delimited by commas in a selector list).",
+ "type": "object",
+ "properties": [
{
- "name": "computedFontSize",
- "description": "The computed font size for this node, as a CSS computed value string (e.g. '12px').",
- "optional": true,
+ "name": "text",
+ "description": "Value text.",
"type": "string"
},
{
- "name": "computedFontWeight",
- "description": "The computed font weight for this node, as a CSS computed value string (e.g. 'normal' or\n'100').",
+ "name": "range",
+ "description": "Value range in the underlying resource (if available).",
"optional": true,
- "type": "string"
+ "$ref": "SourceRange"
}
]
},
{
- "name": "getComputedStyleForNode",
- "description": "Returns the computed style for a DOM node identified by `nodeId`.",
- "parameters": [
- {
- "name": "nodeId",
- "$ref": "DOM.NodeId"
- }
- ],
- "returns": [
+ "id": "SelectorList",
+ "description": "Selector list data.",
+ "type": "object",
+ "properties": [
{
- "name": "computedStyle",
- "description": "Computed style for the specified DOM node.",
+ "name": "selectors",
+ "description": "Selectors in the list.",
"type": "array",
"items": {
- "$ref": "CSSComputedStyleProperty"
+ "$ref": "Value"
}
+ },
+ {
+ "name": "text",
+ "description": "Rule selector text.",
+ "type": "string"
}
]
},
{
- "name": "getInlineStylesForNode",
- "description": "Returns the styles defined inline (explicitly in the \"style\" attribute and implicitly, using DOM\nattributes) for a DOM node identified by `nodeId`.",
- "parameters": [
+ "id": "CSSStyleSheetHeader",
+ "description": "CSS stylesheet metainformation.",
+ "type": "object",
+ "properties": [
{
- "name": "nodeId",
- "$ref": "DOM.NodeId"
- }
- ],
- "returns": [
+ "name": "styleSheetId",
+ "description": "The stylesheet identifier.",
+ "$ref": "StyleSheetId"
+ },
{
- "name": "inlineStyle",
- "description": "Inline style for the specified DOM node.",
- "optional": true,
- "$ref": "CSSStyle"
+ "name": "frameId",
+ "description": "Owner frame identifier.",
+ "$ref": "Page.FrameId"
},
{
- "name": "attributesStyle",
- "description": "Attribute-defined element style (e.g. resulting from \"width=20 height=100%\").",
+ "name": "sourceURL",
+ "description": "Stylesheet resource URL. Empty if this is a constructed stylesheet created using\nnew CSSStyleSheet() (but non-empty if this is a constructed sylesheet imported\nas a CSS module script).",
+ "type": "string"
+ },
+ {
+ "name": "sourceMapURL",
+ "description": "URL of source map associated with the stylesheet (if any).",
"optional": true,
- "$ref": "CSSStyle"
- }
- ]
- },
- {
- "name": "getMatchedStylesForNode",
- "description": "Returns requested styles for a DOM node identified by `nodeId`.",
- "parameters": [
+ "type": "string"
+ },
{
- "name": "nodeId",
- "$ref": "DOM.NodeId"
- }
- ],
- "returns": [
+ "name": "origin",
+ "description": "Stylesheet origin.",
+ "$ref": "StyleSheetOrigin"
+ },
{
- "name": "inlineStyle",
- "description": "Inline style for the specified DOM node.",
+ "name": "title",
+ "description": "Stylesheet title.",
+ "type": "string"
+ },
+ {
+ "name": "ownerNode",
+ "description": "The backend id for the owner node of the stylesheet.",
"optional": true,
- "$ref": "CSSStyle"
+ "$ref": "DOM.BackendNodeId"
},
{
- "name": "attributesStyle",
- "description": "Attribute-defined element style (e.g. resulting from \"width=20 height=100%\").",
+ "name": "disabled",
+ "description": "Denotes whether the stylesheet is disabled.",
+ "type": "boolean"
+ },
+ {
+ "name": "hasSourceURL",
+ "description": "Whether the sourceURL field value comes from the sourceURL comment.",
"optional": true,
- "$ref": "CSSStyle"
+ "type": "boolean"
},
{
- "name": "matchedCSSRules",
- "description": "CSS rules matching this node, from all applicable stylesheets.",
+ "name": "isInline",
+ "description": "Whether this stylesheet is created for STYLE tag by parser. This flag is not set for\ndocument.written STYLE tags.",
+ "type": "boolean"
+ },
+ {
+ "name": "isMutable",
+ "description": "Whether this stylesheet is mutable. Inline stylesheets become mutable\nafter they have been modified via CSSOM API.\n element's stylesheets become mutable only if DevTools modifies them.\nConstructed stylesheets (new CSSStyleSheet()) are mutable immediately after creation.",
+ "type": "boolean"
+ },
+ {
+ "name": "isConstructed",
+ "description": "True if this stylesheet is created through new CSSStyleSheet() or imported as a\nCSS module script.",
+ "type": "boolean"
+ },
+ {
+ "name": "startLine",
+ "description": "Line offset of the stylesheet within the resource (zero based).",
+ "type": "number"
+ },
+ {
+ "name": "startColumn",
+ "description": "Column offset of the stylesheet within the resource (zero based).",
+ "type": "number"
+ },
+ {
+ "name": "length",
+ "description": "Size of the content (in characters).",
+ "type": "number"
+ },
+ {
+ "name": "endLine",
+ "description": "Line offset of the end of the stylesheet within the resource (zero based).",
+ "type": "number"
+ },
+ {
+ "name": "endColumn",
+ "description": "Column offset of the end of the stylesheet within the resource (zero based).",
+ "type": "number"
+ }
+ ]
+ },
+ {
+ "id": "CSSRule",
+ "description": "CSS rule representation.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "styleSheetId",
+ "description": "The css style sheet identifier (absent for user agent stylesheet and user-specified\nstylesheet rules) this rule came from.",
"optional": true,
- "type": "array",
- "items": {
- "$ref": "RuleMatch"
- }
+ "$ref": "StyleSheetId"
},
{
- "name": "pseudoElements",
- "description": "Pseudo style matches for this node.",
+ "name": "selectorList",
+ "description": "Rule selector data.",
+ "$ref": "SelectorList"
+ },
+ {
+ "name": "origin",
+ "description": "Parent stylesheet's origin.",
+ "$ref": "StyleSheetOrigin"
+ },
+ {
+ "name": "style",
+ "description": "Associated style declaration.",
+ "$ref": "CSSStyle"
+ },
+ {
+ "name": "media",
+ "description": "Media list array (for rules involving media queries). The array enumerates media queries\nstarting with the innermost one, going outwards.",
"optional": true,
"type": "array",
"items": {
- "$ref": "PseudoElementMatches"
+ "$ref": "CSSMedia"
}
},
{
- "name": "inherited",
- "description": "A chain of inherited styles (from the immediate node parent up to the DOM tree root).",
+ "name": "containerQueries",
+ "description": "Container query list array (for rules involving container queries).\nThe array enumerates container queries starting with the innermost one, going outwards.",
+ "experimental": true,
"optional": true,
"type": "array",
"items": {
- "$ref": "InheritedStyleEntry"
+ "$ref": "CSSContainerQuery"
}
},
{
- "name": "cssKeyframesRules",
- "description": "A list of CSS keyframed animations matching this node.",
+ "name": "supports",
+ "description": "@supports CSS at-rule array.\nThe array enumerates @supports at-rules starting with the innermost one, going outwards.",
+ "experimental": true,
"optional": true,
"type": "array",
"items": {
- "$ref": "CSSKeyframesRule"
+ "$ref": "CSSSupports"
}
}
]
},
{
- "name": "getMediaQueries",
- "description": "Returns all media queries parsed by the rendering engine.",
- "returns": [
+ "id": "RuleUsage",
+ "description": "CSS coverage information.",
+ "type": "object",
+ "properties": [
{
- "name": "medias",
- "type": "array",
- "items": {
- "$ref": "CSSMedia"
- }
- }
- ]
- },
- {
- "name": "getPlatformFontsForNode",
- "description": "Requests information about platform fonts which we used to render child TextNodes in the given\nnode.",
- "parameters": [
+ "name": "styleSheetId",
+ "description": "The css style sheet identifier (absent for user agent stylesheet and user-specified\nstylesheet rules) this rule came from.",
+ "$ref": "StyleSheetId"
+ },
{
- "name": "nodeId",
- "$ref": "DOM.NodeId"
- }
- ],
- "returns": [
+ "name": "startOffset",
+ "description": "Offset of the start of the rule (including selector) from the beginning of the stylesheet.",
+ "type": "number"
+ },
{
- "name": "fonts",
- "description": "Usage statistics for every employed platform font.",
- "type": "array",
- "items": {
- "$ref": "PlatformFontUsage"
- }
+ "name": "endOffset",
+ "description": "Offset of the end of the rule body from the beginning of the stylesheet.",
+ "type": "number"
+ },
+ {
+ "name": "used",
+ "description": "Indicates whether the rule was actually used by some element in the page.",
+ "type": "boolean"
}
]
},
{
- "name": "getStyleSheetText",
- "description": "Returns the current textual content for a stylesheet.",
- "parameters": [
+ "id": "SourceRange",
+ "description": "Text range within a resource. All numbers are zero-based.",
+ "type": "object",
+ "properties": [
{
- "name": "styleSheetId",
- "$ref": "StyleSheetId"
- }
- ],
- "returns": [
+ "name": "startLine",
+ "description": "Start line of range.",
+ "type": "integer"
+ },
{
- "name": "text",
- "description": "The stylesheet text.",
- "type": "string"
+ "name": "startColumn",
+ "description": "Start column of range (inclusive).",
+ "type": "integer"
+ },
+ {
+ "name": "endLine",
+ "description": "End line of range",
+ "type": "integer"
+ },
+ {
+ "name": "endColumn",
+ "description": "End column of range (exclusive).",
+ "type": "integer"
}
]
},
{
- "name": "setEffectivePropertyValueForNode",
- "description": "Find a rule with the given active property for the given node and set the new value for this\nproperty",
- "parameters": [
- {
- "name": "nodeId",
- "description": "The element id for which to set property.",
- "$ref": "DOM.NodeId"
- },
+ "id": "ShorthandEntry",
+ "type": "object",
+ "properties": [
{
- "name": "propertyName",
+ "name": "name",
+ "description": "Shorthand name.",
"type": "string"
},
{
"name": "value",
+ "description": "Shorthand value.",
"type": "string"
+ },
+ {
+ "name": "important",
+ "description": "Whether the property has \"!important\" annotation (implies `false` if absent).",
+ "optional": true,
+ "type": "boolean"
}
]
},
{
- "name": "setKeyframeKey",
- "description": "Modifies the keyframe rule key text.",
- "parameters": [
- {
- "name": "styleSheetId",
- "$ref": "StyleSheetId"
- },
+ "id": "CSSComputedStyleProperty",
+ "type": "object",
+ "properties": [
{
- "name": "range",
- "$ref": "SourceRange"
+ "name": "name",
+ "description": "Computed style property name.",
+ "type": "string"
},
{
- "name": "keyText",
+ "name": "value",
+ "description": "Computed style property value.",
"type": "string"
}
- ],
- "returns": [
- {
- "name": "keyText",
- "description": "The resulting key text after modification.",
- "$ref": "Value"
- }
]
},
{
- "name": "setMediaText",
- "description": "Modifies the rule selector.",
- "parameters": [
+ "id": "CSSStyle",
+ "description": "CSS style representation.",
+ "type": "object",
+ "properties": [
{
"name": "styleSheetId",
+ "description": "The css style sheet identifier (absent for user agent stylesheet and user-specified\nstylesheet rules) this rule came from.",
+ "optional": true,
"$ref": "StyleSheetId"
},
{
- "name": "range",
- "$ref": "SourceRange"
+ "name": "cssProperties",
+ "description": "CSS properties in the style.",
+ "type": "array",
+ "items": {
+ "$ref": "CSSProperty"
+ }
},
{
- "name": "text",
+ "name": "shorthandEntries",
+ "description": "Computed values for all shorthands found in the style.",
+ "type": "array",
+ "items": {
+ "$ref": "ShorthandEntry"
+ }
+ },
+ {
+ "name": "cssText",
+ "description": "Style declaration text (if available).",
+ "optional": true,
"type": "string"
- }
- ],
- "returns": [
+ },
{
- "name": "media",
- "description": "The resulting CSS media rule after modification.",
- "$ref": "CSSMedia"
+ "name": "range",
+ "description": "Style declaration range in the enclosing stylesheet (if available).",
+ "optional": true,
+ "$ref": "SourceRange"
}
]
},
{
- "name": "setRuleSelector",
- "description": "Modifies the rule selector.",
- "parameters": [
+ "id": "CSSProperty",
+ "description": "CSS property declaration data.",
+ "type": "object",
+ "properties": [
{
- "name": "styleSheetId",
- "$ref": "StyleSheetId"
+ "name": "name",
+ "description": "The property name.",
+ "type": "string"
},
{
- "name": "range",
- "$ref": "SourceRange"
+ "name": "value",
+ "description": "The property value.",
+ "type": "string"
},
{
- "name": "selector",
+ "name": "important",
+ "description": "Whether the property has \"!important\" annotation (implies `false` if absent).",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "implicit",
+ "description": "Whether the property is implicit (implies `false` if absent).",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "text",
+ "description": "The full property text as specified in the style.",
+ "optional": true,
"type": "string"
- }
- ],
- "returns": [
+ },
{
- "name": "selectorList",
- "description": "The resulting selector list after modification.",
- "$ref": "SelectorList"
+ "name": "parsedOk",
+ "description": "Whether the property is understood by the browser (implies `true` if absent).",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "disabled",
+ "description": "Whether the property is disabled by the user (present for source-based properties only).",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "range",
+ "description": "The entire property range in the enclosing style declaration (if available).",
+ "optional": true,
+ "$ref": "SourceRange"
}
]
},
{
- "name": "setStyleSheetText",
- "description": "Sets the new stylesheet text.",
- "parameters": [
- {
- "name": "styleSheetId",
- "$ref": "StyleSheetId"
- },
+ "id": "CSSMedia",
+ "description": "CSS media rule descriptor.",
+ "type": "object",
+ "properties": [
{
"name": "text",
+ "description": "Media query text.",
"type": "string"
- }
- ],
- "returns": [
+ },
{
- "name": "sourceMapURL",
- "description": "URL of source map associated with script (if any).",
+ "name": "source",
+ "description": "Source of the media query: \"mediaRule\" if specified by a @media rule, \"importRule\" if\nspecified by an @import rule, \"linkedSheet\" if specified by a \"media\" attribute in a linked\nstylesheet's LINK tag, \"inlineSheet\" if specified by a \"media\" attribute in an inline\nstylesheet's STYLE tag.",
+ "type": "string",
+ "enum": [
+ "mediaRule",
+ "importRule",
+ "linkedSheet",
+ "inlineSheet"
+ ]
+ },
+ {
+ "name": "sourceURL",
+ "description": "URL of the document containing the media query description.",
"optional": true,
"type": "string"
- }
- ]
- },
- {
- "name": "setStyleTexts",
- "description": "Applies specified style edits one after another in the given order.",
- "parameters": [
+ },
{
- "name": "edits",
- "type": "array",
- "items": {
- "$ref": "StyleDeclarationEdit"
- }
- }
- ],
- "returns": [
+ "name": "range",
+ "description": "The associated rule (@media or @import) header range in the enclosing stylesheet (if\navailable).",
+ "optional": true,
+ "$ref": "SourceRange"
+ },
{
- "name": "styles",
- "description": "The resulting styles after modification.",
+ "name": "styleSheetId",
+ "description": "Identifier of the stylesheet containing this object (if exists).",
+ "optional": true,
+ "$ref": "StyleSheetId"
+ },
+ {
+ "name": "mediaList",
+ "description": "Array of media queries.",
+ "optional": true,
"type": "array",
"items": {
- "$ref": "CSSStyle"
+ "$ref": "MediaQuery"
}
}
]
},
{
- "name": "startRuleUsageTracking",
- "description": "Enables the selector recording."
- },
- {
- "name": "stopRuleUsageTracking",
- "description": "Stop tracking rule usage and return the list of rules that were used since last call to\n`takeCoverageDelta` (or since start of coverage instrumentation)",
- "returns": [
+ "id": "MediaQuery",
+ "description": "Media query descriptor.",
+ "type": "object",
+ "properties": [
{
- "name": "ruleUsage",
+ "name": "expressions",
+ "description": "Array of media query expressions.",
"type": "array",
"items": {
- "$ref": "RuleUsage"
+ "$ref": "MediaQueryExpression"
}
+ },
+ {
+ "name": "active",
+ "description": "Whether the media query condition is satisfied.",
+ "type": "boolean"
}
]
},
{
- "name": "takeCoverageDelta",
- "description": "Obtain list of rules that became used since last call to this method (or since start of coverage\ninstrumentation)",
- "returns": [
+ "id": "MediaQueryExpression",
+ "description": "Media query expression descriptor.",
+ "type": "object",
+ "properties": [
{
- "name": "coverage",
- "type": "array",
- "items": {
- "$ref": "RuleUsage"
- }
- }
- ]
- }
- ],
- "events": [
- {
- "name": "fontsUpdated",
- "description": "Fires whenever a web font is updated. A non-empty font parameter indicates a successfully loaded\nweb font",
- "parameters": [
+ "name": "value",
+ "description": "Media query expression value.",
+ "type": "number"
+ },
{
- "name": "font",
- "description": "The web font that has loaded.",
+ "name": "unit",
+ "description": "Media query expression units.",
+ "type": "string"
+ },
+ {
+ "name": "feature",
+ "description": "Media query expression feature.",
+ "type": "string"
+ },
+ {
+ "name": "valueRange",
+ "description": "The associated range of the value text in the enclosing stylesheet (if available).",
"optional": true,
- "$ref": "FontFace"
- }
- ]
- },
- {
- "name": "mediaQueryResultChanged",
- "description": "Fires whenever a MediaQuery result changes (for example, after a browser window has been\nresized.) The current implementation considers only viewport-dependent media features."
- },
- {
- "name": "styleSheetAdded",
- "description": "Fired whenever an active document stylesheet is added.",
- "parameters": [
+ "$ref": "SourceRange"
+ },
{
- "name": "header",
- "description": "Added stylesheet metainfo.",
- "$ref": "CSSStyleSheetHeader"
+ "name": "computedLength",
+ "description": "Computed length of media query expression (if applicable).",
+ "optional": true,
+ "type": "number"
}
]
},
{
- "name": "styleSheetChanged",
- "description": "Fired whenever a stylesheet is changed as a result of the client operation.",
- "parameters": [
+ "id": "CSSContainerQuery",
+ "description": "CSS container query rule descriptor.",
+ "experimental": true,
+ "type": "object",
+ "properties": [
+ {
+ "name": "text",
+ "description": "Container query text.",
+ "type": "string"
+ },
+ {
+ "name": "range",
+ "description": "The associated rule header range in the enclosing stylesheet (if\navailable).",
+ "optional": true,
+ "$ref": "SourceRange"
+ },
{
"name": "styleSheetId",
+ "description": "Identifier of the stylesheet containing this object (if exists).",
+ "optional": true,
"$ref": "StyleSheetId"
+ },
+ {
+ "name": "name",
+ "description": "Optional name for the container.",
+ "optional": true,
+ "type": "string"
}
]
},
{
- "name": "styleSheetRemoved",
- "description": "Fired whenever an active document stylesheet is removed.",
- "parameters": [
+ "id": "CSSSupports",
+ "description": "CSS Supports at-rule descriptor.",
+ "experimental": true,
+ "type": "object",
+ "properties": [
{
- "name": "styleSheetId",
- "description": "Identifier of the removed stylesheet.",
- "$ref": "StyleSheetId"
+ "name": "text",
+ "description": "Supports rule text.",
+ "type": "string"
+ },
+ {
+ "name": "range",
+ "description": "The associated rule header range in the enclosing stylesheet (if\navailable).",
+ "optional": true,
+ "$ref": "SourceRange"
+ },
+ {
+ "name": "styleSheetId",
+ "description": "Identifier of the stylesheet containing this object (if exists).",
+ "optional": true,
+ "$ref": "StyleSheetId"
}
]
- }
- ]
- },
- {
- "domain": "CacheStorage",
- "experimental": true,
- "types": [
- {
- "id": "CacheId",
- "description": "Unique identifier of the Cache object.",
- "type": "string"
},
{
- "id": "CachedResponseType",
- "description": "type of HTTP response cached",
- "type": "string",
- "enum": [
- "basic",
- "cors",
- "default",
- "error",
- "opaqueResponse",
- "opaqueRedirect"
+ "id": "PlatformFontUsage",
+ "description": "Information about amount of glyphs that were rendered with given font.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "familyName",
+ "description": "Font's family name reported by platform.",
+ "type": "string"
+ },
+ {
+ "name": "isCustomFont",
+ "description": "Indicates if the font was downloaded or resolved locally.",
+ "type": "boolean"
+ },
+ {
+ "name": "glyphCount",
+ "description": "Amount of glyphs that were rendered with this font.",
+ "type": "number"
+ }
]
},
{
- "id": "DataEntry",
- "description": "Data entry.",
+ "id": "FontVariationAxis",
+ "description": "Information about font variation axes for variable fonts",
"type": "object",
"properties": [
{
- "name": "requestURL",
- "description": "Request URL.",
+ "name": "tag",
+ "description": "The font-variation-setting tag (a.k.a. \"axis tag\").",
"type": "string"
},
{
- "name": "requestMethod",
- "description": "Request method.",
+ "name": "name",
+ "description": "Human-readable variation name in the default language (normally, \"en\").",
"type": "string"
},
{
- "name": "requestHeaders",
- "description": "Request headers",
- "type": "array",
- "items": {
- "$ref": "Header"
- }
+ "name": "minValue",
+ "description": "The minimum value (inclusive) the font supports for this tag.",
+ "type": "number"
},
{
- "name": "responseTime",
- "description": "Number of seconds since epoch.",
+ "name": "maxValue",
+ "description": "The maximum value (inclusive) the font supports for this tag.",
"type": "number"
},
{
- "name": "responseStatus",
- "description": "HTTP response status code.",
- "type": "integer"
+ "name": "defaultValue",
+ "description": "The default value.",
+ "type": "number"
+ }
+ ]
+ },
+ {
+ "id": "FontFace",
+ "description": "Properties of a web font: https://www.w3.org/TR/2008/REC-CSS2-20080411/fonts.html#font-descriptions\nand additional information such as platformFontFamily and fontVariationAxes.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "fontFamily",
+ "description": "The font-family.",
+ "type": "string"
},
{
- "name": "responseStatusText",
- "description": "HTTP response status text.",
+ "name": "fontStyle",
+ "description": "The font-style.",
"type": "string"
},
{
- "name": "responseType",
- "description": "HTTP response type",
- "$ref": "CachedResponseType"
+ "name": "fontVariant",
+ "description": "The font-variant.",
+ "type": "string"
},
{
- "name": "responseHeaders",
- "description": "Response headers",
+ "name": "fontWeight",
+ "description": "The font-weight.",
+ "type": "string"
+ },
+ {
+ "name": "fontStretch",
+ "description": "The font-stretch.",
+ "type": "string"
+ },
+ {
+ "name": "unicodeRange",
+ "description": "The unicode-range.",
+ "type": "string"
+ },
+ {
+ "name": "src",
+ "description": "The src.",
+ "type": "string"
+ },
+ {
+ "name": "platformFontFamily",
+ "description": "The resolved platform font family",
+ "type": "string"
+ },
+ {
+ "name": "fontVariationAxes",
+ "description": "Available variation settings (a.k.a. \"axes\").",
+ "optional": true,
"type": "array",
"items": {
- "$ref": "Header"
+ "$ref": "FontVariationAxis"
}
}
]
},
{
- "id": "Cache",
- "description": "Cache identifier.",
+ "id": "CSSKeyframesRule",
+ "description": "CSS keyframes rule representation.",
"type": "object",
"properties": [
{
- "name": "cacheId",
- "description": "An opaque unique id of the cache.",
- "$ref": "CacheId"
- },
- {
- "name": "securityOrigin",
- "description": "Security origin of the cache.",
- "type": "string"
+ "name": "animationName",
+ "description": "Animation name.",
+ "$ref": "Value"
},
{
- "name": "cacheName",
- "description": "The name of the cache.",
- "type": "string"
+ "name": "keyframes",
+ "description": "List of keyframes.",
+ "type": "array",
+ "items": {
+ "$ref": "CSSKeyframeRule"
+ }
}
]
},
{
- "id": "Header",
+ "id": "CSSKeyframeRule",
+ "description": "CSS keyframe rule representation.",
"type": "object",
"properties": [
{
- "name": "name",
- "type": "string"
+ "name": "styleSheetId",
+ "description": "The css style sheet identifier (absent for user agent stylesheet and user-specified\nstylesheet rules) this rule came from.",
+ "optional": true,
+ "$ref": "StyleSheetId"
},
{
- "name": "value",
- "type": "string"
+ "name": "origin",
+ "description": "Parent stylesheet's origin.",
+ "$ref": "StyleSheetOrigin"
+ },
+ {
+ "name": "keyText",
+ "description": "Associated key text.",
+ "$ref": "Value"
+ },
+ {
+ "name": "style",
+ "description": "Associated style declaration.",
+ "$ref": "CSSStyle"
}
]
},
{
- "id": "CachedResponse",
- "description": "Cached response",
+ "id": "StyleDeclarationEdit",
+ "description": "A descriptor of operation to mutate style declaration text.",
"type": "object",
"properties": [
{
- "name": "body",
- "description": "Entry content, base64-encoded.",
+ "name": "styleSheetId",
+ "description": "The css style sheet identifier.",
+ "$ref": "StyleSheetId"
+ },
+ {
+ "name": "range",
+ "description": "The range of the style text in the enclosing stylesheet.",
+ "$ref": "SourceRange"
+ },
+ {
+ "name": "text",
+ "description": "New style text.",
"type": "string"
}
]
@@ -2786,1351 +3415,1450 @@
],
"commands": [
{
- "name": "deleteCache",
- "description": "Deletes a cache.",
- "parameters": [
- {
- "name": "cacheId",
- "description": "Id of cache for deletion.",
- "$ref": "CacheId"
- }
- ]
- },
- {
- "name": "deleteEntry",
- "description": "Deletes a cache entry.",
+ "name": "addRule",
+ "description": "Inserts a new rule with the given `ruleText` in a stylesheet with given `styleSheetId`, at the\nposition specified by `location`.",
"parameters": [
{
- "name": "cacheId",
- "description": "Id of cache where the entry will be deleted.",
- "$ref": "CacheId"
+ "name": "styleSheetId",
+ "description": "The css style sheet identifier where a new rule should be inserted.",
+ "$ref": "StyleSheetId"
},
{
- "name": "request",
- "description": "URL spec of the request.",
+ "name": "ruleText",
+ "description": "The text of a new rule.",
"type": "string"
+ },
+ {
+ "name": "location",
+ "description": "Text position of a new rule in the target style sheet.",
+ "$ref": "SourceRange"
+ }
+ ],
+ "returns": [
+ {
+ "name": "rule",
+ "description": "The newly created rule.",
+ "$ref": "CSSRule"
}
]
},
{
- "name": "requestCacheNames",
- "description": "Requests cache names.",
+ "name": "collectClassNames",
+ "description": "Returns all class names from specified stylesheet.",
"parameters": [
{
- "name": "securityOrigin",
- "description": "Security origin.",
- "type": "string"
+ "name": "styleSheetId",
+ "$ref": "StyleSheetId"
}
],
"returns": [
{
- "name": "caches",
- "description": "Caches for the security origin.",
+ "name": "classNames",
+ "description": "Class name list.",
"type": "array",
"items": {
- "$ref": "Cache"
+ "type": "string"
}
}
]
},
{
- "name": "requestCachedResponse",
- "description": "Fetches cache entry.",
+ "name": "createStyleSheet",
+ "description": "Creates a new special \"via-inspector\" stylesheet in the frame with given `frameId`.",
"parameters": [
{
- "name": "cacheId",
- "description": "Id of cache that contains the entry.",
- "$ref": "CacheId"
- },
+ "name": "frameId",
+ "description": "Identifier of the frame where \"via-inspector\" stylesheet should be created.",
+ "$ref": "Page.FrameId"
+ }
+ ],
+ "returns": [
{
- "name": "requestURL",
- "description": "URL spec of the request.",
- "type": "string"
+ "name": "styleSheetId",
+ "description": "Identifier of the created \"via-inspector\" stylesheet.",
+ "$ref": "StyleSheetId"
+ }
+ ]
+ },
+ {
+ "name": "disable",
+ "description": "Disables the CSS agent for the given page."
+ },
+ {
+ "name": "enable",
+ "description": "Enables the CSS agent for the given page. Clients should not assume that the CSS agent has been\nenabled until the result of this command is received."
+ },
+ {
+ "name": "forcePseudoState",
+ "description": "Ensures that the given node will have specified pseudo-classes whenever its style is computed by\nthe browser.",
+ "parameters": [
+ {
+ "name": "nodeId",
+ "description": "The element id for which to force the pseudo state.",
+ "$ref": "DOM.NodeId"
},
{
- "name": "requestHeaders",
- "description": "headers of the request.",
+ "name": "forcedPseudoClasses",
+ "description": "Element pseudo classes to force when computing the element's style.",
"type": "array",
"items": {
- "$ref": "Header"
+ "type": "string"
}
}
- ],
- "returns": [
- {
- "name": "response",
- "description": "Response read from the cache.",
- "$ref": "CachedResponse"
- }
]
},
{
- "name": "requestEntries",
- "description": "Requests data from cache.",
+ "name": "getBackgroundColors",
"parameters": [
{
- "name": "cacheId",
- "description": "ID of cache to get entries from.",
- "$ref": "CacheId"
- },
+ "name": "nodeId",
+ "description": "Id of the node to get background colors for.",
+ "$ref": "DOM.NodeId"
+ }
+ ],
+ "returns": [
{
- "name": "skipCount",
- "description": "Number of records to skip.",
- "type": "integer"
+ "name": "backgroundColors",
+ "description": "The range of background colors behind this element, if it contains any visible text. If no\nvisible text is present, this will be undefined. In the case of a flat background color,\nthis will consist of simply that color. In the case of a gradient, this will consist of each\nof the color stops. For anything more complicated, this will be an empty array. Images will\nbe ignored (as if the image had failed to load).",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
},
{
- "name": "pageSize",
- "description": "Number of records to fetch.",
- "type": "integer"
+ "name": "computedFontSize",
+ "description": "The computed font size for this node, as a CSS computed value string (e.g. '12px').",
+ "optional": true,
+ "type": "string"
},
{
- "name": "pathFilter",
- "description": "If present, only return the entries containing this substring in the path",
+ "name": "computedFontWeight",
+ "description": "The computed font weight for this node, as a CSS computed value string (e.g. 'normal' or\n'100').",
"optional": true,
"type": "string"
}
+ ]
+ },
+ {
+ "name": "getComputedStyleForNode",
+ "description": "Returns the computed style for a DOM node identified by `nodeId`.",
+ "parameters": [
+ {
+ "name": "nodeId",
+ "$ref": "DOM.NodeId"
+ }
],
"returns": [
{
- "name": "cacheDataEntries",
- "description": "Array of object store data entries.",
+ "name": "computedStyle",
+ "description": "Computed style for the specified DOM node.",
"type": "array",
"items": {
- "$ref": "DataEntry"
+ "$ref": "CSSComputedStyleProperty"
}
- },
- {
- "name": "returnCount",
- "description": "Count of returned entries from this storage. If pathFilter is empty, it\nis the count of all entries from this storage.",
- "type": "number"
}
]
- }
- ]
- },
- {
- "domain": "Cast",
- "description": "A domain for interacting with Cast, Presentation API, and Remote Playback API\nfunctionalities.",
- "experimental": true,
- "types": [
+ },
{
- "id": "Sink",
- "type": "object",
- "properties": [
+ "name": "getInlineStylesForNode",
+ "description": "Returns the styles defined inline (explicitly in the \"style\" attribute and implicitly, using DOM\nattributes) for a DOM node identified by `nodeId`.",
+ "parameters": [
{
- "name": "name",
- "type": "string"
- },
+ "name": "nodeId",
+ "$ref": "DOM.NodeId"
+ }
+ ],
+ "returns": [
{
- "name": "id",
- "type": "string"
+ "name": "inlineStyle",
+ "description": "Inline style for the specified DOM node.",
+ "optional": true,
+ "$ref": "CSSStyle"
},
{
- "name": "session",
- "description": "Text describing the current session. Present only if there is an active\nsession on the sink.",
+ "name": "attributesStyle",
+ "description": "Attribute-defined element style (e.g. resulting from \"width=20 height=100%\").",
"optional": true,
- "type": "string"
+ "$ref": "CSSStyle"
}
]
- }
- ],
- "commands": [
+ },
{
- "name": "enable",
- "description": "Starts observing for sinks that can be used for tab mirroring, and if set,\nsinks compatible with |presentationUrl| as well. When sinks are found, a\n|sinksUpdated| event is fired.\nAlso starts observing for issue messages. When an issue is added or removed,\nan |issueUpdated| event is fired.",
+ "name": "getMatchedStylesForNode",
+ "description": "Returns requested styles for a DOM node identified by `nodeId`.",
"parameters": [
{
- "name": "presentationUrl",
+ "name": "nodeId",
+ "$ref": "DOM.NodeId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "inlineStyle",
+ "description": "Inline style for the specified DOM node.",
"optional": true,
- "type": "string"
+ "$ref": "CSSStyle"
+ },
+ {
+ "name": "attributesStyle",
+ "description": "Attribute-defined element style (e.g. resulting from \"width=20 height=100%\").",
+ "optional": true,
+ "$ref": "CSSStyle"
+ },
+ {
+ "name": "matchedCSSRules",
+ "description": "CSS rules matching this node, from all applicable stylesheets.",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "$ref": "RuleMatch"
+ }
+ },
+ {
+ "name": "pseudoElements",
+ "description": "Pseudo style matches for this node.",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "$ref": "PseudoElementMatches"
+ }
+ },
+ {
+ "name": "inherited",
+ "description": "A chain of inherited styles (from the immediate node parent up to the DOM tree root).",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "$ref": "InheritedStyleEntry"
+ }
+ },
+ {
+ "name": "cssKeyframesRules",
+ "description": "A list of CSS keyframed animations matching this node.",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "$ref": "CSSKeyframesRule"
+ }
}
]
},
{
- "name": "disable",
- "description": "Stops observing for sinks and issues."
+ "name": "getMediaQueries",
+ "description": "Returns all media queries parsed by the rendering engine.",
+ "returns": [
+ {
+ "name": "medias",
+ "type": "array",
+ "items": {
+ "$ref": "CSSMedia"
+ }
+ }
+ ]
},
{
- "name": "setSinkToUse",
- "description": "Sets a sink to be used when the web page requests the browser to choose a\nsink via Presentation API, Remote Playback API, or Cast SDK.",
+ "name": "getPlatformFontsForNode",
+ "description": "Requests information about platform fonts which we used to render child TextNodes in the given\nnode.",
"parameters": [
{
- "name": "sinkName",
- "type": "string"
+ "name": "nodeId",
+ "$ref": "DOM.NodeId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "fonts",
+ "description": "Usage statistics for every employed platform font.",
+ "type": "array",
+ "items": {
+ "$ref": "PlatformFontUsage"
+ }
}
]
},
{
- "name": "startTabMirroring",
- "description": "Starts mirroring the tab to the sink.",
+ "name": "getStyleSheetText",
+ "description": "Returns the current textual content for a stylesheet.",
"parameters": [
{
- "name": "sinkName",
+ "name": "styleSheetId",
+ "$ref": "StyleSheetId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "text",
+ "description": "The stylesheet text.",
"type": "string"
}
]
},
{
- "name": "stopCasting",
- "description": "Stops the active Cast session on the sink.",
+ "name": "trackComputedStyleUpdates",
+ "description": "Starts tracking the given computed styles for updates. The specified array of properties\nreplaces the one previously specified. Pass empty array to disable tracking.\nUse takeComputedStyleUpdates to retrieve the list of nodes that had properties modified.\nThe changes to computed style properties are only tracked for nodes pushed to the front-end\nby the DOM agent. If no changes to the tracked properties occur after the node has been pushed\nto the front-end, no updates will be issued for the node.",
+ "experimental": true,
"parameters": [
{
- "name": "sinkName",
- "type": "string"
+ "name": "propertiesToTrack",
+ "type": "array",
+ "items": {
+ "$ref": "CSSComputedStyleProperty"
+ }
}
]
- }
- ],
- "events": [
+ },
{
- "name": "sinksUpdated",
- "description": "This is fired whenever the list of available sinks changes. A sink is a\ndevice or a software surface that you can cast to.",
- "parameters": [
+ "name": "takeComputedStyleUpdates",
+ "description": "Polls the next batch of computed style updates.",
+ "experimental": true,
+ "returns": [
{
- "name": "sinks",
+ "name": "nodeIds",
+ "description": "The list of node Ids that have their tracked computed styles updated",
"type": "array",
"items": {
- "$ref": "Sink"
+ "$ref": "DOM.NodeId"
}
}
]
},
{
- "name": "issueUpdated",
- "description": "This is fired whenever the outstanding issue/error message changes.\n|issueMessage| is empty if there is no issue.",
- "parameters": [
+ "name": "setEffectivePropertyValueForNode",
+ "description": "Find a rule with the given active property for the given node and set the new value for this\nproperty",
+ "parameters": [
{
- "name": "issueMessage",
+ "name": "nodeId",
+ "description": "The element id for which to set property.",
+ "$ref": "DOM.NodeId"
+ },
+ {
+ "name": "propertyName",
+ "type": "string"
+ },
+ {
+ "name": "value",
"type": "string"
}
]
- }
- ]
- },
- {
- "domain": "DOM",
- "description": "This domain exposes DOM read/write operations. Each DOM Node is represented with its mirror object\nthat has an `id`. This `id` can be used to get additional information on the Node, resolve it into\nthe JavaScript object wrapper, etc. It is important that client receives DOM events only for the\nnodes that are known to the client. Backend keeps track of the nodes that were sent to the client\nand never sends the same node twice. It is client's responsibility to collect information about\nthe nodes that were sent to the client.Note that `iframe` owner elements will return\ncorresponding document elements as their child nodes.",
- "dependencies": [
- "Runtime"
- ],
- "types": [
- {
- "id": "NodeId",
- "description": "Unique DOM node identifier.",
- "type": "integer"
- },
- {
- "id": "BackendNodeId",
- "description": "Unique DOM node identifier used to reference a node that may not have been pushed to the\nfront-end.",
- "type": "integer"
},
{
- "id": "BackendNode",
- "description": "Backend node with a friendly name.",
- "type": "object",
- "properties": [
+ "name": "setKeyframeKey",
+ "description": "Modifies the keyframe rule key text.",
+ "parameters": [
{
- "name": "nodeType",
- "description": "`Node`'s nodeType.",
- "type": "integer"
+ "name": "styleSheetId",
+ "$ref": "StyleSheetId"
},
{
- "name": "nodeName",
- "description": "`Node`'s nodeName.",
- "type": "string"
+ "name": "range",
+ "$ref": "SourceRange"
},
{
- "name": "backendNodeId",
- "$ref": "BackendNodeId"
+ "name": "keyText",
+ "type": "string"
+ }
+ ],
+ "returns": [
+ {
+ "name": "keyText",
+ "description": "The resulting key text after modification.",
+ "$ref": "Value"
}
]
},
{
- "id": "PseudoType",
- "description": "Pseudo element type.",
- "type": "string",
- "enum": [
- "first-line",
- "first-letter",
- "before",
- "after",
- "backdrop",
- "selection",
- "first-line-inherited",
- "scrollbar",
- "scrollbar-thumb",
- "scrollbar-button",
- "scrollbar-track",
- "scrollbar-track-piece",
- "scrollbar-corner",
- "resizer",
- "input-list-button"
- ]
- },
- {
- "id": "ShadowRootType",
- "description": "Shadow root type.",
- "type": "string",
- "enum": [
- "user-agent",
- "open",
- "closed"
- ]
- },
- {
- "id": "Node",
- "description": "DOM interaction is implemented in terms of mirror objects that represent the actual DOM nodes.\nDOMNode is a base node mirror type.",
- "type": "object",
- "properties": [
+ "name": "setMediaText",
+ "description": "Modifies the rule selector.",
+ "parameters": [
{
- "name": "nodeId",
- "description": "Node identifier that is passed into the rest of the DOM messages as the `nodeId`. Backend\nwill only push node with given `id` once. It is aware of all requested nodes and will only\nfire DOM events for nodes known to the client.",
- "$ref": "NodeId"
+ "name": "styleSheetId",
+ "$ref": "StyleSheetId"
},
{
- "name": "parentId",
- "description": "The id of the parent node if any.",
- "optional": true,
- "$ref": "NodeId"
+ "name": "range",
+ "$ref": "SourceRange"
},
{
- "name": "backendNodeId",
- "description": "The BackendNodeId for this node.",
- "$ref": "BackendNodeId"
- },
+ "name": "text",
+ "type": "string"
+ }
+ ],
+ "returns": [
{
- "name": "nodeType",
- "description": "`Node`'s nodeType.",
- "type": "integer"
- },
+ "name": "media",
+ "description": "The resulting CSS media rule after modification.",
+ "$ref": "CSSMedia"
+ }
+ ]
+ },
+ {
+ "name": "setContainerQueryText",
+ "description": "Modifies the expression of a container query.",
+ "experimental": true,
+ "parameters": [
{
- "name": "nodeName",
- "description": "`Node`'s nodeName.",
- "type": "string"
+ "name": "styleSheetId",
+ "$ref": "StyleSheetId"
},
{
- "name": "localName",
- "description": "`Node`'s localName.",
- "type": "string"
+ "name": "range",
+ "$ref": "SourceRange"
},
{
- "name": "nodeValue",
- "description": "`Node`'s nodeValue.",
+ "name": "text",
"type": "string"
- },
+ }
+ ],
+ "returns": [
{
- "name": "childNodeCount",
- "description": "Child count for `Container` nodes.",
- "optional": true,
- "type": "integer"
- },
+ "name": "containerQuery",
+ "description": "The resulting CSS container query rule after modification.",
+ "$ref": "CSSContainerQuery"
+ }
+ ]
+ },
+ {
+ "name": "setSupportsText",
+ "description": "Modifies the expression of a supports at-rule.",
+ "experimental": true,
+ "parameters": [
{
- "name": "children",
- "description": "Child nodes of this node when requested with children.",
- "optional": true,
- "type": "array",
- "items": {
- "$ref": "Node"
- }
+ "name": "styleSheetId",
+ "$ref": "StyleSheetId"
},
{
- "name": "attributes",
- "description": "Attributes of the `Element` node in the form of flat array `[name1, value1, name2, value2]`.",
- "optional": true,
- "type": "array",
- "items": {
- "type": "string"
- }
+ "name": "range",
+ "$ref": "SourceRange"
},
{
- "name": "documentURL",
- "description": "Document URL that `Document` or `FrameOwner` node points to.",
- "optional": true,
+ "name": "text",
"type": "string"
- },
+ }
+ ],
+ "returns": [
{
- "name": "baseURL",
- "description": "Base URL that `Document` or `FrameOwner` node uses for URL completion.",
- "optional": true,
- "type": "string"
- },
+ "name": "supports",
+ "description": "The resulting CSS Supports rule after modification.",
+ "$ref": "CSSSupports"
+ }
+ ]
+ },
+ {
+ "name": "setRuleSelector",
+ "description": "Modifies the rule selector.",
+ "parameters": [
{
- "name": "publicId",
- "description": "`DocumentType`'s publicId.",
- "optional": true,
- "type": "string"
+ "name": "styleSheetId",
+ "$ref": "StyleSheetId"
},
{
- "name": "systemId",
- "description": "`DocumentType`'s systemId.",
- "optional": true,
- "type": "string"
+ "name": "range",
+ "$ref": "SourceRange"
},
{
- "name": "internalSubset",
- "description": "`DocumentType`'s internalSubset.",
- "optional": true,
+ "name": "selector",
"type": "string"
- },
+ }
+ ],
+ "returns": [
{
- "name": "xmlVersion",
- "description": "`Document`'s XML version in case of XML documents.",
- "optional": true,
- "type": "string"
+ "name": "selectorList",
+ "description": "The resulting selector list after modification.",
+ "$ref": "SelectorList"
+ }
+ ]
+ },
+ {
+ "name": "setStyleSheetText",
+ "description": "Sets the new stylesheet text.",
+ "parameters": [
+ {
+ "name": "styleSheetId",
+ "$ref": "StyleSheetId"
},
{
- "name": "name",
- "description": "`Attr`'s name.",
- "optional": true,
+ "name": "text",
"type": "string"
- },
+ }
+ ],
+ "returns": [
{
- "name": "value",
- "description": "`Attr`'s value.",
+ "name": "sourceMapURL",
+ "description": "URL of source map associated with script (if any).",
"optional": true,
"type": "string"
- },
+ }
+ ]
+ },
+ {
+ "name": "setStyleTexts",
+ "description": "Applies specified style edits one after another in the given order.",
+ "parameters": [
{
- "name": "pseudoType",
- "description": "Pseudo element type for this node.",
- "optional": true,
- "$ref": "PseudoType"
- },
- {
- "name": "shadowRootType",
- "description": "Shadow root type.",
- "optional": true,
- "$ref": "ShadowRootType"
- },
- {
- "name": "frameId",
- "description": "Frame ID for frame owner elements.",
- "optional": true,
- "$ref": "Page.FrameId"
- },
- {
- "name": "contentDocument",
- "description": "Content document for frame owner elements.",
- "optional": true,
- "$ref": "Node"
- },
- {
- "name": "shadowRoots",
- "description": "Shadow root list for given element host.",
- "optional": true,
+ "name": "edits",
"type": "array",
"items": {
- "$ref": "Node"
+ "$ref": "StyleDeclarationEdit"
}
- },
- {
- "name": "templateContent",
- "description": "Content document fragment for template elements.",
- "optional": true,
- "$ref": "Node"
- },
+ }
+ ],
+ "returns": [
{
- "name": "pseudoElements",
- "description": "Pseudo elements associated with this node.",
- "optional": true,
+ "name": "styles",
+ "description": "The resulting styles after modification.",
"type": "array",
"items": {
- "$ref": "Node"
+ "$ref": "CSSStyle"
}
- },
+ }
+ ]
+ },
+ {
+ "name": "startRuleUsageTracking",
+ "description": "Enables the selector recording."
+ },
+ {
+ "name": "stopRuleUsageTracking",
+ "description": "Stop tracking rule usage and return the list of rules that were used since last call to\n`takeCoverageDelta` (or since start of coverage instrumentation)",
+ "returns": [
{
- "name": "importedDocument",
- "description": "Import document for the HTMLImport links.",
- "optional": true,
- "$ref": "Node"
- },
+ "name": "ruleUsage",
+ "type": "array",
+ "items": {
+ "$ref": "RuleUsage"
+ }
+ }
+ ]
+ },
+ {
+ "name": "takeCoverageDelta",
+ "description": "Obtain list of rules that became used since last call to this method (or since start of coverage\ninstrumentation)",
+ "returns": [
{
- "name": "distributedNodes",
- "description": "Distributed nodes for given insertion point.",
- "optional": true,
+ "name": "coverage",
"type": "array",
"items": {
- "$ref": "BackendNode"
+ "$ref": "RuleUsage"
}
},
{
- "name": "isSVG",
- "description": "Whether the node is SVG.",
- "optional": true,
- "type": "boolean"
+ "name": "timestamp",
+ "description": "Monotonically increasing time, in seconds.",
+ "type": "number"
}
]
},
{
- "id": "RGBA",
- "description": "A structure holding an RGBA color.",
- "type": "object",
- "properties": [
+ "name": "setLocalFontsEnabled",
+ "description": "Enables/disables rendering of local CSS fonts (enabled by default).",
+ "experimental": true,
+ "parameters": [
{
- "name": "r",
- "description": "The red component, in the [0-255] range.",
- "type": "integer"
- },
+ "name": "enabled",
+ "description": "Whether rendering of local fonts is enabled.",
+ "type": "boolean"
+ }
+ ]
+ }
+ ],
+ "events": [
+ {
+ "name": "fontsUpdated",
+ "description": "Fires whenever a web font is updated. A non-empty font parameter indicates a successfully loaded\nweb font",
+ "parameters": [
{
- "name": "g",
- "description": "The green component, in the [0-255] range.",
- "type": "integer"
- },
+ "name": "font",
+ "description": "The web font that has loaded.",
+ "optional": true,
+ "$ref": "FontFace"
+ }
+ ]
+ },
+ {
+ "name": "mediaQueryResultChanged",
+ "description": "Fires whenever a MediaQuery result changes (for example, after a browser window has been\nresized.) The current implementation considers only viewport-dependent media features."
+ },
+ {
+ "name": "styleSheetAdded",
+ "description": "Fired whenever an active document stylesheet is added.",
+ "parameters": [
{
- "name": "b",
- "description": "The blue component, in the [0-255] range.",
- "type": "integer"
- },
+ "name": "header",
+ "description": "Added stylesheet metainfo.",
+ "$ref": "CSSStyleSheetHeader"
+ }
+ ]
+ },
+ {
+ "name": "styleSheetChanged",
+ "description": "Fired whenever a stylesheet is changed as a result of the client operation.",
+ "parameters": [
{
- "name": "a",
- "description": "The alpha component, in the [0-1] range (default: 1).",
- "optional": true,
- "type": "number"
+ "name": "styleSheetId",
+ "$ref": "StyleSheetId"
}
]
},
{
- "id": "Quad",
- "description": "An array of quad vertices, x immediately followed by y for each point, points clock-wise.",
- "type": "array",
- "items": {
- "type": "number"
- }
+ "name": "styleSheetRemoved",
+ "description": "Fired whenever an active document stylesheet is removed.",
+ "parameters": [
+ {
+ "name": "styleSheetId",
+ "description": "Identifier of the removed stylesheet.",
+ "$ref": "StyleSheetId"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "domain": "CacheStorage",
+ "experimental": true,
+ "types": [
+ {
+ "id": "CacheId",
+ "description": "Unique identifier of the Cache object.",
+ "type": "string"
},
{
- "id": "BoxModel",
- "description": "Box model.",
+ "id": "CachedResponseType",
+ "description": "type of HTTP response cached",
+ "type": "string",
+ "enum": [
+ "basic",
+ "cors",
+ "default",
+ "error",
+ "opaqueResponse",
+ "opaqueRedirect"
+ ]
+ },
+ {
+ "id": "DataEntry",
+ "description": "Data entry.",
"type": "object",
"properties": [
{
- "name": "content",
- "description": "Content box",
- "$ref": "Quad"
+ "name": "requestURL",
+ "description": "Request URL.",
+ "type": "string"
},
{
- "name": "padding",
- "description": "Padding box",
- "$ref": "Quad"
+ "name": "requestMethod",
+ "description": "Request method.",
+ "type": "string"
},
{
- "name": "border",
- "description": "Border box",
- "$ref": "Quad"
+ "name": "requestHeaders",
+ "description": "Request headers",
+ "type": "array",
+ "items": {
+ "$ref": "Header"
+ }
},
{
- "name": "margin",
- "description": "Margin box",
- "$ref": "Quad"
+ "name": "responseTime",
+ "description": "Number of seconds since epoch.",
+ "type": "number"
},
{
- "name": "width",
- "description": "Node width",
+ "name": "responseStatus",
+ "description": "HTTP response status code.",
"type": "integer"
},
{
- "name": "height",
- "description": "Node height",
- "type": "integer"
+ "name": "responseStatusText",
+ "description": "HTTP response status text.",
+ "type": "string"
},
{
- "name": "shapeOutside",
- "description": "Shape outside coordinates",
- "optional": true,
- "$ref": "ShapeOutsideInfo"
+ "name": "responseType",
+ "description": "HTTP response type",
+ "$ref": "CachedResponseType"
+ },
+ {
+ "name": "responseHeaders",
+ "description": "Response headers",
+ "type": "array",
+ "items": {
+ "$ref": "Header"
+ }
}
]
},
{
- "id": "ShapeOutsideInfo",
- "description": "CSS Shape Outside details.",
+ "id": "Cache",
+ "description": "Cache identifier.",
"type": "object",
"properties": [
{
- "name": "bounds",
- "description": "Shape bounds",
- "$ref": "Quad"
+ "name": "cacheId",
+ "description": "An opaque unique id of the cache.",
+ "$ref": "CacheId"
},
{
- "name": "shape",
- "description": "Shape coordinate details",
- "type": "array",
- "items": {
- "type": "any"
- }
+ "name": "securityOrigin",
+ "description": "Security origin of the cache.",
+ "type": "string"
},
{
- "name": "marginShape",
- "description": "Margin shape bounds",
- "type": "array",
- "items": {
- "type": "any"
- }
+ "name": "cacheName",
+ "description": "The name of the cache.",
+ "type": "string"
}
]
},
{
- "id": "Rect",
- "description": "Rectangle.",
+ "id": "Header",
"type": "object",
"properties": [
{
- "name": "x",
- "description": "X coordinate",
- "type": "number"
- },
- {
- "name": "y",
- "description": "Y coordinate",
- "type": "number"
+ "name": "name",
+ "type": "string"
},
{
- "name": "width",
- "description": "Rectangle width",
- "type": "number"
- },
+ "name": "value",
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "id": "CachedResponse",
+ "description": "Cached response",
+ "type": "object",
+ "properties": [
{
- "name": "height",
- "description": "Rectangle height",
- "type": "number"
+ "name": "body",
+ "description": "Entry content, base64-encoded. (Encoded as a base64 string when passed over JSON)",
+ "type": "string"
}
]
}
],
"commands": [
{
- "name": "collectClassNamesFromSubtree",
- "description": "Collects class names for the node with given id and all of it's child nodes.",
- "experimental": true,
+ "name": "deleteCache",
+ "description": "Deletes a cache.",
"parameters": [
{
- "name": "nodeId",
- "description": "Id of the node to collect class names.",
- "$ref": "NodeId"
+ "name": "cacheId",
+ "description": "Id of cache for deletion.",
+ "$ref": "CacheId"
+ }
+ ]
+ },
+ {
+ "name": "deleteEntry",
+ "description": "Deletes a cache entry.",
+ "parameters": [
+ {
+ "name": "cacheId",
+ "description": "Id of cache where the entry will be deleted.",
+ "$ref": "CacheId"
+ },
+ {
+ "name": "request",
+ "description": "URL spec of the request.",
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "name": "requestCacheNames",
+ "description": "Requests cache names.",
+ "parameters": [
+ {
+ "name": "securityOrigin",
+ "description": "Security origin.",
+ "type": "string"
}
],
"returns": [
{
- "name": "classNames",
- "description": "Class name list.",
+ "name": "caches",
+ "description": "Caches for the security origin.",
"type": "array",
"items": {
- "type": "string"
+ "$ref": "Cache"
}
}
]
},
{
- "name": "copyTo",
- "description": "Creates a deep copy of the specified node and places it into the target container before the\ngiven anchor.",
- "experimental": true,
+ "name": "requestCachedResponse",
+ "description": "Fetches cache entry.",
"parameters": [
{
- "name": "nodeId",
- "description": "Id of the node to copy.",
- "$ref": "NodeId"
+ "name": "cacheId",
+ "description": "Id of cache that contains the entry.",
+ "$ref": "CacheId"
},
{
- "name": "targetNodeId",
- "description": "Id of the element to drop the copy into.",
- "$ref": "NodeId"
+ "name": "requestURL",
+ "description": "URL spec of the request.",
+ "type": "string"
},
{
- "name": "insertBeforeNodeId",
- "description": "Drop the copy before this node (if absent, the copy becomes the last child of\n`targetNodeId`).",
- "optional": true,
- "$ref": "NodeId"
+ "name": "requestHeaders",
+ "description": "headers of the request.",
+ "type": "array",
+ "items": {
+ "$ref": "Header"
+ }
}
],
"returns": [
{
- "name": "nodeId",
- "description": "Id of the node clone.",
- "$ref": "NodeId"
+ "name": "response",
+ "description": "Response read from the cache.",
+ "$ref": "CachedResponse"
}
]
},
{
- "name": "describeNode",
- "description": "Describes node given its id, does not require domain to be enabled. Does not start tracking any\nobjects, can be used for automation.",
+ "name": "requestEntries",
+ "description": "Requests data from cache.",
"parameters": [
{
- "name": "nodeId",
- "description": "Identifier of the node.",
- "optional": true,
- "$ref": "NodeId"
- },
- {
- "name": "backendNodeId",
- "description": "Identifier of the backend node.",
- "optional": true,
- "$ref": "BackendNodeId"
+ "name": "cacheId",
+ "description": "ID of cache to get entries from.",
+ "$ref": "CacheId"
},
{
- "name": "objectId",
- "description": "JavaScript object id of the node wrapper.",
+ "name": "skipCount",
+ "description": "Number of records to skip.",
"optional": true,
- "$ref": "Runtime.RemoteObjectId"
+ "type": "integer"
},
{
- "name": "depth",
- "description": "The maximum depth at which children should be retrieved, defaults to 1. Use -1 for the\nentire subtree or provide an integer larger than 0.",
+ "name": "pageSize",
+ "description": "Number of records to fetch.",
"optional": true,
"type": "integer"
},
{
- "name": "pierce",
- "description": "Whether or not iframes and shadow roots should be traversed when returning the subtree\n(default is false).",
+ "name": "pathFilter",
+ "description": "If present, only return the entries containing this substring in the path",
"optional": true,
- "type": "boolean"
+ "type": "string"
}
],
"returns": [
{
- "name": "node",
- "description": "Node description.",
- "$ref": "Node"
+ "name": "cacheDataEntries",
+ "description": "Array of object store data entries.",
+ "type": "array",
+ "items": {
+ "$ref": "DataEntry"
+ }
+ },
+ {
+ "name": "returnCount",
+ "description": "Count of returned entries from this storage. If pathFilter is empty, it\nis the count of all entries from this storage.",
+ "type": "number"
}
]
- },
+ }
+ ]
+ },
+ {
+ "domain": "Cast",
+ "description": "A domain for interacting with Cast, Presentation API, and Remote Playback API\nfunctionalities.",
+ "experimental": true,
+ "types": [
{
- "name": "disable",
- "description": "Disables DOM agent for the given page."
- },
+ "id": "Sink",
+ "type": "object",
+ "properties": [
+ {
+ "name": "name",
+ "type": "string"
+ },
+ {
+ "name": "id",
+ "type": "string"
+ },
+ {
+ "name": "session",
+ "description": "Text describing the current session. Present only if there is an active\nsession on the sink.",
+ "optional": true,
+ "type": "string"
+ }
+ ]
+ }
+ ],
+ "commands": [
{
- "name": "discardSearchResults",
- "description": "Discards search results from the session with the given id. `getSearchResults` should no longer\nbe called for that search.",
- "experimental": true,
+ "name": "enable",
+ "description": "Starts observing for sinks that can be used for tab mirroring, and if set,\nsinks compatible with |presentationUrl| as well. When sinks are found, a\n|sinksUpdated| event is fired.\nAlso starts observing for issue messages. When an issue is added or removed,\nan |issueUpdated| event is fired.",
"parameters": [
{
- "name": "searchId",
- "description": "Unique search session identifier.",
+ "name": "presentationUrl",
+ "optional": true,
"type": "string"
}
]
},
{
- "name": "enable",
- "description": "Enables DOM agent for the given page."
+ "name": "disable",
+ "description": "Stops observing for sinks and issues."
},
{
- "name": "focus",
- "description": "Focuses the given element.",
+ "name": "setSinkToUse",
+ "description": "Sets a sink to be used when the web page requests the browser to choose a\nsink via Presentation API, Remote Playback API, or Cast SDK.",
"parameters": [
{
- "name": "nodeId",
- "description": "Identifier of the node.",
- "optional": true,
- "$ref": "NodeId"
- },
- {
- "name": "backendNodeId",
- "description": "Identifier of the backend node.",
- "optional": true,
- "$ref": "BackendNodeId"
- },
- {
- "name": "objectId",
- "description": "JavaScript object id of the node wrapper.",
- "optional": true,
- "$ref": "Runtime.RemoteObjectId"
+ "name": "sinkName",
+ "type": "string"
}
]
},
{
- "name": "getAttributes",
- "description": "Returns attributes for the specified node.",
+ "name": "startDesktopMirroring",
+ "description": "Starts mirroring the desktop to the sink.",
"parameters": [
{
- "name": "nodeId",
- "description": "Id of the node to retrieve attibutes for.",
- "$ref": "NodeId"
- }
- ],
- "returns": [
- {
- "name": "attributes",
- "description": "An interleaved array of node attribute names and values.",
- "type": "array",
- "items": {
- "type": "string"
- }
+ "name": "sinkName",
+ "type": "string"
}
]
},
{
- "name": "getBoxModel",
- "description": "Returns boxes for the given node.",
- "parameters": [
- {
- "name": "nodeId",
- "description": "Identifier of the node.",
- "optional": true,
- "$ref": "NodeId"
- },
- {
- "name": "backendNodeId",
- "description": "Identifier of the backend node.",
- "optional": true,
- "$ref": "BackendNodeId"
- },
- {
- "name": "objectId",
- "description": "JavaScript object id of the node wrapper.",
- "optional": true,
- "$ref": "Runtime.RemoteObjectId"
- }
- ],
- "returns": [
+ "name": "startTabMirroring",
+ "description": "Starts mirroring the tab to the sink.",
+ "parameters": [
{
- "name": "model",
- "description": "Box model for the node.",
- "$ref": "BoxModel"
+ "name": "sinkName",
+ "type": "string"
}
]
},
{
- "name": "getContentQuads",
- "description": "Returns quads that describe node position on the page. This method\nmight return multiple quads for inline nodes.",
- "experimental": true,
+ "name": "stopCasting",
+ "description": "Stops the active Cast session on the sink.",
"parameters": [
{
- "name": "nodeId",
- "description": "Identifier of the node.",
- "optional": true,
- "$ref": "NodeId"
- },
- {
- "name": "backendNodeId",
- "description": "Identifier of the backend node.",
- "optional": true,
- "$ref": "BackendNodeId"
- },
- {
- "name": "objectId",
- "description": "JavaScript object id of the node wrapper.",
- "optional": true,
- "$ref": "Runtime.RemoteObjectId"
+ "name": "sinkName",
+ "type": "string"
}
- ],
- "returns": [
+ ]
+ }
+ ],
+ "events": [
+ {
+ "name": "sinksUpdated",
+ "description": "This is fired whenever the list of available sinks changes. A sink is a\ndevice or a software surface that you can cast to.",
+ "parameters": [
{
- "name": "quads",
- "description": "Quads that describe node layout relative to viewport.",
+ "name": "sinks",
"type": "array",
"items": {
- "$ref": "Quad"
+ "$ref": "Sink"
}
}
]
},
{
- "name": "getDocument",
- "description": "Returns the root DOM node (and optionally the subtree) to the caller.",
+ "name": "issueUpdated",
+ "description": "This is fired whenever the outstanding issue/error message changes.\n|issueMessage| is empty if there is no issue.",
"parameters": [
{
- "name": "depth",
- "description": "The maximum depth at which children should be retrieved, defaults to 1. Use -1 for the\nentire subtree or provide an integer larger than 0.",
- "optional": true,
- "type": "integer"
- },
- {
- "name": "pierce",
- "description": "Whether or not iframes and shadow roots should be traversed when returning the subtree\n(default is false).",
- "optional": true,
- "type": "boolean"
- }
- ],
- "returns": [
- {
- "name": "root",
- "description": "Resulting node.",
- "$ref": "Node"
+ "name": "issueMessage",
+ "type": "string"
}
]
+ }
+ ]
+ },
+ {
+ "domain": "DOM",
+      "description": "This domain exposes DOM read/write operations. Each DOM Node is represented with its mirror object\nthat has an `id`. This `id` can be used to get additional information on the Node, resolve it into\nthe JavaScript object wrapper, etc. It is important that client receives DOM events only for the\nnodes that are known to the client. Backend keeps track of the nodes that were sent to the client\nand never sends the same node twice. It is client's responsibility to collect information about\nthe nodes that were sent to the client.Note that `iframe` owner elements will return\ncorresponding document elements as their child nodes.",
+ "dependencies": [
+ "Runtime"
+ ],
+ "types": [
+ {
+ "id": "NodeId",
+ "description": "Unique DOM node identifier.",
+ "type": "integer"
},
{
- "name": "getFlattenedDocument",
- "description": "Returns the root DOM node (and optionally the subtree) to the caller.",
- "parameters": [
- {
- "name": "depth",
- "description": "The maximum depth at which children should be retrieved, defaults to 1. Use -1 for the\nentire subtree or provide an integer larger than 0.",
- "optional": true,
- "type": "integer"
- },
- {
- "name": "pierce",
- "description": "Whether or not iframes and shadow roots should be traversed when returning the subtree\n(default is false).",
- "optional": true,
- "type": "boolean"
- }
- ],
- "returns": [
- {
- "name": "nodes",
- "description": "Resulting node.",
- "type": "array",
- "items": {
- "$ref": "Node"
- }
- }
- ]
+ "id": "BackendNodeId",
+ "description": "Unique DOM node identifier used to reference a node that may not have been pushed to the\nfront-end.",
+ "type": "integer"
},
{
- "name": "getNodeForLocation",
- "description": "Returns node id at given location. Depending on whether DOM domain is enabled, nodeId is\neither returned or not.",
- "experimental": true,
- "parameters": [
+ "id": "BackendNode",
+ "description": "Backend node with a friendly name.",
+ "type": "object",
+ "properties": [
{
- "name": "x",
- "description": "X coordinate.",
+ "name": "nodeType",
+ "description": "`Node`'s nodeType.",
"type": "integer"
},
{
- "name": "y",
- "description": "Y coordinate.",
- "type": "integer"
+ "name": "nodeName",
+ "description": "`Node`'s nodeName.",
+ "type": "string"
},
- {
- "name": "includeUserAgentShadowDOM",
- "description": "False to skip to the nearest non-UA shadow root ancestor (default: false).",
- "optional": true,
- "type": "boolean"
- }
- ],
- "returns": [
{
"name": "backendNodeId",
- "description": "Resulting node.",
"$ref": "BackendNodeId"
- },
- {
- "name": "nodeId",
- "description": "Id of the node at given coordinates, only when enabled and requested document.",
- "optional": true,
- "$ref": "NodeId"
}
]
},
{
- "name": "getOuterHTML",
- "description": "Returns node's HTML markup.",
- "parameters": [
+ "id": "PseudoType",
+ "description": "Pseudo element type.",
+ "type": "string",
+ "enum": [
+ "first-line",
+ "first-letter",
+ "before",
+ "after",
+ "marker",
+ "backdrop",
+ "selection",
+ "target-text",
+ "spelling-error",
+ "grammar-error",
+ "highlight",
+ "first-line-inherited",
+ "scrollbar",
+ "scrollbar-thumb",
+ "scrollbar-button",
+ "scrollbar-track",
+ "scrollbar-track-piece",
+ "scrollbar-corner",
+ "resizer",
+ "input-list-button",
+ "transition",
+ "transition-container",
+ "transition-old-content",
+ "transition-new-content"
+ ]
+ },
+ {
+ "id": "ShadowRootType",
+ "description": "Shadow root type.",
+ "type": "string",
+ "enum": [
+ "user-agent",
+ "open",
+ "closed"
+ ]
+ },
+ {
+ "id": "CompatibilityMode",
+ "description": "Document compatibility mode.",
+ "type": "string",
+ "enum": [
+ "QuirksMode",
+ "LimitedQuirksMode",
+ "NoQuirksMode"
+ ]
+ },
+ {
+ "id": "Node",
+ "description": "DOM interaction is implemented in terms of mirror objects that represent the actual DOM nodes.\nDOMNode is a base node mirror type.",
+ "type": "object",
+ "properties": [
{
"name": "nodeId",
- "description": "Identifier of the node.",
+ "description": "Node identifier that is passed into the rest of the DOM messages as the `nodeId`. Backend\nwill only push node with given `id` once. It is aware of all requested nodes and will only\nfire DOM events for nodes known to the client.",
+ "$ref": "NodeId"
+ },
+ {
+ "name": "parentId",
+ "description": "The id of the parent node if any.",
"optional": true,
"$ref": "NodeId"
},
{
"name": "backendNodeId",
- "description": "Identifier of the backend node.",
- "optional": true,
+ "description": "The BackendNodeId for this node.",
"$ref": "BackendNodeId"
},
{
- "name": "objectId",
- "description": "JavaScript object id of the node wrapper.",
- "optional": true,
- "$ref": "Runtime.RemoteObjectId"
- }
- ],
- "returns": [
+ "name": "nodeType",
+ "description": "`Node`'s nodeType.",
+ "type": "integer"
+ },
{
- "name": "outerHTML",
- "description": "Outer HTML markup.",
+ "name": "nodeName",
+ "description": "`Node`'s nodeName.",
"type": "string"
- }
- ]
- },
- {
- "name": "getRelayoutBoundary",
- "description": "Returns the id of the nearest ancestor that is a relayout boundary.",
- "experimental": true,
- "parameters": [
+ },
{
- "name": "nodeId",
- "description": "Id of the node.",
- "$ref": "NodeId"
- }
- ],
- "returns": [
+ "name": "localName",
+ "description": "`Node`'s localName.",
+ "type": "string"
+ },
{
- "name": "nodeId",
- "description": "Relayout boundary node id for the given node.",
- "$ref": "NodeId"
- }
- ]
- },
- {
- "name": "getSearchResults",
- "description": "Returns search results from given `fromIndex` to given `toIndex` from the search with the given\nidentifier.",
- "experimental": true,
- "parameters": [
- {
- "name": "searchId",
- "description": "Unique search session identifier.",
+ "name": "nodeValue",
+ "description": "`Node`'s nodeValue.",
"type": "string"
},
{
- "name": "fromIndex",
- "description": "Start index of the search result to be returned.",
+ "name": "childNodeCount",
+ "description": "Child count for `Container` nodes.",
+ "optional": true,
"type": "integer"
},
{
- "name": "toIndex",
- "description": "End index of the search result to be returned.",
- "type": "integer"
- }
- ],
- "returns": [
- {
- "name": "nodeIds",
- "description": "Ids of the search result nodes.",
+ "name": "children",
+ "description": "Child nodes of this node when requested with children.",
+ "optional": true,
"type": "array",
"items": {
- "$ref": "NodeId"
+ "$ref": "Node"
}
- }
- ]
- },
- {
- "name": "hideHighlight",
- "description": "Hides any highlight.",
- "redirect": "Overlay"
- },
- {
- "name": "highlightNode",
- "description": "Highlights DOM node.",
- "redirect": "Overlay"
- },
- {
- "name": "highlightRect",
- "description": "Highlights given rectangle.",
- "redirect": "Overlay"
- },
- {
- "name": "markUndoableState",
- "description": "Marks last undoable state.",
- "experimental": true
- },
- {
- "name": "moveTo",
- "description": "Moves node into the new container, places it before the given anchor.",
- "parameters": [
+ },
{
- "name": "nodeId",
- "description": "Id of the node to move.",
- "$ref": "NodeId"
+ "name": "attributes",
+ "description": "Attributes of the `Element` node in the form of flat array `[name1, value1, name2, value2]`.",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
},
{
- "name": "targetNodeId",
- "description": "Id of the element to drop the moved node into.",
- "$ref": "NodeId"
+ "name": "documentURL",
+ "description": "Document URL that `Document` or `FrameOwner` node points to.",
+ "optional": true,
+ "type": "string"
},
{
- "name": "insertBeforeNodeId",
- "description": "Drop node before this one (if absent, the moved node becomes the last child of\n`targetNodeId`).",
+ "name": "baseURL",
+ "description": "Base URL that `Document` or `FrameOwner` node uses for URL completion.",
"optional": true,
- "$ref": "NodeId"
- }
- ],
- "returns": [
+ "type": "string"
+ },
{
- "name": "nodeId",
- "description": "New id of the moved node.",
- "$ref": "NodeId"
- }
- ]
- },
- {
- "name": "performSearch",
- "description": "Searches for a given string in the DOM tree. Use `getSearchResults` to access search results or\n`cancelSearch` to end this search session.",
- "experimental": true,
- "parameters": [
+ "name": "publicId",
+ "description": "`DocumentType`'s publicId.",
+ "optional": true,
+ "type": "string"
+ },
{
- "name": "query",
- "description": "Plain text or query selector or XPath search query.",
+ "name": "systemId",
+ "description": "`DocumentType`'s systemId.",
+ "optional": true,
"type": "string"
},
{
- "name": "includeUserAgentShadowDOM",
- "description": "True to search in user agent shadow DOM.",
+ "name": "internalSubset",
+ "description": "`DocumentType`'s internalSubset.",
"optional": true,
- "type": "boolean"
- }
- ],
- "returns": [
+ "type": "string"
+ },
{
- "name": "searchId",
- "description": "Unique search session identifier.",
+ "name": "xmlVersion",
+ "description": "`Document`'s XML version in case of XML documents.",
+ "optional": true,
"type": "string"
},
{
- "name": "resultCount",
- "description": "Number of search results.",
- "type": "integer"
- }
- ]
- },
- {
- "name": "pushNodeByPathToFrontend",
- "description": "Requests that the node is sent to the caller given its path. // FIXME, use XPath",
- "experimental": true,
- "parameters": [
+ "name": "name",
+ "description": "`Attr`'s name.",
+ "optional": true,
+ "type": "string"
+ },
{
- "name": "path",
- "description": "Path to node in the proprietary format.",
+ "name": "value",
+ "description": "`Attr`'s value.",
+ "optional": true,
"type": "string"
- }
- ],
- "returns": [
+ },
{
- "name": "nodeId",
- "description": "Id of the node for given path.",
- "$ref": "NodeId"
- }
- ]
- },
- {
- "name": "pushNodesByBackendIdsToFrontend",
- "description": "Requests that a batch of nodes is sent to the caller given their backend node ids.",
- "experimental": true,
- "parameters": [
+ "name": "pseudoType",
+ "description": "Pseudo element type for this node.",
+ "optional": true,
+ "$ref": "PseudoType"
+ },
{
- "name": "backendNodeIds",
- "description": "The array of backend node ids.",
+ "name": "shadowRootType",
+ "description": "Shadow root type.",
+ "optional": true,
+ "$ref": "ShadowRootType"
+ },
+ {
+ "name": "frameId",
+ "description": "Frame ID for frame owner elements.",
+ "optional": true,
+ "$ref": "Page.FrameId"
+ },
+ {
+ "name": "contentDocument",
+ "description": "Content document for frame owner elements.",
+ "optional": true,
+ "$ref": "Node"
+ },
+ {
+ "name": "shadowRoots",
+ "description": "Shadow root list for given element host.",
+ "optional": true,
"type": "array",
"items": {
- "$ref": "BackendNodeId"
+ "$ref": "Node"
}
- }
- ],
- "returns": [
+ },
{
- "name": "nodeIds",
- "description": "The array of ids of pushed nodes that correspond to the backend ids specified in\nbackendNodeIds.",
+ "name": "templateContent",
+ "description": "Content document fragment for template elements.",
+ "optional": true,
+ "$ref": "Node"
+ },
+ {
+ "name": "pseudoElements",
+ "description": "Pseudo elements associated with this node.",
+ "optional": true,
"type": "array",
"items": {
- "$ref": "NodeId"
+ "$ref": "Node"
}
- }
- ]
- },
- {
- "name": "querySelector",
- "description": "Executes `querySelector` on a given node.",
- "parameters": [
+ },
{
- "name": "nodeId",
- "description": "Id of the node to query upon.",
- "$ref": "NodeId"
+ "name": "importedDocument",
+ "description": "Deprecated, as the HTML Imports API has been removed (crbug.com/937746).\nThis property used to return the imported document for the HTMLImport links.\nThe property is always undefined now.",
+ "deprecated": true,
+ "optional": true,
+ "$ref": "Node"
},
{
- "name": "selector",
- "description": "Selector string.",
- "type": "string"
- }
- ],
- "returns": [
+ "name": "distributedNodes",
+ "description": "Distributed nodes for given insertion point.",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "$ref": "BackendNode"
+ }
+ },
{
- "name": "nodeId",
- "description": "Query selector result.",
- "$ref": "NodeId"
+ "name": "isSVG",
+ "description": "Whether the node is SVG.",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "compatibilityMode",
+ "optional": true,
+ "$ref": "CompatibilityMode"
}
]
},
{
- "name": "querySelectorAll",
- "description": "Executes `querySelectorAll` on a given node.",
- "parameters": [
- {
- "name": "nodeId",
- "description": "Id of the node to query upon.",
- "$ref": "NodeId"
- },
+ "id": "RGBA",
+ "description": "A structure holding an RGBA color.",
+ "type": "object",
+ "properties": [
{
- "name": "selector",
- "description": "Selector string.",
- "type": "string"
- }
- ],
- "returns": [
+ "name": "r",
+ "description": "The red component, in the [0-255] range.",
+ "type": "integer"
+ },
{
- "name": "nodeIds",
- "description": "Query selector result.",
- "type": "array",
- "items": {
- "$ref": "NodeId"
- }
+ "name": "g",
+ "description": "The green component, in the [0-255] range.",
+ "type": "integer"
+ },
+ {
+ "name": "b",
+ "description": "The blue component, in the [0-255] range.",
+ "type": "integer"
+ },
+ {
+ "name": "a",
+ "description": "The alpha component, in the [0-1] range (default: 1).",
+ "optional": true,
+ "type": "number"
}
]
},
{
- "name": "redo",
- "description": "Re-does the last undone action.",
- "experimental": true
+ "id": "Quad",
+ "description": "An array of quad vertices, x immediately followed by y for each point, points clock-wise.",
+ "type": "array",
+ "items": {
+ "type": "number"
+ }
},
{
- "name": "removeAttribute",
- "description": "Removes attribute with given name from an element with given id.",
- "parameters": [
+ "id": "BoxModel",
+ "description": "Box model.",
+ "type": "object",
+ "properties": [
{
- "name": "nodeId",
- "description": "Id of the element to remove attribute from.",
- "$ref": "NodeId"
+ "name": "content",
+ "description": "Content box",
+ "$ref": "Quad"
},
{
- "name": "name",
- "description": "Name of the attribute to remove.",
- "type": "string"
- }
- ]
- },
- {
- "name": "removeNode",
- "description": "Removes node with given id.",
- "parameters": [
+ "name": "padding",
+ "description": "Padding box",
+ "$ref": "Quad"
+ },
{
- "name": "nodeId",
- "description": "Id of the node to remove.",
- "$ref": "NodeId"
- }
- ]
- },
- {
- "name": "requestChildNodes",
- "description": "Requests that children of the node with given id are returned to the caller in form of\n`setChildNodes` events where not only immediate children are retrieved, but all children down to\nthe specified depth.",
- "parameters": [
+ "name": "border",
+ "description": "Border box",
+ "$ref": "Quad"
+ },
{
- "name": "nodeId",
- "description": "Id of the node to get children for.",
- "$ref": "NodeId"
+ "name": "margin",
+ "description": "Margin box",
+ "$ref": "Quad"
},
{
- "name": "depth",
- "description": "The maximum depth at which children should be retrieved, defaults to 1. Use -1 for the\nentire subtree or provide an integer larger than 0.",
- "optional": true,
+ "name": "width",
+ "description": "Node width",
"type": "integer"
},
{
- "name": "pierce",
- "description": "Whether or not iframes and shadow roots should be traversed when returning the sub-tree\n(default is false).",
+ "name": "height",
+ "description": "Node height",
+ "type": "integer"
+ },
+ {
+ "name": "shapeOutside",
+ "description": "Shape outside coordinates",
"optional": true,
- "type": "boolean"
+ "$ref": "ShapeOutsideInfo"
}
]
},
{
- "name": "requestNode",
- "description": "Requests that the node is sent to the caller given the JavaScript node object reference. All\nnodes that form the path from the node to the root are also sent to the client as a series of\n`setChildNodes` notifications.",
- "parameters": [
+ "id": "ShapeOutsideInfo",
+ "description": "CSS Shape Outside details.",
+ "type": "object",
+ "properties": [
{
- "name": "objectId",
- "description": "JavaScript object id to convert into node.",
- "$ref": "Runtime.RemoteObjectId"
- }
- ],
- "returns": [
+ "name": "bounds",
+ "description": "Shape bounds",
+ "$ref": "Quad"
+ },
{
- "name": "nodeId",
- "description": "Node id for given object.",
- "$ref": "NodeId"
+ "name": "shape",
+ "description": "Shape coordinate details",
+ "type": "array",
+ "items": {
+ "type": "any"
+ }
+ },
+ {
+ "name": "marginShape",
+ "description": "Margin shape bounds",
+ "type": "array",
+ "items": {
+ "type": "any"
+ }
}
]
},
{
- "name": "resolveNode",
- "description": "Resolves the JavaScript node object for a given NodeId or BackendNodeId.",
- "parameters": [
+ "id": "Rect",
+ "description": "Rectangle.",
+ "type": "object",
+ "properties": [
{
- "name": "nodeId",
- "description": "Id of the node to resolve.",
- "optional": true,
- "$ref": "NodeId"
+ "name": "x",
+ "description": "X coordinate",
+ "type": "number"
},
{
- "name": "backendNodeId",
- "description": "Backend identifier of the node to resolve.",
- "optional": true,
- "$ref": "DOM.BackendNodeId"
+ "name": "y",
+ "description": "Y coordinate",
+ "type": "number"
},
{
- "name": "objectGroup",
- "description": "Symbolic group name that can be used to release multiple objects.",
- "optional": true,
- "type": "string"
+ "name": "width",
+ "description": "Rectangle width",
+ "type": "number"
},
{
- "name": "executionContextId",
- "description": "Execution context in which to resolve the node.",
- "optional": true,
- "$ref": "Runtime.ExecutionContextId"
- }
- ],
- "returns": [
- {
- "name": "object",
- "description": "JavaScript object wrapper for given node.",
- "$ref": "Runtime.RemoteObject"
+ "name": "height",
+ "description": "Rectangle height",
+ "type": "number"
}
]
},
{
- "name": "setAttributeValue",
- "description": "Sets attribute for an element with given id.",
- "parameters": [
- {
- "name": "nodeId",
- "description": "Id of the element to set attribute for.",
- "$ref": "NodeId"
- },
+ "id": "CSSComputedStyleProperty",
+ "type": "object",
+ "properties": [
{
"name": "name",
- "description": "Attribute name.",
+ "description": "Computed style property name.",
"type": "string"
},
{
"name": "value",
- "description": "Attribute value.",
+ "description": "Computed style property value.",
"type": "string"
}
]
+ }
+ ],
+ "commands": [
+ {
+ "name": "collectClassNamesFromSubtree",
+ "description": "Collects class names for the node with given id and all of it's child nodes.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "nodeId",
+ "description": "Id of the node to collect class names.",
+ "$ref": "NodeId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "classNames",
+ "description": "Class name list.",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ ]
},
{
- "name": "setAttributesAsText",
- "description": "Sets attributes on element with given id. This method is useful when user edits some existing\nattribute value and types in several attribute name/value pairs.",
+ "name": "copyTo",
+ "description": "Creates a deep copy of the specified node and places it into the target container before the\ngiven anchor.",
+ "experimental": true,
"parameters": [
{
"name": "nodeId",
- "description": "Id of the element to set attributes for.",
+ "description": "Id of the node to copy.",
"$ref": "NodeId"
},
{
- "name": "text",
- "description": "Text with a number of attributes. Will parse this text using HTML parser.",
- "type": "string"
+ "name": "targetNodeId",
+ "description": "Id of the element to drop the copy into.",
+ "$ref": "NodeId"
},
{
- "name": "name",
- "description": "Attribute name to replace with new attributes derived from text in case text parsed\nsuccessfully.",
+ "name": "insertBeforeNodeId",
+ "description": "Drop the copy before this node (if absent, the copy becomes the last child of\n`targetNodeId`).",
"optional": true,
- "type": "string"
+ "$ref": "NodeId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "nodeId",
+ "description": "Id of the node clone.",
+ "$ref": "NodeId"
}
]
},
{
- "name": "setFileInputFiles",
- "description": "Sets files for the given file input element.",
+ "name": "describeNode",
+ "description": "Describes node given its id, does not require domain to be enabled. Does not start tracking any\nobjects, can be used for automation.",
"parameters": [
- {
- "name": "files",
- "description": "Array of file paths to set.",
- "type": "array",
- "items": {
- "type": "string"
- }
- },
{
"name": "nodeId",
"description": "Identifier of the node.",
@@ -4148,263 +4876,414 @@
"description": "JavaScript object id of the node wrapper.",
"optional": true,
"$ref": "Runtime.RemoteObjectId"
- }
- ]
- },
- {
- "name": "getFileInfo",
- "description": "Returns file information for the given\nFile wrapper.",
- "experimental": true,
- "parameters": [
+ },
{
- "name": "objectId",
- "description": "JavaScript object id of the node wrapper.",
- "$ref": "Runtime.RemoteObjectId"
+ "name": "depth",
+ "description": "The maximum depth at which children should be retrieved, defaults to 1. Use -1 for the\nentire subtree or provide an integer larger than 0.",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "pierce",
+ "description": "Whether or not iframes and shadow roots should be traversed when returning the subtree\n(default is false).",
+ "optional": true,
+ "type": "boolean"
}
],
"returns": [
{
- "name": "path",
- "type": "string"
+ "name": "node",
+ "description": "Node description.",
+ "$ref": "Node"
}
]
},
{
- "name": "setInspectedNode",
- "description": "Enables console to refer to the node with given id via $x (see Command Line API for more details\n$x functions).",
+ "name": "scrollIntoViewIfNeeded",
+ "description": "Scrolls the specified rect of the given node into view if not already visible.\nNote: exactly one between nodeId, backendNodeId and objectId should be passed\nto identify the node.",
"experimental": true,
"parameters": [
{
"name": "nodeId",
- "description": "DOM node id to be accessible by means of $x command line API.",
+ "description": "Identifier of the node.",
+ "optional": true,
"$ref": "NodeId"
+ },
+ {
+ "name": "backendNodeId",
+ "description": "Identifier of the backend node.",
+ "optional": true,
+ "$ref": "BackendNodeId"
+ },
+ {
+ "name": "objectId",
+ "description": "JavaScript object id of the node wrapper.",
+ "optional": true,
+ "$ref": "Runtime.RemoteObjectId"
+ },
+ {
+ "name": "rect",
+ "description": "The rect to be scrolled into view, relative to the node's border box, in CSS pixels.\nWhen omitted, center of the node will be used, similar to Element.scrollIntoView.",
+ "optional": true,
+ "$ref": "Rect"
}
]
},
{
- "name": "setNodeName",
- "description": "Sets node name for a node with given id.",
+ "name": "disable",
+ "description": "Disables DOM agent for the given page."
+ },
+ {
+ "name": "discardSearchResults",
+ "description": "Discards search results from the session with the given id. `getSearchResults` should no longer\nbe called for that search.",
+ "experimental": true,
"parameters": [
{
- "name": "nodeId",
- "description": "Id of the node to set name for.",
- "$ref": "NodeId"
- },
- {
- "name": "name",
- "description": "New node's name.",
+ "name": "searchId",
+ "description": "Unique search session identifier.",
"type": "string"
}
- ],
- "returns": [
+ ]
+ },
+ {
+ "name": "enable",
+ "description": "Enables DOM agent for the given page.",
+ "parameters": [
{
- "name": "nodeId",
- "description": "New node's id.",
- "$ref": "NodeId"
+ "name": "includeWhitespace",
+ "description": "Whether to include whitespaces in the children array of returned Nodes.",
+ "experimental": true,
+ "optional": true,
+ "type": "string",
+ "enum": [
+ "none",
+ "all"
+ ]
}
]
},
{
- "name": "setNodeValue",
- "description": "Sets node value for a node with given id.",
+ "name": "focus",
+ "description": "Focuses the given element.",
"parameters": [
{
"name": "nodeId",
- "description": "Id of the node to set value for.",
+ "description": "Identifier of the node.",
+ "optional": true,
"$ref": "NodeId"
},
{
- "name": "value",
- "description": "New node's value.",
- "type": "string"
+ "name": "backendNodeId",
+ "description": "Identifier of the backend node.",
+ "optional": true,
+ "$ref": "BackendNodeId"
+ },
+ {
+ "name": "objectId",
+ "description": "JavaScript object id of the node wrapper.",
+ "optional": true,
+ "$ref": "Runtime.RemoteObjectId"
}
]
},
{
- "name": "setOuterHTML",
- "description": "Sets node HTML markup, returns new node id.",
+ "name": "getAttributes",
+ "description": "Returns attributes for the specified node.",
"parameters": [
{
"name": "nodeId",
- "description": "Id of the node to set markup for.",
+ "description": "Id of the node to retrieve attibutes for.",
"$ref": "NodeId"
- },
+ }
+ ],
+ "returns": [
{
- "name": "outerHTML",
- "description": "Outer HTML markup to set.",
- "type": "string"
+ "name": "attributes",
+ "description": "An interleaved array of node attribute names and values.",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
}
]
},
{
- "name": "undo",
- "description": "Undoes the last performed action.",
- "experimental": true
- },
- {
- "name": "getFrameOwner",
- "description": "Returns iframe node that owns iframe with the given domain.",
- "experimental": true,
+ "name": "getBoxModel",
+ "description": "Returns boxes for the given node.",
"parameters": [
{
- "name": "frameId",
- "$ref": "Page.FrameId"
- }
- ],
- "returns": [
+ "name": "nodeId",
+ "description": "Identifier of the node.",
+ "optional": true,
+ "$ref": "NodeId"
+ },
{
"name": "backendNodeId",
- "description": "Resulting node.",
+ "description": "Identifier of the backend node.",
+ "optional": true,
"$ref": "BackendNodeId"
},
{
- "name": "nodeId",
- "description": "Id of the node at given coordinates, only when enabled and requested document.",
+ "name": "objectId",
+ "description": "JavaScript object id of the node wrapper.",
"optional": true,
- "$ref": "NodeId"
+ "$ref": "Runtime.RemoteObjectId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "model",
+ "description": "Box model for the node.",
+ "$ref": "BoxModel"
}
]
- }
- ],
- "events": [
+ },
{
- "name": "attributeModified",
- "description": "Fired when `Element`'s attribute is modified.",
+ "name": "getContentQuads",
+ "description": "Returns quads that describe node position on the page. This method\nmight return multiple quads for inline nodes.",
+ "experimental": true,
"parameters": [
{
"name": "nodeId",
- "description": "Id of the node that has changed.",
+ "description": "Identifier of the node.",
+ "optional": true,
"$ref": "NodeId"
},
{
- "name": "name",
- "description": "Attribute name.",
- "type": "string"
+ "name": "backendNodeId",
+ "description": "Identifier of the backend node.",
+ "optional": true,
+ "$ref": "BackendNodeId"
},
{
- "name": "value",
- "description": "Attribute value.",
- "type": "string"
+ "name": "objectId",
+ "description": "JavaScript object id of the node wrapper.",
+ "optional": true,
+ "$ref": "Runtime.RemoteObjectId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "quads",
+ "description": "Quads that describe node layout relative to viewport.",
+ "type": "array",
+ "items": {
+ "$ref": "Quad"
+ }
}
]
},
{
- "name": "attributeRemoved",
- "description": "Fired when `Element`'s attribute is removed.",
+ "name": "getDocument",
+ "description": "Returns the root DOM node (and optionally the subtree) to the caller.",
"parameters": [
{
- "name": "nodeId",
- "description": "Id of the node that has changed.",
- "$ref": "NodeId"
+ "name": "depth",
+ "description": "The maximum depth at which children should be retrieved, defaults to 1. Use -1 for the\nentire subtree or provide an integer larger than 0.",
+ "optional": true,
+ "type": "integer"
},
{
- "name": "name",
- "description": "A ttribute name.",
- "type": "string"
+ "name": "pierce",
+ "description": "Whether or not iframes and shadow roots should be traversed when returning the subtree\n(default is false).",
+ "optional": true,
+ "type": "boolean"
+ }
+ ],
+ "returns": [
+ {
+ "name": "root",
+ "description": "Resulting node.",
+ "$ref": "Node"
}
]
},
{
- "name": "characterDataModified",
- "description": "Mirrors `DOMCharacterDataModified` event.",
+ "name": "getFlattenedDocument",
+ "description": "Returns the root DOM node (and optionally the subtree) to the caller.\nDeprecated, as it is not designed to work well with the rest of the DOM agent.\nUse DOMSnapshot.captureSnapshot instead.",
+ "deprecated": true,
"parameters": [
{
- "name": "nodeId",
- "description": "Id of the node that has changed.",
- "$ref": "NodeId"
+ "name": "depth",
+ "description": "The maximum depth at which children should be retrieved, defaults to 1. Use -1 for the\nentire subtree or provide an integer larger than 0.",
+ "optional": true,
+ "type": "integer"
},
{
- "name": "characterData",
- "description": "New text value.",
- "type": "string"
+ "name": "pierce",
+ "description": "Whether or not iframes and shadow roots should be traversed when returning the subtree\n(default is false).",
+ "optional": true,
+ "type": "boolean"
+ }
+ ],
+ "returns": [
+ {
+ "name": "nodes",
+ "description": "Resulting node.",
+ "type": "array",
+ "items": {
+ "$ref": "Node"
+ }
}
]
},
{
- "name": "childNodeCountUpdated",
- "description": "Fired when `Container`'s child node count has changed.",
+ "name": "getNodesForSubtreeByStyle",
+ "description": "Finds nodes with a given computed style in a subtree.",
+ "experimental": true,
"parameters": [
{
"name": "nodeId",
- "description": "Id of the node that has changed.",
+ "description": "Node ID pointing to the root of a subtree.",
"$ref": "NodeId"
},
{
- "name": "childNodeCount",
- "description": "New node count.",
- "type": "integer"
- }
- ]
- },
- {
- "name": "childNodeInserted",
- "description": "Mirrors `DOMNodeInserted` event.",
- "parameters": [
- {
- "name": "parentNodeId",
- "description": "Id of the node that has changed.",
- "$ref": "NodeId"
+ "name": "computedStyles",
+ "description": "The style to filter nodes by (includes nodes if any of properties matches).",
+ "type": "array",
+ "items": {
+ "$ref": "CSSComputedStyleProperty"
+ }
},
{
- "name": "previousNodeId",
- "description": "If of the previous siblint.",
- "$ref": "NodeId"
- },
+ "name": "pierce",
+ "description": "Whether or not iframes and shadow roots in the same target should be traversed when returning the\nresults (default is false).",
+ "optional": true,
+ "type": "boolean"
+ }
+ ],
+ "returns": [
{
- "name": "node",
- "description": "Inserted node data.",
- "$ref": "Node"
+ "name": "nodeIds",
+ "description": "Resulting nodes.",
+ "type": "array",
+ "items": {
+ "$ref": "NodeId"
+ }
}
]
},
{
- "name": "childNodeRemoved",
- "description": "Mirrors `DOMNodeRemoved` event.",
+ "name": "getNodeForLocation",
+ "description": "Returns node id at given location. Depending on whether DOM domain is enabled, nodeId is\neither returned or not.",
"parameters": [
{
- "name": "parentNodeId",
- "description": "Parent id.",
- "$ref": "NodeId"
+ "name": "x",
+ "description": "X coordinate.",
+ "type": "integer"
+ },
+ {
+ "name": "y",
+ "description": "Y coordinate.",
+ "type": "integer"
+ },
+ {
+ "name": "includeUserAgentShadowDOM",
+ "description": "False to skip to the nearest non-UA shadow root ancestor (default: false).",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "ignorePointerEventsNone",
+ "description": "Whether to ignore pointer-events: none on elements and hit test them.",
+ "optional": true,
+ "type": "boolean"
+ }
+ ],
+ "returns": [
+ {
+ "name": "backendNodeId",
+ "description": "Resulting node.",
+ "$ref": "BackendNodeId"
+ },
+ {
+ "name": "frameId",
+ "description": "Frame this node belongs to.",
+ "$ref": "Page.FrameId"
},
{
"name": "nodeId",
- "description": "Id of the node that has been removed.",
+ "description": "Id of the node at given coordinates, only when enabled and requested document.",
+ "optional": true,
"$ref": "NodeId"
}
]
},
{
- "name": "distributedNodesUpdated",
- "description": "Called when distrubution is changed.",
- "experimental": true,
+ "name": "getOuterHTML",
+ "description": "Returns node's HTML markup.",
"parameters": [
{
- "name": "insertionPointId",
- "description": "Insertion point where distrubuted nodes were updated.",
+ "name": "nodeId",
+ "description": "Identifier of the node.",
+ "optional": true,
"$ref": "NodeId"
},
{
- "name": "distributedNodes",
- "description": "Distributed nodes for given insertion point.",
- "type": "array",
- "items": {
- "$ref": "BackendNode"
- }
+ "name": "backendNodeId",
+ "description": "Identifier of the backend node.",
+ "optional": true,
+ "$ref": "BackendNodeId"
+ },
+ {
+ "name": "objectId",
+ "description": "JavaScript object id of the node wrapper.",
+ "optional": true,
+ "$ref": "Runtime.RemoteObjectId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "outerHTML",
+ "description": "Outer HTML markup.",
+ "type": "string"
}
]
},
{
- "name": "documentUpdated",
- "description": "Fired when `Document` has been totally updated. Node ids are no longer valid."
+ "name": "getRelayoutBoundary",
+ "description": "Returns the id of the nearest ancestor that is a relayout boundary.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "nodeId",
+ "description": "Id of the node.",
+ "$ref": "NodeId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "nodeId",
+ "description": "Relayout boundary node id for the given node.",
+ "$ref": "NodeId"
+ }
+ ]
},
{
- "name": "inlineStyleInvalidated",
- "description": "Fired when `Element`'s inline style is modified via a CSS property modification.",
+ "name": "getSearchResults",
+ "description": "Returns search results from given `fromIndex` to given `toIndex` from the search with the given\nidentifier.",
"experimental": true,
"parameters": [
+ {
+ "name": "searchId",
+ "description": "Unique search session identifier.",
+ "type": "string"
+ },
+ {
+ "name": "fromIndex",
+ "description": "Start index of the search result to be returned.",
+ "type": "integer"
+ },
+ {
+ "name": "toIndex",
+ "description": "End index of the search result to be returned.",
+ "type": "integer"
+ }
+ ],
+ "returns": [
{
"name": "nodeIds",
- "description": "Ids of the nodes for which the inline styles have been invalidated.",
+ "description": "Ids of the search result nodes.",
"type": "array",
"items": {
"$ref": "NodeId"
@@ -4413,5505 +5292,7898 @@
]
},
{
- "name": "pseudoElementAdded",
- "description": "Called when a pseudo element is added to an element.",
- "experimental": true,
+ "name": "hideHighlight",
+ "description": "Hides any highlight.",
+ "redirect": "Overlay"
+ },
+ {
+ "name": "highlightNode",
+ "description": "Highlights DOM node.",
+ "redirect": "Overlay"
+ },
+ {
+ "name": "highlightRect",
+ "description": "Highlights given rectangle.",
+ "redirect": "Overlay"
+ },
+ {
+ "name": "markUndoableState",
+ "description": "Marks last undoable state.",
+ "experimental": true
+ },
+ {
+ "name": "moveTo",
+ "description": "Moves node into the new container, places it before the given anchor.",
"parameters": [
{
- "name": "parentId",
- "description": "Pseudo element's parent element id.",
+ "name": "nodeId",
+ "description": "Id of the node to move.",
"$ref": "NodeId"
},
{
- "name": "pseudoElement",
- "description": "The added pseudo element.",
- "$ref": "Node"
+ "name": "targetNodeId",
+ "description": "Id of the element to drop the moved node into.",
+ "$ref": "NodeId"
+ },
+ {
+ "name": "insertBeforeNodeId",
+ "description": "Drop node before this one (if absent, the moved node becomes the last child of\n`targetNodeId`).",
+ "optional": true,
+ "$ref": "NodeId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "nodeId",
+ "description": "New id of the moved node.",
+ "$ref": "NodeId"
}
]
},
{
- "name": "pseudoElementRemoved",
- "description": "Called when a pseudo element is removed from an element.",
+ "name": "performSearch",
+ "description": "Searches for a given string in the DOM tree. Use `getSearchResults` to access search results or\n`cancelSearch` to end this search session.",
"experimental": true,
"parameters": [
{
- "name": "parentId",
- "description": "Pseudo element's parent element id.",
- "$ref": "NodeId"
+ "name": "query",
+ "description": "Plain text or query selector or XPath search query.",
+ "type": "string"
},
{
- "name": "pseudoElementId",
- "description": "The removed pseudo element id.",
- "$ref": "NodeId"
+ "name": "includeUserAgentShadowDOM",
+ "description": "True to search in user agent shadow DOM.",
+ "optional": true,
+ "type": "boolean"
}
- ]
- },
- {
- "name": "setChildNodes",
- "description": "Fired when backend wants to provide client with the missing DOM structure. This happens upon\nmost of the calls requesting node ids.",
- "parameters": [
+ ],
+ "returns": [
{
- "name": "parentId",
- "description": "Parent node id to populate with children.",
- "$ref": "NodeId"
+ "name": "searchId",
+ "description": "Unique search session identifier.",
+ "type": "string"
},
{
- "name": "nodes",
- "description": "Child nodes array.",
- "type": "array",
- "items": {
- "$ref": "Node"
- }
+ "name": "resultCount",
+ "description": "Number of search results.",
+ "type": "integer"
}
]
},
{
- "name": "shadowRootPopped",
- "description": "Called when shadow root is popped from the element.",
+ "name": "pushNodeByPathToFrontend",
+ "description": "Requests that the node is sent to the caller given its path. // FIXME, use XPath",
"experimental": true,
"parameters": [
{
- "name": "hostId",
- "description": "Host element id.",
- "$ref": "NodeId"
- },
+ "name": "path",
+ "description": "Path to node in the proprietary format.",
+ "type": "string"
+ }
+ ],
+ "returns": [
{
- "name": "rootId",
- "description": "Shadow root id.",
+ "name": "nodeId",
+ "description": "Id of the node for given path.",
"$ref": "NodeId"
}
]
},
{
- "name": "shadowRootPushed",
- "description": "Called when shadow root is pushed into the element.",
+ "name": "pushNodesByBackendIdsToFrontend",
+ "description": "Requests that a batch of nodes is sent to the caller given their backend node ids.",
"experimental": true,
"parameters": [
{
- "name": "hostId",
- "description": "Host element id.",
- "$ref": "NodeId"
- },
+ "name": "backendNodeIds",
+ "description": "The array of backend node ids.",
+ "type": "array",
+ "items": {
+ "$ref": "BackendNodeId"
+ }
+ }
+ ],
+ "returns": [
{
- "name": "root",
- "description": "Shadow root.",
- "$ref": "Node"
+ "name": "nodeIds",
+ "description": "The array of ids of pushed nodes that correspond to the backend ids specified in\nbackendNodeIds.",
+ "type": "array",
+ "items": {
+ "$ref": "NodeId"
+ }
}
]
- }
- ]
- },
- {
- "domain": "DOMDebugger",
- "description": "DOM debugging allows setting breakpoints on particular DOM operations and events. JavaScript\nexecution will stop on these operations as if there was a regular breakpoint set.",
- "dependencies": [
- "DOM",
- "Debugger",
- "Runtime"
- ],
- "types": [
- {
- "id": "DOMBreakpointType",
- "description": "DOM breakpoint type.",
- "type": "string",
- "enum": [
- "subtree-modified",
- "attribute-modified",
- "node-removed"
- ]
},
{
- "id": "EventListener",
- "description": "Object event listener.",
- "type": "object",
- "properties": [
- {
- "name": "type",
- "description": "`EventListener`'s type.",
- "type": "string"
- },
- {
- "name": "useCapture",
- "description": "`EventListener`'s useCapture.",
- "type": "boolean"
- },
- {
- "name": "passive",
- "description": "`EventListener`'s passive flag.",
- "type": "boolean"
- },
- {
- "name": "once",
- "description": "`EventListener`'s once flag.",
- "type": "boolean"
- },
- {
- "name": "scriptId",
- "description": "Script id of the handler code.",
- "$ref": "Runtime.ScriptId"
- },
- {
- "name": "lineNumber",
- "description": "Line number in the script (0-based).",
- "type": "integer"
- },
- {
- "name": "columnNumber",
- "description": "Column number in the script (0-based).",
- "type": "integer"
- },
+ "name": "querySelector",
+ "description": "Executes `querySelector` on a given node.",
+ "parameters": [
{
- "name": "handler",
- "description": "Event handler function value.",
- "optional": true,
- "$ref": "Runtime.RemoteObject"
+ "name": "nodeId",
+ "description": "Id of the node to query upon.",
+ "$ref": "NodeId"
},
{
- "name": "originalHandler",
- "description": "Event original handler function value.",
- "optional": true,
- "$ref": "Runtime.RemoteObject"
- },
+ "name": "selector",
+ "description": "Selector string.",
+ "type": "string"
+ }
+ ],
+ "returns": [
{
- "name": "backendNodeId",
- "description": "Node the listener is added to (if any).",
- "optional": true,
- "$ref": "DOM.BackendNodeId"
+ "name": "nodeId",
+ "description": "Query selector result.",
+ "$ref": "NodeId"
}
]
- }
- ],
- "commands": [
+ },
{
- "name": "getEventListeners",
- "description": "Returns event listeners of the given object.",
+ "name": "querySelectorAll",
+ "description": "Executes `querySelectorAll` on a given node.",
"parameters": [
{
- "name": "objectId",
- "description": "Identifier of the object to return listeners for.",
- "$ref": "Runtime.RemoteObjectId"
- },
- {
- "name": "depth",
- "description": "The maximum depth at which Node children should be retrieved, defaults to 1. Use -1 for the\nentire subtree or provide an integer larger than 0.",
- "optional": true,
- "type": "integer"
+ "name": "nodeId",
+ "description": "Id of the node to query upon.",
+ "$ref": "NodeId"
},
{
- "name": "pierce",
- "description": "Whether or not iframes and shadow roots should be traversed when returning the subtree\n(default is false). Reports listeners for all contexts if pierce is enabled.",
- "optional": true,
- "type": "boolean"
+ "name": "selector",
+ "description": "Selector string.",
+ "type": "string"
}
],
"returns": [
{
- "name": "listeners",
- "description": "Array of relevant listeners.",
+ "name": "nodeIds",
+ "description": "Query selector result.",
"type": "array",
"items": {
- "$ref": "EventListener"
+ "$ref": "NodeId"
}
}
]
},
{
- "name": "removeDOMBreakpoint",
- "description": "Removes DOM breakpoint that was set using `setDOMBreakpoint`.",
+ "name": "redo",
+ "description": "Re-does the last undone action.",
+ "experimental": true
+ },
+ {
+ "name": "removeAttribute",
+ "description": "Removes attribute with given name from an element with given id.",
"parameters": [
{
"name": "nodeId",
- "description": "Identifier of the node to remove breakpoint from.",
- "$ref": "DOM.NodeId"
+ "description": "Id of the element to remove attribute from.",
+ "$ref": "NodeId"
},
{
- "name": "type",
- "description": "Type of the breakpoint to remove.",
- "$ref": "DOMBreakpointType"
+ "name": "name",
+ "description": "Name of the attribute to remove.",
+ "type": "string"
}
]
},
{
- "name": "removeEventListenerBreakpoint",
- "description": "Removes breakpoint on particular DOM event.",
+ "name": "removeNode",
+ "description": "Removes node with given id.",
"parameters": [
{
- "name": "eventName",
- "description": "Event name.",
- "type": "string"
- },
- {
- "name": "targetName",
- "description": "EventTarget interface name.",
- "experimental": true,
- "optional": true,
- "type": "string"
+ "name": "nodeId",
+ "description": "Id of the node to remove.",
+ "$ref": "NodeId"
}
]
},
{
- "name": "removeInstrumentationBreakpoint",
- "description": "Removes breakpoint on particular native event.",
- "experimental": true,
+ "name": "requestChildNodes",
+ "description": "Requests that children of the node with given id are returned to the caller in form of\n`setChildNodes` events where not only immediate children are retrieved, but all children down to\nthe specified depth.",
"parameters": [
{
- "name": "eventName",
- "description": "Instrumentation name to stop on.",
- "type": "string"
+ "name": "nodeId",
+ "description": "Id of the node to get children for.",
+ "$ref": "NodeId"
+ },
+ {
+ "name": "depth",
+ "description": "The maximum depth at which children should be retrieved, defaults to 1. Use -1 for the\nentire subtree or provide an integer larger than 0.",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "pierce",
+ "description": "Whether or not iframes and shadow roots should be traversed when returning the sub-tree\n(default is false).",
+ "optional": true,
+ "type": "boolean"
}
]
},
{
- "name": "removeXHRBreakpoint",
- "description": "Removes breakpoint from XMLHttpRequest.",
+ "name": "requestNode",
+ "description": "Requests that the node is sent to the caller given the JavaScript node object reference. All\nnodes that form the path from the node to the root are also sent to the client as a series of\n`setChildNodes` notifications.",
"parameters": [
{
- "name": "url",
- "description": "Resource URL substring.",
- "type": "string"
+ "name": "objectId",
+ "description": "JavaScript object id to convert into node.",
+ "$ref": "Runtime.RemoteObjectId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "nodeId",
+ "description": "Node id for given object.",
+ "$ref": "NodeId"
}
]
},
{
- "name": "setDOMBreakpoint",
- "description": "Sets breakpoint on particular operation with DOM.",
+ "name": "resolveNode",
+ "description": "Resolves the JavaScript node object for a given NodeId or BackendNodeId.",
"parameters": [
{
"name": "nodeId",
- "description": "Identifier of the node to set breakpoint on.",
- "$ref": "DOM.NodeId"
+ "description": "Id of the node to resolve.",
+ "optional": true,
+ "$ref": "NodeId"
},
{
- "name": "type",
- "description": "Type of the operation to stop upon.",
- "$ref": "DOMBreakpointType"
- }
- ]
- },
- {
- "name": "setEventListenerBreakpoint",
- "description": "Sets breakpoint on particular DOM event.",
- "parameters": [
+ "name": "backendNodeId",
+ "description": "Backend identifier of the node to resolve.",
+ "optional": true,
+ "$ref": "DOM.BackendNodeId"
+ },
{
- "name": "eventName",
- "description": "DOM Event name to stop on (any DOM event will do).",
+ "name": "objectGroup",
+ "description": "Symbolic group name that can be used to release multiple objects.",
+ "optional": true,
"type": "string"
},
{
- "name": "targetName",
- "description": "EventTarget interface name to stop on. If equal to `\"*\"` or not provided, will stop on any\nEventTarget.",
- "experimental": true,
+ "name": "executionContextId",
+ "description": "Execution context in which to resolve the node.",
"optional": true,
- "type": "string"
+ "$ref": "Runtime.ExecutionContextId"
}
- ]
- },
- {
- "name": "setInstrumentationBreakpoint",
- "description": "Sets breakpoint on particular native event.",
- "experimental": true,
- "parameters": [
+ ],
+ "returns": [
{
- "name": "eventName",
- "description": "Instrumentation name to stop on.",
- "type": "string"
+ "name": "object",
+ "description": "JavaScript object wrapper for given node.",
+ "$ref": "Runtime.RemoteObject"
}
]
},
{
- "name": "setXHRBreakpoint",
- "description": "Sets breakpoint on XMLHttpRequest.",
+ "name": "setAttributeValue",
+ "description": "Sets attribute for an element with given id.",
"parameters": [
{
- "name": "url",
- "description": "Resource URL substring. All XHRs having this substring in the URL will get stopped upon.",
- "type": "string"
- }
- ]
- }
- ]
- },
- {
- "domain": "DOMSnapshot",
- "description": "This domain facilitates obtaining document snapshots with DOM, layout, and style information.",
- "experimental": true,
- "dependencies": [
- "CSS",
- "DOM",
- "DOMDebugger",
- "Page"
- ],
- "types": [
- {
- "id": "DOMNode",
- "description": "A Node in the DOM tree.",
- "type": "object",
- "properties": [
- {
- "name": "nodeType",
- "description": "`Node`'s nodeType.",
- "type": "integer"
+ "name": "nodeId",
+ "description": "Id of the element to set attribute for.",
+ "$ref": "NodeId"
},
{
- "name": "nodeName",
- "description": "`Node`'s nodeName.",
+ "name": "name",
+ "description": "Attribute name.",
"type": "string"
},
{
- "name": "nodeValue",
- "description": "`Node`'s nodeValue.",
+ "name": "value",
+ "description": "Attribute value.",
"type": "string"
- },
+ }
+ ]
+ },
+ {
+ "name": "setAttributesAsText",
+ "description": "Sets attributes on element with given id. This method is useful when user edits some existing\nattribute value and types in several attribute name/value pairs.",
+ "parameters": [
{
- "name": "textValue",
- "description": "Only set for textarea elements, contains the text value.",
- "optional": true,
- "type": "string"
+ "name": "nodeId",
+ "description": "Id of the element to set attributes for.",
+ "$ref": "NodeId"
},
{
- "name": "inputValue",
- "description": "Only set for input elements, contains the input's associated text value.",
- "optional": true,
+ "name": "text",
+ "description": "Text with a number of attributes. Will parse this text using HTML parser.",
"type": "string"
},
{
- "name": "inputChecked",
- "description": "Only set for radio and checkbox input elements, indicates if the element has been checked",
- "optional": true,
- "type": "boolean"
- },
- {
- "name": "optionSelected",
- "description": "Only set for option elements, indicates if the element has been selected",
- "optional": true,
- "type": "boolean"
- },
- {
- "name": "backendNodeId",
- "description": "`Node`'s id, corresponds to DOM.Node.backendNodeId.",
- "$ref": "DOM.BackendNodeId"
- },
- {
- "name": "childNodeIndexes",
- "description": "The indexes of the node's child nodes in the `domNodes` array returned by `getSnapshot`, if\nany.",
+ "name": "name",
+ "description": "Attribute name to replace with new attributes derived from text in case text parsed\nsuccessfully.",
"optional": true,
- "type": "array",
- "items": {
- "type": "integer"
- }
- },
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "name": "setFileInputFiles",
+ "description": "Sets files for the given file input element.",
+ "parameters": [
{
- "name": "attributes",
- "description": "Attributes of an `Element` node.",
- "optional": true,
+ "name": "files",
+ "description": "Array of file paths to set.",
"type": "array",
"items": {
- "$ref": "NameValue"
+ "type": "string"
}
},
{
- "name": "pseudoElementIndexes",
- "description": "Indexes of pseudo elements associated with this node in the `domNodes` array returned by\n`getSnapshot`, if any.",
+ "name": "nodeId",
+ "description": "Identifier of the node.",
"optional": true,
- "type": "array",
- "items": {
- "type": "integer"
- }
+ "$ref": "NodeId"
},
{
- "name": "layoutNodeIndex",
- "description": "The index of the node's related layout tree node in the `layoutTreeNodes` array returned by\n`getSnapshot`, if any.",
+ "name": "backendNodeId",
+ "description": "Identifier of the backend node.",
"optional": true,
- "type": "integer"
+ "$ref": "BackendNodeId"
},
{
- "name": "documentURL",
- "description": "Document URL that `Document` or `FrameOwner` node points to.",
+ "name": "objectId",
+ "description": "JavaScript object id of the node wrapper.",
"optional": true,
- "type": "string"
- },
+ "$ref": "Runtime.RemoteObjectId"
+ }
+ ]
+ },
+ {
+ "name": "setNodeStackTracesEnabled",
+ "description": "Sets if stack traces should be captured for Nodes. See `Node.getNodeStackTraces`. Default is disabled.",
+ "experimental": true,
+ "parameters": [
{
- "name": "baseURL",
- "description": "Base URL that `Document` or `FrameOwner` node uses for URL completion.",
- "optional": true,
- "type": "string"
- },
+ "name": "enable",
+ "description": "Enable or disable.",
+ "type": "boolean"
+ }
+ ]
+ },
+ {
+ "name": "getNodeStackTraces",
+ "description": "Gets stack traces associated with a Node. As of now, only provides stack trace for Node creation.",
+ "experimental": true,
+ "parameters": [
{
- "name": "contentLanguage",
- "description": "Only set for documents, contains the document's content language.",
- "optional": true,
- "type": "string"
- },
+ "name": "nodeId",
+ "description": "Id of the node to get stack traces for.",
+ "$ref": "NodeId"
+ }
+ ],
+ "returns": [
{
- "name": "documentEncoding",
- "description": "Only set for documents, contains the document's character set encoding.",
+ "name": "creation",
+ "description": "Creation stack trace, if available.",
"optional": true,
- "type": "string"
- },
+ "$ref": "Runtime.StackTrace"
+ }
+ ]
+ },
+ {
+ "name": "getFileInfo",
+ "description": "Returns file information for the given\nFile wrapper.",
+ "experimental": true,
+ "parameters": [
{
- "name": "publicId",
- "description": "`DocumentType` node's publicId.",
- "optional": true,
- "type": "string"
- },
+ "name": "objectId",
+ "description": "JavaScript object id of the node wrapper.",
+ "$ref": "Runtime.RemoteObjectId"
+ }
+ ],
+ "returns": [
{
- "name": "systemId",
- "description": "`DocumentType` node's systemId.",
- "optional": true,
+ "name": "path",
"type": "string"
- },
+ }
+ ]
+ },
+ {
+ "name": "setInspectedNode",
+ "description": "Enables console to refer to the node with given id via $x (see Command Line API for more details\n$x functions).",
+ "experimental": true,
+ "parameters": [
{
- "name": "frameId",
- "description": "Frame ID for frame owner elements and also for the document node.",
- "optional": true,
- "$ref": "Page.FrameId"
- },
+ "name": "nodeId",
+ "description": "DOM node id to be accessible by means of $x command line API.",
+ "$ref": "NodeId"
+ }
+ ]
+ },
+ {
+ "name": "setNodeName",
+ "description": "Sets node name for a node with given id.",
+ "parameters": [
{
- "name": "contentDocumentIndex",
- "description": "The index of a frame owner element's content document in the `domNodes` array returned by\n`getSnapshot`, if any.",
- "optional": true,
- "type": "integer"
+ "name": "nodeId",
+ "description": "Id of the node to set name for.",
+ "$ref": "NodeId"
},
{
- "name": "pseudoType",
- "description": "Type of a pseudo element node.",
- "optional": true,
- "$ref": "DOM.PseudoType"
- },
+ "name": "name",
+ "description": "New node's name.",
+ "type": "string"
+ }
+ ],
+ "returns": [
{
- "name": "shadowRootType",
- "description": "Shadow root type.",
- "optional": true,
- "$ref": "DOM.ShadowRootType"
- },
+ "name": "nodeId",
+ "description": "New node's id.",
+ "$ref": "NodeId"
+ }
+ ]
+ },
+ {
+ "name": "setNodeValue",
+ "description": "Sets node value for a node with given id.",
+ "parameters": [
{
- "name": "isClickable",
- "description": "Whether this DOM node responds to mouse clicks. This includes nodes that have had click\nevent listeners attached via JavaScript as well as anchor tags that naturally navigate when\nclicked.",
- "optional": true,
- "type": "boolean"
+ "name": "nodeId",
+ "description": "Id of the node to set value for.",
+ "$ref": "NodeId"
},
{
- "name": "eventListeners",
- "description": "Details of the node's event listeners, if any.",
- "optional": true,
- "type": "array",
- "items": {
- "$ref": "DOMDebugger.EventListener"
- }
- },
- {
- "name": "currentSourceURL",
- "description": "The selected url for nodes with a srcset attribute.",
- "optional": true,
- "type": "string"
- },
- {
- "name": "originURL",
- "description": "The url of the script (if any) that generates this node.",
- "optional": true,
+ "name": "value",
+ "description": "New node's value.",
"type": "string"
- },
- {
- "name": "scrollOffsetX",
- "description": "Scroll offsets, set when this node is a Document.",
- "optional": true,
- "type": "number"
- },
- {
- "name": "scrollOffsetY",
- "optional": true,
- "type": "number"
}
]
},
{
- "id": "InlineTextBox",
- "description": "Details of post layout rendered text positions. The exact layout should not be regarded as\nstable and may change between versions.",
- "type": "object",
- "properties": [
- {
- "name": "boundingBox",
- "description": "The bounding box in document coordinates. Note that scroll offset of the document is ignored.",
- "$ref": "DOM.Rect"
- },
+ "name": "setOuterHTML",
+ "description": "Sets node HTML markup, returns new node id.",
+ "parameters": [
{
- "name": "startCharacterIndex",
- "description": "The starting index in characters, for this post layout textbox substring. Characters that\nwould be represented as a surrogate pair in UTF-16 have length 2.",
- "type": "integer"
+ "name": "nodeId",
+ "description": "Id of the node to set markup for.",
+ "$ref": "NodeId"
},
{
- "name": "numCharacters",
- "description": "The number of characters in this post layout textbox substring. Characters that would be\nrepresented as a surrogate pair in UTF-16 have length 2.",
- "type": "integer"
+ "name": "outerHTML",
+ "description": "Outer HTML markup to set.",
+ "type": "string"
}
]
},
{
- "id": "LayoutTreeNode",
- "description": "Details of an element in the DOM tree with a LayoutObject.",
- "type": "object",
- "properties": [
- {
- "name": "domNodeIndex",
- "description": "The index of the related DOM node in the `domNodes` array returned by `getSnapshot`.",
- "type": "integer"
- },
+ "name": "undo",
+ "description": "Undoes the last performed action.",
+ "experimental": true
+ },
+ {
+ "name": "getFrameOwner",
+ "description": "Returns iframe node that owns iframe with the given domain.",
+ "experimental": true,
+ "parameters": [
{
- "name": "boundingBox",
- "description": "The bounding box in document coordinates. Note that scroll offset of the document is ignored.",
- "$ref": "DOM.Rect"
- },
+ "name": "frameId",
+ "$ref": "Page.FrameId"
+ }
+ ],
+ "returns": [
{
- "name": "layoutText",
- "description": "Contents of the LayoutText, if any.",
- "optional": true,
- "type": "string"
+ "name": "backendNodeId",
+ "description": "Resulting node.",
+ "$ref": "BackendNodeId"
},
{
- "name": "inlineTextNodes",
- "description": "The post-layout inline text nodes, if any.",
+ "name": "nodeId",
+ "description": "Id of the node at given coordinates, only when enabled and requested document.",
"optional": true,
- "type": "array",
- "items": {
- "$ref": "InlineTextBox"
- }
- },
+ "$ref": "NodeId"
+ }
+ ]
+ },
+ {
+ "name": "getContainerForNode",
+ "description": "Returns the container of the given node based on container query conditions.\nIf containerName is given, it will find the nearest container with a matching name;\notherwise it will find the nearest container regardless of its container name.",
+ "experimental": true,
+ "parameters": [
{
- "name": "styleIndex",
- "description": "Index into the `computedStyles` array returned by `getSnapshot`.",
- "optional": true,
- "type": "integer"
+ "name": "nodeId",
+ "$ref": "NodeId"
},
{
- "name": "paintOrder",
- "description": "Global paint order index, which is determined by the stacking order of the nodes. Nodes\nthat are painted together will have the same index. Only provided if includePaintOrder in\ngetSnapshot was true.",
+ "name": "containerName",
"optional": true,
- "type": "integer"
- },
+ "type": "string"
+ }
+ ],
+ "returns": [
{
- "name": "isStackingContext",
- "description": "Set to true to indicate the element begins a new stacking context.",
+ "name": "nodeId",
+ "description": "The container node for the given node, or null if not found.",
"optional": true,
- "type": "boolean"
+ "$ref": "NodeId"
}
]
},
{
- "id": "ComputedStyle",
- "description": "A subset of the full ComputedStyle as defined by the request whitelist.",
- "type": "object",
- "properties": [
+ "name": "getQueryingDescendantsForContainer",
+ "description": "Returns the descendants of a container query container that have\ncontainer queries against this container.",
+ "experimental": true,
+ "parameters": [
{
- "name": "properties",
- "description": "Name/value pairs of computed style properties.",
+ "name": "nodeId",
+ "description": "Id of the container node to find querying descendants from.",
+ "$ref": "NodeId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "nodeIds",
+ "description": "Descendant nodes with container queries against the given container.",
"type": "array",
"items": {
- "$ref": "NameValue"
+ "$ref": "NodeId"
}
}
]
- },
+ }
+ ],
+ "events": [
{
- "id": "NameValue",
- "description": "A name/value pair.",
- "type": "object",
- "properties": [
+ "name": "attributeModified",
+ "description": "Fired when `Element`'s attribute is modified.",
+ "parameters": [
+ {
+ "name": "nodeId",
+ "description": "Id of the node that has changed.",
+ "$ref": "NodeId"
+ },
{
"name": "name",
- "description": "Attribute/property name.",
+ "description": "Attribute name.",
"type": "string"
},
{
"name": "value",
- "description": "Attribute/property value.",
+ "description": "Attribute value.",
"type": "string"
}
]
},
{
- "id": "StringIndex",
- "description": "Index of the string in the strings table.",
- "type": "integer"
- },
- {
- "id": "ArrayOfStrings",
- "description": "Index of the string in the strings table.",
- "type": "array",
- "items": {
- "$ref": "StringIndex"
- }
+ "name": "attributeRemoved",
+ "description": "Fired when `Element`'s attribute is removed.",
+ "parameters": [
+ {
+ "name": "nodeId",
+ "description": "Id of the node that has changed.",
+ "$ref": "NodeId"
+ },
+ {
+ "name": "name",
+ "description": "A ttribute name.",
+ "type": "string"
+ }
+ ]
},
{
- "id": "RareStringData",
- "description": "Data that is only present on rare nodes.",
- "type": "object",
- "properties": [
+ "name": "characterDataModified",
+ "description": "Mirrors `DOMCharacterDataModified` event.",
+ "parameters": [
{
- "name": "index",
- "type": "array",
- "items": {
- "type": "integer"
- }
+ "name": "nodeId",
+ "description": "Id of the node that has changed.",
+ "$ref": "NodeId"
},
{
- "name": "value",
- "type": "array",
- "items": {
- "$ref": "StringIndex"
- }
+ "name": "characterData",
+ "description": "New text value.",
+ "type": "string"
}
]
},
{
- "id": "RareBooleanData",
- "type": "object",
- "properties": [
+ "name": "childNodeCountUpdated",
+ "description": "Fired when `Container`'s child node count has changed.",
+ "parameters": [
{
- "name": "index",
- "type": "array",
- "items": {
- "type": "integer"
- }
+ "name": "nodeId",
+ "description": "Id of the node that has changed.",
+ "$ref": "NodeId"
+ },
+ {
+ "name": "childNodeCount",
+ "description": "New node count.",
+ "type": "integer"
}
]
},
{
- "id": "RareIntegerData",
- "type": "object",
- "properties": [
+ "name": "childNodeInserted",
+ "description": "Mirrors `DOMNodeInserted` event.",
+ "parameters": [
{
- "name": "index",
- "type": "array",
- "items": {
- "type": "integer"
- }
+ "name": "parentNodeId",
+ "description": "Id of the node that has changed.",
+ "$ref": "NodeId"
},
{
- "name": "value",
- "type": "array",
- "items": {
- "type": "integer"
- }
+ "name": "previousNodeId",
+ "description": "If of the previous siblint.",
+ "$ref": "NodeId"
+ },
+ {
+ "name": "node",
+ "description": "Inserted node data.",
+ "$ref": "Node"
}
]
},
{
- "id": "Rectangle",
- "type": "array",
- "items": {
- "type": "number"
- }
+ "name": "childNodeRemoved",
+ "description": "Mirrors `DOMNodeRemoved` event.",
+ "parameters": [
+ {
+ "name": "parentNodeId",
+ "description": "Parent id.",
+ "$ref": "NodeId"
+ },
+ {
+ "name": "nodeId",
+ "description": "Id of the node that has been removed.",
+ "$ref": "NodeId"
+ }
+ ]
},
{
- "id": "DocumentSnapshot",
- "description": "Document snapshot.",
- "type": "object",
- "properties": [
+ "name": "distributedNodesUpdated",
+ "description": "Called when distribution is changed.",
+ "experimental": true,
+ "parameters": [
{
- "name": "documentURL",
- "description": "Document URL that `Document` or `FrameOwner` node points to.",
- "$ref": "StringIndex"
+ "name": "insertionPointId",
+ "description": "Insertion point where distributed nodes were updated.",
+ "$ref": "NodeId"
},
{
- "name": "baseURL",
- "description": "Base URL that `Document` or `FrameOwner` node uses for URL completion.",
- "$ref": "StringIndex"
- },
+ "name": "distributedNodes",
+ "description": "Distributed nodes for given insertion point.",
+ "type": "array",
+ "items": {
+ "$ref": "BackendNode"
+ }
+ }
+ ]
+ },
+ {
+ "name": "documentUpdated",
+ "description": "Fired when `Document` has been totally updated. Node ids are no longer valid."
+ },
+ {
+ "name": "inlineStyleInvalidated",
+ "description": "Fired when `Element`'s inline style is modified via a CSS property modification.",
+ "experimental": true,
+ "parameters": [
{
- "name": "contentLanguage",
- "description": "Contains the document's content language.",
- "$ref": "StringIndex"
- },
+ "name": "nodeIds",
+ "description": "Ids of the nodes for which the inline styles have been invalidated.",
+ "type": "array",
+ "items": {
+ "$ref": "NodeId"
+ }
+ }
+ ]
+ },
+ {
+ "name": "pseudoElementAdded",
+ "description": "Called when a pseudo element is added to an element.",
+ "experimental": true,
+ "parameters": [
{
- "name": "encodingName",
- "description": "Contains the document's character set encoding.",
- "$ref": "StringIndex"
+ "name": "parentId",
+ "description": "Pseudo element's parent element id.",
+ "$ref": "NodeId"
},
{
- "name": "publicId",
- "description": "`DocumentType` node's publicId.",
- "$ref": "StringIndex"
- },
+ "name": "pseudoElement",
+ "description": "The added pseudo element.",
+ "$ref": "Node"
+ }
+ ]
+ },
+ {
+ "name": "pseudoElementRemoved",
+ "description": "Called when a pseudo element is removed from an element.",
+ "experimental": true,
+ "parameters": [
{
- "name": "systemId",
- "description": "`DocumentType` node's systemId.",
- "$ref": "StringIndex"
+ "name": "parentId",
+ "description": "Pseudo element's parent element id.",
+ "$ref": "NodeId"
},
{
- "name": "frameId",
- "description": "Frame ID for frame owner elements and also for the document node.",
- "$ref": "StringIndex"
+ "name": "pseudoElementId",
+ "description": "The removed pseudo element id.",
+ "$ref": "NodeId"
+ }
+ ]
+ },
+ {
+ "name": "setChildNodes",
+ "description": "Fired when backend wants to provide client with the missing DOM structure. This happens upon\nmost of the calls requesting node ids.",
+ "parameters": [
+ {
+ "name": "parentId",
+ "description": "Parent node id to populate with children.",
+ "$ref": "NodeId"
},
{
"name": "nodes",
- "description": "A table with dom nodes.",
- "$ref": "NodeTreeSnapshot"
- },
+ "description": "Child nodes array.",
+ "type": "array",
+ "items": {
+ "$ref": "Node"
+ }
+ }
+ ]
+ },
+ {
+ "name": "shadowRootPopped",
+ "description": "Called when shadow root is popped from the element.",
+ "experimental": true,
+ "parameters": [
{
- "name": "layout",
- "description": "The nodes in the layout tree.",
- "$ref": "LayoutTreeSnapshot"
+ "name": "hostId",
+ "description": "Host element id.",
+ "$ref": "NodeId"
},
{
- "name": "textBoxes",
- "description": "The post-layout inline text nodes.",
- "$ref": "TextBoxSnapshot"
- },
+ "name": "rootId",
+ "description": "Shadow root id.",
+ "$ref": "NodeId"
+ }
+ ]
+ },
+ {
+ "name": "shadowRootPushed",
+ "description": "Called when shadow root is pushed into the element.",
+ "experimental": true,
+ "parameters": [
{
- "name": "scrollOffsetX",
- "description": "Horizontal scroll offset.",
- "optional": true,
- "type": "number"
+ "name": "hostId",
+ "description": "Host element id.",
+ "$ref": "NodeId"
},
{
- "name": "scrollOffsetY",
- "description": "Vertical scroll offset.",
- "optional": true,
- "type": "number"
+ "name": "root",
+ "description": "Shadow root.",
+ "$ref": "Node"
}
]
+ }
+ ]
+ },
+ {
+ "domain": "DOMDebugger",
+ "description": "DOM debugging allows setting breakpoints on particular DOM operations and events. JavaScript\nexecution will stop on these operations as if there was a regular breakpoint set.",
+ "dependencies": [
+ "DOM",
+ "Debugger",
+ "Runtime"
+ ],
+ "types": [
+ {
+ "id": "DOMBreakpointType",
+ "description": "DOM breakpoint type.",
+ "type": "string",
+ "enum": [
+ "subtree-modified",
+ "attribute-modified",
+ "node-removed"
+ ]
},
{
- "id": "NodeTreeSnapshot",
- "description": "Table containing nodes.",
+ "id": "CSPViolationType",
+ "description": "CSP Violation type.",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "trustedtype-sink-violation",
+ "trustedtype-policy-violation"
+ ]
+ },
+ {
+ "id": "EventListener",
+ "description": "Object event listener.",
"type": "object",
"properties": [
{
- "name": "parentIndex",
- "description": "Parent node index.",
- "optional": true,
- "type": "array",
- "items": {
- "type": "integer"
- }
+ "name": "type",
+ "description": "`EventListener`'s type.",
+ "type": "string"
},
{
- "name": "nodeType",
- "description": "`Node`'s nodeType.",
- "optional": true,
- "type": "array",
- "items": {
- "type": "integer"
- }
+ "name": "useCapture",
+ "description": "`EventListener`'s useCapture.",
+ "type": "boolean"
},
{
- "name": "nodeName",
- "description": "`Node`'s nodeName.",
- "optional": true,
- "type": "array",
- "items": {
- "$ref": "StringIndex"
- }
+ "name": "passive",
+ "description": "`EventListener`'s passive flag.",
+ "type": "boolean"
},
{
- "name": "nodeValue",
- "description": "`Node`'s nodeValue.",
- "optional": true,
- "type": "array",
- "items": {
- "$ref": "StringIndex"
- }
+ "name": "once",
+ "description": "`EventListener`'s once flag.",
+ "type": "boolean"
},
{
- "name": "backendNodeId",
- "description": "`Node`'s id, corresponds to DOM.Node.backendNodeId.",
- "optional": true,
- "type": "array",
- "items": {
- "$ref": "DOM.BackendNodeId"
- }
+ "name": "scriptId",
+ "description": "Script id of the handler code.",
+ "$ref": "Runtime.ScriptId"
},
{
- "name": "attributes",
- "description": "Attributes of an `Element` node. Flatten name, value pairs.",
- "optional": true,
- "type": "array",
- "items": {
- "$ref": "ArrayOfStrings"
- }
+ "name": "lineNumber",
+ "description": "Line number in the script (0-based).",
+ "type": "integer"
},
{
- "name": "textValue",
- "description": "Only set for textarea elements, contains the text value.",
- "optional": true,
- "$ref": "RareStringData"
- },
- {
- "name": "inputValue",
- "description": "Only set for input elements, contains the input's associated text value.",
- "optional": true,
- "$ref": "RareStringData"
- },
- {
- "name": "inputChecked",
- "description": "Only set for radio and checkbox input elements, indicates if the element has been checked",
- "optional": true,
- "$ref": "RareBooleanData"
+ "name": "columnNumber",
+ "description": "Column number in the script (0-based).",
+ "type": "integer"
},
{
- "name": "optionSelected",
- "description": "Only set for option elements, indicates if the element has been selected",
+ "name": "handler",
+ "description": "Event handler function value.",
"optional": true,
- "$ref": "RareBooleanData"
+ "$ref": "Runtime.RemoteObject"
},
{
- "name": "contentDocumentIndex",
- "description": "The index of the document in the list of the snapshot documents.",
+ "name": "originalHandler",
+ "description": "Event original handler function value.",
"optional": true,
- "$ref": "RareIntegerData"
+ "$ref": "Runtime.RemoteObject"
},
{
- "name": "pseudoType",
- "description": "Type of a pseudo element node.",
+ "name": "backendNodeId",
+ "description": "Node the listener is added to (if any).",
"optional": true,
- "$ref": "RareStringData"
- },
+ "$ref": "DOM.BackendNodeId"
+ }
+ ]
+ }
+ ],
+ "commands": [
+ {
+ "name": "getEventListeners",
+ "description": "Returns event listeners of the given object.",
+ "parameters": [
{
- "name": "isClickable",
- "description": "Whether this DOM node responds to mouse clicks. This includes nodes that have had click\nevent listeners attached via JavaScript as well as anchor tags that naturally navigate when\nclicked.",
- "optional": true,
- "$ref": "RareBooleanData"
+ "name": "objectId",
+ "description": "Identifier of the object to return listeners for.",
+ "$ref": "Runtime.RemoteObjectId"
},
{
- "name": "currentSourceURL",
- "description": "The selected url for nodes with a srcset attribute.",
+ "name": "depth",
+ "description": "The maximum depth at which Node children should be retrieved, defaults to 1. Use -1 for the\nentire subtree or provide an integer larger than 0.",
"optional": true,
- "$ref": "RareStringData"
+ "type": "integer"
},
{
- "name": "originURL",
- "description": "The url of the script (if any) that generates this node.",
+ "name": "pierce",
+ "description": "Whether or not iframes and shadow roots should be traversed when returning the subtree\n(default is false). Reports listeners for all contexts if pierce is enabled.",
"optional": true,
- "$ref": "RareStringData"
+ "type": "boolean"
}
- ]
- },
- {
- "id": "LayoutTreeSnapshot",
- "description": "Table of details of an element in the DOM tree with a LayoutObject.",
- "type": "object",
- "properties": [
- {
- "name": "nodeIndex",
- "description": "Index of the corresponding node in the `NodeTreeSnapshot` array returned by `captureSnapshot`.",
- "type": "array",
- "items": {
- "type": "integer"
- }
- },
+ ],
+ "returns": [
{
- "name": "styles",
- "description": "Array of indexes specifying computed style strings, filtered according to the `computedStyles` parameter passed to `captureSnapshot`.",
+ "name": "listeners",
+ "description": "Array of relevant listeners.",
"type": "array",
"items": {
- "$ref": "ArrayOfStrings"
+ "$ref": "EventListener"
}
- },
+ }
+ ]
+ },
+ {
+ "name": "removeDOMBreakpoint",
+ "description": "Removes DOM breakpoint that was set using `setDOMBreakpoint`.",
+ "parameters": [
{
- "name": "bounds",
- "description": "The absolute position bounding box.",
- "type": "array",
- "items": {
- "$ref": "Rectangle"
- }
+ "name": "nodeId",
+ "description": "Identifier of the node to remove breakpoint from.",
+ "$ref": "DOM.NodeId"
},
{
- "name": "text",
- "description": "Contents of the LayoutText, if any.",
- "type": "array",
- "items": {
- "$ref": "StringIndex"
- }
- },
+ "name": "type",
+ "description": "Type of the breakpoint to remove.",
+ "$ref": "DOMBreakpointType"
+ }
+ ]
+ },
+ {
+ "name": "removeEventListenerBreakpoint",
+ "description": "Removes breakpoint on particular DOM event.",
+ "parameters": [
{
- "name": "stackingContexts",
- "description": "Stacking context information.",
- "$ref": "RareBooleanData"
+ "name": "eventName",
+ "description": "Event name.",
+ "type": "string"
},
{
- "name": "offsetRects",
- "description": "The offset rect of nodes. Only available when includeDOMRects is set to true",
+ "name": "targetName",
+ "description": "EventTarget interface name.",
+ "experimental": true,
"optional": true,
- "type": "array",
- "items": {
- "$ref": "Rectangle"
- }
- },
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "name": "removeInstrumentationBreakpoint",
+ "description": "Removes breakpoint on particular native event.",
+ "experimental": true,
+ "parameters": [
{
- "name": "scrollRects",
- "description": "The scroll rect of nodes. Only available when includeDOMRects is set to true",
- "optional": true,
- "type": "array",
- "items": {
- "$ref": "Rectangle"
- }
- },
+ "name": "eventName",
+ "description": "Instrumentation name to stop on.",
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "name": "removeXHRBreakpoint",
+ "description": "Removes breakpoint from XMLHttpRequest.",
+ "parameters": [
{
- "name": "clientRects",
- "description": "The client rect of nodes. Only available when includeDOMRects is set to true",
- "optional": true,
- "type": "array",
- "items": {
- "$ref": "Rectangle"
- }
+ "name": "url",
+ "description": "Resource URL substring.",
+ "type": "string"
}
]
},
{
- "id": "TextBoxSnapshot",
- "description": "Table of details of the post layout rendered text positions. The exact layout should not be regarded as\nstable and may change between versions.",
- "type": "object",
- "properties": [
+ "name": "setBreakOnCSPViolation",
+ "description": "Sets breakpoint on particular CSP violations.",
+ "experimental": true,
+ "parameters": [
{
- "name": "layoutIndex",
- "description": "Index of the layout tree node that owns this box collection.",
+ "name": "violationTypes",
+ "description": "CSP Violations to stop upon.",
"type": "array",
"items": {
- "type": "integer"
+ "$ref": "CSPViolationType"
}
- },
+ }
+ ]
+ },
+ {
+ "name": "setDOMBreakpoint",
+ "description": "Sets breakpoint on particular operation with DOM.",
+ "parameters": [
{
- "name": "bounds",
- "description": "The absolute position bounding box.",
- "type": "array",
- "items": {
- "$ref": "Rectangle"
- }
+ "name": "nodeId",
+ "description": "Identifier of the node to set breakpoint on.",
+ "$ref": "DOM.NodeId"
},
{
- "name": "start",
- "description": "The starting index in characters, for this post layout textbox substring. Characters that\nwould be represented as a surrogate pair in UTF-16 have length 2.",
- "type": "array",
- "items": {
- "type": "integer"
- }
+ "name": "type",
+ "description": "Type of the operation to stop upon.",
+ "$ref": "DOMBreakpointType"
+ }
+ ]
+ },
+ {
+ "name": "setEventListenerBreakpoint",
+ "description": "Sets breakpoint on particular DOM event.",
+ "parameters": [
+ {
+ "name": "eventName",
+ "description": "DOM Event name to stop on (any DOM event will do).",
+ "type": "string"
},
{
- "name": "length",
- "description": "The number of characters in this post layout textbox substring. Characters that would be\nrepresented as a surrogate pair in UTF-16 have length 2.",
- "type": "array",
- "items": {
- "type": "integer"
- }
+ "name": "targetName",
+ "description": "EventTarget interface name to stop on. If equal to `\"*\"` or not provided, will stop on any\nEventTarget.",
+ "experimental": true,
+ "optional": true,
+ "type": "string"
}
]
- }
- ],
- "commands": [
- {
- "name": "disable",
- "description": "Disables DOM snapshot agent for the given page."
},
{
- "name": "enable",
- "description": "Enables DOM snapshot agent for the given page."
+ "name": "setInstrumentationBreakpoint",
+ "description": "Sets breakpoint on particular native event.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "eventName",
+ "description": "Instrumentation name to stop on.",
+ "type": "string"
+ }
+ ]
},
{
- "name": "getSnapshot",
- "description": "Returns a document snapshot, including the full DOM tree of the root node (including iframes,\ntemplate contents, and imported documents) in a flattened array, as well as layout and\nwhite-listed computed style information for the nodes. Shadow DOM in the returned DOM tree is\nflattened.",
- "deprecated": true,
+ "name": "setXHRBreakpoint",
+ "description": "Sets breakpoint on XMLHttpRequest.",
"parameters": [
{
- "name": "computedStyleWhitelist",
- "description": "Whitelist of computed styles to return.",
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- {
- "name": "includeEventListeners",
- "description": "Whether or not to retrieve details of DOM listeners (default false).",
- "optional": true,
- "type": "boolean"
- },
- {
- "name": "includePaintOrder",
- "description": "Whether to determine and include the paint order index of LayoutTreeNodes (default false).",
- "optional": true,
- "type": "boolean"
- },
- {
- "name": "includeUserAgentShadowTree",
- "description": "Whether to include UA shadow tree in the snapshot (default false).",
- "optional": true,
- "type": "boolean"
- }
- ],
- "returns": [
- {
- "name": "domNodes",
- "description": "The nodes in the DOM tree. The DOMNode at index 0 corresponds to the root document.",
- "type": "array",
- "items": {
- "$ref": "DOMNode"
- }
- },
- {
- "name": "layoutTreeNodes",
- "description": "The nodes in the layout tree.",
- "type": "array",
- "items": {
- "$ref": "LayoutTreeNode"
- }
- },
- {
- "name": "computedStyles",
- "description": "Whitelisted ComputedStyle properties for each node in the layout tree.",
- "type": "array",
- "items": {
- "$ref": "ComputedStyle"
- }
- }
- ]
- },
- {
- "name": "captureSnapshot",
- "description": "Returns a document snapshot, including the full DOM tree of the root node (including iframes,\ntemplate contents, and imported documents) in a flattened array, as well as layout and\nwhite-listed computed style information for the nodes. Shadow DOM in the returned DOM tree is\nflattened.",
- "parameters": [
- {
- "name": "computedStyles",
- "description": "Whitelist of computed styles to return.",
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- {
- "name": "includeDOMRects",
- "description": "Whether to include DOM rectangles (offsetRects, clientRects, scrollRects) into the snapshot",
- "optional": true,
- "type": "boolean"
- }
- ],
- "returns": [
- {
- "name": "documents",
- "description": "The nodes in the DOM tree. The DOMNode at index 0 corresponds to the root document.",
- "type": "array",
- "items": {
- "$ref": "DocumentSnapshot"
- }
- },
- {
- "name": "strings",
- "description": "Shared string table that all string properties refer to with indexes.",
- "type": "array",
- "items": {
- "type": "string"
- }
+ "name": "url",
+ "description": "Resource URL substring. All XHRs having this substring in the URL will get stopped upon.",
+ "type": "string"
}
]
}
]
},
{
- "domain": "DOMStorage",
- "description": "Query and modify DOM storage.",
+ "domain": "EventBreakpoints",
+ "description": "EventBreakpoints permits setting breakpoints on particular operations and\nevents in targets that run JavaScript but do not have a DOM.\nJavaScript execution will stop on these operations as if there was a regular\nbreakpoint set.",
"experimental": true,
- "types": [
- {
- "id": "StorageId",
- "description": "DOM Storage identifier.",
- "type": "object",
- "properties": [
- {
- "name": "securityOrigin",
- "description": "Security origin for the storage.",
- "type": "string"
- },
- {
- "name": "isLocalStorage",
- "description": "Whether the storage is local storage (not session storage).",
- "type": "boolean"
- }
- ]
- },
- {
- "id": "Item",
- "description": "DOM Storage item.",
- "type": "array",
- "items": {
- "type": "string"
- }
- }
- ],
"commands": [
{
- "name": "clear",
- "parameters": [
- {
- "name": "storageId",
- "$ref": "StorageId"
- }
- ]
- },
- {
- "name": "disable",
- "description": "Disables storage tracking, prevents storage events from being sent to the client."
- },
- {
- "name": "enable",
- "description": "Enables storage tracking, storage events will now be delivered to the client."
- },
- {
- "name": "getDOMStorageItems",
- "parameters": [
- {
- "name": "storageId",
- "$ref": "StorageId"
- }
- ],
- "returns": [
- {
- "name": "entries",
- "type": "array",
- "items": {
- "$ref": "Item"
- }
- }
- ]
- },
- {
- "name": "removeDOMStorageItem",
+ "name": "setInstrumentationBreakpoint",
+ "description": "Sets breakpoint on particular native event.",
"parameters": [
{
- "name": "storageId",
- "$ref": "StorageId"
- },
- {
- "name": "key",
+ "name": "eventName",
+ "description": "Instrumentation name to stop on.",
"type": "string"
}
]
},
{
- "name": "setDOMStorageItem",
+ "name": "removeInstrumentationBreakpoint",
+ "description": "Removes breakpoint on particular native event.",
"parameters": [
{
- "name": "storageId",
- "$ref": "StorageId"
- },
- {
- "name": "key",
- "type": "string"
- },
- {
- "name": "value",
+ "name": "eventName",
+ "description": "Instrumentation name to stop on.",
"type": "string"
}
]
}
+ ]
+ },
+ {
+ "domain": "DOMSnapshot",
+ "description": "This domain facilitates obtaining document snapshots with DOM, layout, and style information.",
+ "experimental": true,
+ "dependencies": [
+ "CSS",
+ "DOM",
+ "DOMDebugger",
+ "Page"
],
- "events": [
+ "types": [
{
- "name": "domStorageItemAdded",
- "parameters": [
+ "id": "DOMNode",
+ "description": "A Node in the DOM tree.",
+ "type": "object",
+ "properties": [
{
- "name": "storageId",
- "$ref": "StorageId"
+ "name": "nodeType",
+ "description": "`Node`'s nodeType.",
+ "type": "integer"
},
{
- "name": "key",
+ "name": "nodeName",
+ "description": "`Node`'s nodeName.",
"type": "string"
},
{
- "name": "newValue",
+ "name": "nodeValue",
+ "description": "`Node`'s nodeValue.",
"type": "string"
- }
- ]
- },
- {
- "name": "domStorageItemRemoved",
- "parameters": [
- {
- "name": "storageId",
- "$ref": "StorageId"
},
{
- "name": "key",
+ "name": "textValue",
+ "description": "Only set for textarea elements, contains the text value.",
+ "optional": true,
"type": "string"
- }
- ]
- },
- {
- "name": "domStorageItemUpdated",
- "parameters": [
- {
- "name": "storageId",
- "$ref": "StorageId"
},
{
- "name": "key",
+ "name": "inputValue",
+ "description": "Only set for input elements, contains the input's associated text value.",
+ "optional": true,
"type": "string"
},
{
- "name": "oldValue",
- "type": "string"
+ "name": "inputChecked",
+ "description": "Only set for radio and checkbox input elements, indicates if the element has been checked",
+ "optional": true,
+ "type": "boolean"
},
{
- "name": "newValue",
- "type": "string"
- }
- ]
- },
- {
- "name": "domStorageItemsCleared",
- "parameters": [
+ "name": "optionSelected",
+ "description": "Only set for option elements, indicates if the element has been selected",
+ "optional": true,
+ "type": "boolean"
+ },
{
- "name": "storageId",
- "$ref": "StorageId"
- }
- ]
- }
- ]
- },
- {
- "domain": "Database",
- "experimental": true,
- "types": [
- {
- "id": "DatabaseId",
- "description": "Unique identifier of Database object.",
- "type": "string"
- },
- {
- "id": "Database",
- "description": "Database object.",
- "type": "object",
- "properties": [
+ "name": "backendNodeId",
+ "description": "`Node`'s id, corresponds to DOM.Node.backendNodeId.",
+ "$ref": "DOM.BackendNodeId"
+ },
{
- "name": "id",
- "description": "Database ID.",
- "$ref": "DatabaseId"
+ "name": "childNodeIndexes",
+ "description": "The indexes of the node's child nodes in the `domNodes` array returned by `getSnapshot`, if\nany.",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "type": "integer"
+ }
},
{
- "name": "domain",
- "description": "Database domain.",
+ "name": "attributes",
+ "description": "Attributes of an `Element` node.",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "$ref": "NameValue"
+ }
+ },
+ {
+ "name": "pseudoElementIndexes",
+ "description": "Indexes of pseudo elements associated with this node in the `domNodes` array returned by\n`getSnapshot`, if any.",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "type": "integer"
+ }
+ },
+ {
+ "name": "layoutNodeIndex",
+ "description": "The index of the node's related layout tree node in the `layoutTreeNodes` array returned by\n`getSnapshot`, if any.",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "documentURL",
+ "description": "Document URL that `Document` or `FrameOwner` node points to.",
+ "optional": true,
"type": "string"
},
{
- "name": "name",
- "description": "Database name.",
+ "name": "baseURL",
+ "description": "Base URL that `Document` or `FrameOwner` node uses for URL completion.",
+ "optional": true,
"type": "string"
},
{
- "name": "version",
- "description": "Database version.",
+ "name": "contentLanguage",
+ "description": "Only set for documents, contains the document's content language.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "documentEncoding",
+ "description": "Only set for documents, contains the document's character set encoding.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "publicId",
+ "description": "`DocumentType` node's publicId.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "systemId",
+ "description": "`DocumentType` node's systemId.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "frameId",
+ "description": "Frame ID for frame owner elements and also for the document node.",
+ "optional": true,
+ "$ref": "Page.FrameId"
+ },
+ {
+ "name": "contentDocumentIndex",
+ "description": "The index of a frame owner element's content document in the `domNodes` array returned by\n`getSnapshot`, if any.",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "pseudoType",
+ "description": "Type of a pseudo element node.",
+ "optional": true,
+ "$ref": "DOM.PseudoType"
+ },
+ {
+ "name": "shadowRootType",
+ "description": "Shadow root type.",
+ "optional": true,
+ "$ref": "DOM.ShadowRootType"
+ },
+ {
+ "name": "isClickable",
+ "description": "Whether this DOM node responds to mouse clicks. This includes nodes that have had click\nevent listeners attached via JavaScript as well as anchor tags that naturally navigate when\nclicked.",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "eventListeners",
+ "description": "Details of the node's event listeners, if any.",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "$ref": "DOMDebugger.EventListener"
+ }
+ },
+ {
+ "name": "currentSourceURL",
+ "description": "The selected url for nodes with a srcset attribute.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "originURL",
+ "description": "The url of the script (if any) that generates this node.",
+ "optional": true,
"type": "string"
+ },
+ {
+ "name": "scrollOffsetX",
+ "description": "Scroll offsets, set when this node is a Document.",
+ "optional": true,
+ "type": "number"
+ },
+ {
+ "name": "scrollOffsetY",
+ "optional": true,
+ "type": "number"
}
]
},
{
- "id": "Error",
- "description": "Database error.",
+ "id": "InlineTextBox",
+ "description": "Details of post layout rendered text positions. The exact layout should not be regarded as\nstable and may change between versions.",
"type": "object",
"properties": [
{
- "name": "message",
- "description": "Error message.",
- "type": "string"
+ "name": "boundingBox",
+ "description": "The bounding box in document coordinates. Note that scroll offset of the document is ignored.",
+ "$ref": "DOM.Rect"
},
{
- "name": "code",
- "description": "Error code.",
+ "name": "startCharacterIndex",
+ "description": "The starting index in characters, for this post layout textbox substring. Characters that\nwould be represented as a surrogate pair in UTF-16 have length 2.",
+ "type": "integer"
+ },
+ {
+ "name": "numCharacters",
+ "description": "The number of characters in this post layout textbox substring. Characters that would be\nrepresented as a surrogate pair in UTF-16 have length 2.",
"type": "integer"
}
]
- }
- ],
- "commands": [
- {
- "name": "disable",
- "description": "Disables database tracking, prevents database events from being sent to the client."
- },
- {
- "name": "enable",
- "description": "Enables database tracking, database events will now be delivered to the client."
},
{
- "name": "executeSQL",
- "parameters": [
+ "id": "LayoutTreeNode",
+ "description": "Details of an element in the DOM tree with a LayoutObject.",
+ "type": "object",
+ "properties": [
{
- "name": "databaseId",
- "$ref": "DatabaseId"
+ "name": "domNodeIndex",
+ "description": "The index of the related DOM node in the `domNodes` array returned by `getSnapshot`.",
+ "type": "integer"
},
{
- "name": "query",
+ "name": "boundingBox",
+ "description": "The bounding box in document coordinates. Note that scroll offset of the document is ignored.",
+ "$ref": "DOM.Rect"
+ },
+ {
+ "name": "layoutText",
+ "description": "Contents of the LayoutText, if any.",
+ "optional": true,
"type": "string"
- }
- ],
- "returns": [
+ },
{
- "name": "columnNames",
+ "name": "inlineTextNodes",
+ "description": "The post-layout inline text nodes, if any.",
"optional": true,
"type": "array",
"items": {
- "type": "string"
+ "$ref": "InlineTextBox"
}
},
{
- "name": "values",
+ "name": "styleIndex",
+ "description": "Index into the `computedStyles` array returned by `getSnapshot`.",
"optional": true,
- "type": "array",
- "items": {
- "type": "any"
- }
+ "type": "integer"
},
{
- "name": "sqlError",
+ "name": "paintOrder",
+ "description": "Global paint order index, which is determined by the stacking order of the nodes. Nodes\nthat are painted together will have the same index. Only provided if includePaintOrder in\ngetSnapshot was true.",
"optional": true,
- "$ref": "Error"
+ "type": "integer"
+ },
+ {
+ "name": "isStackingContext",
+ "description": "Set to true to indicate the element begins a new stacking context.",
+ "optional": true,
+ "type": "boolean"
}
]
},
{
- "name": "getDatabaseTableNames",
- "parameters": [
- {
- "name": "databaseId",
- "$ref": "DatabaseId"
- }
- ],
- "returns": [
+ "id": "ComputedStyle",
+ "description": "A subset of the full ComputedStyle as defined by the request whitelist.",
+ "type": "object",
+ "properties": [
{
- "name": "tableNames",
+ "name": "properties",
+ "description": "Name/value pairs of computed style properties.",
"type": "array",
"items": {
- "type": "string"
+ "$ref": "NameValue"
}
}
]
- }
- ],
- "events": [
+ },
{
- "name": "addDatabase",
- "parameters": [
+ "id": "NameValue",
+ "description": "A name/value pair.",
+ "type": "object",
+ "properties": [
{
- "name": "database",
- "$ref": "Database"
+ "name": "name",
+ "description": "Attribute/property name.",
+ "type": "string"
+ },
+ {
+ "name": "value",
+ "description": "Attribute/property value.",
+ "type": "string"
}
]
- }
- ]
- },
- {
- "domain": "DeviceOrientation",
- "experimental": true,
- "commands": [
+ },
{
- "name": "clearDeviceOrientationOverride",
- "description": "Clears the overridden Device Orientation."
+ "id": "StringIndex",
+ "description": "Index of the string in the strings table.",
+ "type": "integer"
},
{
- "name": "setDeviceOrientationOverride",
- "description": "Overrides the Device Orientation.",
- "parameters": [
- {
- "name": "alpha",
- "description": "Mock alpha",
- "type": "number"
- },
- {
- "name": "beta",
- "description": "Mock beta",
- "type": "number"
- },
- {
- "name": "gamma",
- "description": "Mock gamma",
- "type": "number"
- }
- ]
- }
- ]
- },
- {
- "domain": "Emulation",
- "description": "This domain emulates different environments for the page.",
- "dependencies": [
- "DOM",
- "Page",
- "Runtime"
- ],
- "types": [
+ "id": "ArrayOfStrings",
+ "description": "Index of the string in the strings table.",
+ "type": "array",
+ "items": {
+ "$ref": "StringIndex"
+ }
+ },
{
- "id": "ScreenOrientation",
- "description": "Screen orientation.",
+ "id": "RareStringData",
+ "description": "Data that is only present on rare nodes.",
"type": "object",
"properties": [
{
- "name": "type",
- "description": "Orientation type.",
- "type": "string",
- "enum": [
- "portraitPrimary",
- "portraitSecondary",
- "landscapePrimary",
- "landscapeSecondary"
- ]
+ "name": "index",
+ "type": "array",
+ "items": {
+ "type": "integer"
+ }
},
{
- "name": "angle",
- "description": "Orientation angle.",
- "type": "integer"
+ "name": "value",
+ "type": "array",
+ "items": {
+ "$ref": "StringIndex"
+ }
}
]
},
{
- "id": "VirtualTimePolicy",
- "description": "advance: If the scheduler runs out of immediate work, the virtual time base may fast forward to\nallow the next delayed task (if any) to run; pause: The virtual time base may not advance;\npauseIfNetworkFetchesPending: The virtual time base may not advance if there are any pending\nresource fetches.",
- "experimental": true,
- "type": "string",
- "enum": [
- "advance",
- "pause",
- "pauseIfNetworkFetchesPending"
- ]
- }
- ],
- "commands": [
- {
- "name": "canEmulate",
- "description": "Tells whether emulation is supported.",
- "returns": [
+ "id": "RareBooleanData",
+ "type": "object",
+ "properties": [
{
- "name": "result",
- "description": "True if emulation is supported.",
- "type": "boolean"
+ "name": "index",
+ "type": "array",
+ "items": {
+ "type": "integer"
+ }
}
]
},
{
- "name": "clearDeviceMetricsOverride",
- "description": "Clears the overriden device metrics."
- },
- {
- "name": "clearGeolocationOverride",
- "description": "Clears the overriden Geolocation Position and Error."
- },
- {
- "name": "resetPageScaleFactor",
- "description": "Requests that page scale factor is reset to initial values.",
- "experimental": true
- },
- {
- "name": "setFocusEmulationEnabled",
- "description": "Enables or disables simulating a focused and active page.",
- "experimental": true,
- "parameters": [
+ "id": "RareIntegerData",
+ "type": "object",
+ "properties": [
{
- "name": "enabled",
- "description": "Whether to enable to disable focus emulation.",
- "type": "boolean"
- }
- ]
- },
- {
- "name": "setCPUThrottlingRate",
- "description": "Enables CPU throttling to emulate slow CPUs.",
- "experimental": true,
- "parameters": [
+ "name": "index",
+ "type": "array",
+ "items": {
+ "type": "integer"
+ }
+ },
{
- "name": "rate",
- "description": "Throttling rate as a slowdown factor (1 is no throttle, 2 is 2x slowdown, etc).",
- "type": "number"
+ "name": "value",
+ "type": "array",
+ "items": {
+ "type": "integer"
+ }
}
]
},
{
- "name": "setDefaultBackgroundColorOverride",
- "description": "Sets or clears an override of the default background color of the frame. This override is used\nif the content does not specify one.",
- "parameters": [
- {
- "name": "color",
- "description": "RGBA of the default background color. If not specified, any existing override will be\ncleared.",
- "optional": true,
- "$ref": "DOM.RGBA"
- }
- ]
+ "id": "Rectangle",
+ "type": "array",
+ "items": {
+ "type": "number"
+ }
},
{
- "name": "setDeviceMetricsOverride",
- "description": "Overrides the values of device screen dimensions (window.screen.width, window.screen.height,\nwindow.innerWidth, window.innerHeight, and \"device-width\"/\"device-height\"-related CSS media\nquery results).",
- "parameters": [
- {
- "name": "width",
- "description": "Overriding width value in pixels (minimum 0, maximum 10000000). 0 disables the override.",
- "type": "integer"
- },
+ "id": "DocumentSnapshot",
+ "description": "Document snapshot.",
+ "type": "object",
+ "properties": [
{
- "name": "height",
- "description": "Overriding height value in pixels (minimum 0, maximum 10000000). 0 disables the override.",
- "type": "integer"
+ "name": "documentURL",
+ "description": "Document URL that `Document` or `FrameOwner` node points to.",
+ "$ref": "StringIndex"
},
{
- "name": "deviceScaleFactor",
- "description": "Overriding device scale factor value. 0 disables the override.",
- "type": "number"
+ "name": "title",
+ "description": "Document title.",
+ "$ref": "StringIndex"
},
{
- "name": "mobile",
- "description": "Whether to emulate mobile device. This includes viewport meta tag, overlay scrollbars, text\nautosizing and more.",
- "type": "boolean"
+ "name": "baseURL",
+ "description": "Base URL that `Document` or `FrameOwner` node uses for URL completion.",
+ "$ref": "StringIndex"
},
{
- "name": "scale",
- "description": "Scale to apply to resulting view image.",
- "experimental": true,
- "optional": true,
- "type": "number"
+ "name": "contentLanguage",
+ "description": "Contains the document's content language.",
+ "$ref": "StringIndex"
},
{
- "name": "screenWidth",
- "description": "Overriding screen width value in pixels (minimum 0, maximum 10000000).",
- "experimental": true,
- "optional": true,
- "type": "integer"
+ "name": "encodingName",
+ "description": "Contains the document's character set encoding.",
+ "$ref": "StringIndex"
},
{
- "name": "screenHeight",
- "description": "Overriding screen height value in pixels (minimum 0, maximum 10000000).",
- "experimental": true,
- "optional": true,
- "type": "integer"
+ "name": "publicId",
+ "description": "`DocumentType` node's publicId.",
+ "$ref": "StringIndex"
},
{
- "name": "positionX",
- "description": "Overriding view X position on screen in pixels (minimum 0, maximum 10000000).",
- "experimental": true,
- "optional": true,
- "type": "integer"
+ "name": "systemId",
+ "description": "`DocumentType` node's systemId.",
+ "$ref": "StringIndex"
},
{
- "name": "positionY",
- "description": "Overriding view Y position on screen in pixels (minimum 0, maximum 10000000).",
- "experimental": true,
- "optional": true,
- "type": "integer"
+ "name": "frameId",
+ "description": "Frame ID for frame owner elements and also for the document node.",
+ "$ref": "StringIndex"
},
{
- "name": "dontSetVisibleSize",
- "description": "Do not set visible view size, rely upon explicit setVisibleSize call.",
- "experimental": true,
- "optional": true,
- "type": "boolean"
+ "name": "nodes",
+ "description": "A table with dom nodes.",
+ "$ref": "NodeTreeSnapshot"
},
{
- "name": "screenOrientation",
- "description": "Screen orientation override.",
- "optional": true,
- "$ref": "ScreenOrientation"
+ "name": "layout",
+ "description": "The nodes in the layout tree.",
+ "$ref": "LayoutTreeSnapshot"
},
{
- "name": "viewport",
- "description": "If set, the visible area of the page will be overridden to this viewport. This viewport\nchange is not observed by the page, e.g. viewport-relative elements do not change positions.",
- "experimental": true,
- "optional": true,
- "$ref": "Page.Viewport"
- }
- ]
- },
- {
- "name": "setScrollbarsHidden",
- "experimental": true,
- "parameters": [
+ "name": "textBoxes",
+ "description": "The post-layout inline text nodes.",
+ "$ref": "TextBoxSnapshot"
+ },
{
- "name": "hidden",
- "description": "Whether scrollbars should be always hidden.",
- "type": "boolean"
- }
- ]
- },
- {
- "name": "setDocumentCookieDisabled",
- "experimental": true,
- "parameters": [
+ "name": "scrollOffsetX",
+ "description": "Horizontal scroll offset.",
+ "optional": true,
+ "type": "number"
+ },
{
- "name": "disabled",
- "description": "Whether document.coookie API should be disabled.",
- "type": "boolean"
- }
- ]
- },
- {
- "name": "setEmitTouchEventsForMouse",
- "experimental": true,
- "parameters": [
+ "name": "scrollOffsetY",
+ "description": "Vertical scroll offset.",
+ "optional": true,
+ "type": "number"
+ },
{
- "name": "enabled",
- "description": "Whether touch emulation based on mouse input should be enabled.",
- "type": "boolean"
+ "name": "contentWidth",
+ "description": "Document content width.",
+ "optional": true,
+ "type": "number"
},
{
- "name": "configuration",
- "description": "Touch/gesture events configuration. Default: current platform.",
+ "name": "contentHeight",
+ "description": "Document content height.",
"optional": true,
- "type": "string",
- "enum": [
- "mobile",
- "desktop"
- ]
+ "type": "number"
}
]
},
{
- "name": "setEmulatedMedia",
- "description": "Emulates the given media for CSS media queries.",
- "parameters": [
+ "id": "NodeTreeSnapshot",
+ "description": "Table containing nodes.",
+ "type": "object",
+ "properties": [
{
- "name": "media",
- "description": "Media type to emulate. Empty string disables the override.",
- "type": "string"
- }
- ]
- },
- {
- "name": "setGeolocationOverride",
- "description": "Overrides the Geolocation Position or Error. Omitting any of the parameters emulates position\nunavailable.",
- "parameters": [
+ "name": "parentIndex",
+ "description": "Parent node index.",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "type": "integer"
+ }
+ },
{
- "name": "latitude",
- "description": "Mock latitude",
+ "name": "nodeType",
+ "description": "`Node`'s nodeType.",
"optional": true,
- "type": "number"
+ "type": "array",
+ "items": {
+ "type": "integer"
+ }
},
{
- "name": "longitude",
- "description": "Mock longitude",
+ "name": "shadowRootType",
+ "description": "Type of the shadow root the `Node` is in. String values are equal to the `ShadowRootType` enum.",
"optional": true,
- "type": "number"
+ "$ref": "RareStringData"
},
{
- "name": "accuracy",
- "description": "Mock accuracy",
+ "name": "nodeName",
+ "description": "`Node`'s nodeName.",
"optional": true,
- "type": "number"
- }
- ]
- },
- {
- "name": "setNavigatorOverrides",
- "description": "Overrides value returned by the javascript navigator object.",
- "experimental": true,
- "deprecated": true,
- "parameters": [
+ "type": "array",
+ "items": {
+ "$ref": "StringIndex"
+ }
+ },
{
- "name": "platform",
- "description": "The platform navigator.platform should return.",
- "type": "string"
- }
- ]
- },
- {
- "name": "setPageScaleFactor",
- "description": "Sets a specified page scale factor.",
- "experimental": true,
- "parameters": [
+ "name": "nodeValue",
+ "description": "`Node`'s nodeValue.",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "$ref": "StringIndex"
+ }
+ },
{
- "name": "pageScaleFactor",
- "description": "Page scale factor.",
- "type": "number"
- }
- ]
- },
- {
- "name": "setScriptExecutionDisabled",
- "description": "Switches script execution in the page.",
- "parameters": [
+ "name": "backendNodeId",
+ "description": "`Node`'s id, corresponds to DOM.Node.backendNodeId.",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "$ref": "DOM.BackendNodeId"
+ }
+ },
{
- "name": "value",
- "description": "Whether script execution should be disabled in the page.",
- "type": "boolean"
- }
- ]
- },
- {
- "name": "setTouchEmulationEnabled",
- "description": "Enables touch on platforms which do not support them.",
- "parameters": [
+ "name": "attributes",
+ "description": "Attributes of an `Element` node. Flatten name, value pairs.",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "$ref": "ArrayOfStrings"
+ }
+ },
{
- "name": "enabled",
- "description": "Whether the touch event emulation should be enabled.",
- "type": "boolean"
+ "name": "textValue",
+ "description": "Only set for textarea elements, contains the text value.",
+ "optional": true,
+ "$ref": "RareStringData"
},
{
- "name": "maxTouchPoints",
- "description": "Maximum touch points supported. Defaults to one.",
+ "name": "inputValue",
+ "description": "Only set for input elements, contains the input's associated text value.",
"optional": true,
- "type": "integer"
- }
- ]
- },
- {
- "name": "setVirtualTimePolicy",
- "description": "Turns on virtual time for all frames (replacing real-time with a synthetic time source) and sets\nthe current virtual time policy. Note this supersedes any previous time budget.",
- "experimental": true,
- "parameters": [
+ "$ref": "RareStringData"
+ },
{
- "name": "policy",
- "$ref": "VirtualTimePolicy"
+ "name": "inputChecked",
+ "description": "Only set for radio and checkbox input elements, indicates if the element has been checked",
+ "optional": true,
+ "$ref": "RareBooleanData"
},
{
- "name": "budget",
- "description": "If set, after this many virtual milliseconds have elapsed virtual time will be paused and a\nvirtualTimeBudgetExpired event is sent.",
+ "name": "optionSelected",
+ "description": "Only set for option elements, indicates if the element has been selected",
"optional": true,
- "type": "number"
+ "$ref": "RareBooleanData"
},
{
- "name": "maxVirtualTimeTaskStarvationCount",
- "description": "If set this specifies the maximum number of tasks that can be run before virtual is forced\nforwards to prevent deadlock.",
+ "name": "contentDocumentIndex",
+ "description": "The index of the document in the list of the snapshot documents.",
"optional": true,
- "type": "integer"
+ "$ref": "RareIntegerData"
},
{
- "name": "waitForNavigation",
- "description": "If set the virtual time policy change should be deferred until any frame starts navigating.\nNote any previous deferred policy change is superseded.",
+ "name": "pseudoType",
+ "description": "Type of a pseudo element node.",
"optional": true,
- "type": "boolean"
+ "$ref": "RareStringData"
},
{
- "name": "initialVirtualTime",
- "description": "If set, base::Time::Now will be overriden to initially return this value.",
+ "name": "isClickable",
+ "description": "Whether this DOM node responds to mouse clicks. This includes nodes that have had click\nevent listeners attached via JavaScript as well as anchor tags that naturally navigate when\nclicked.",
"optional": true,
- "$ref": "Network.TimeSinceEpoch"
- }
- ],
- "returns": [
+ "$ref": "RareBooleanData"
+ },
{
- "name": "virtualTimeTicksBase",
- "description": "Absolute timestamp at which virtual time was first enabled (up time in milliseconds).",
- "type": "number"
- }
- ]
- },
- {
- "name": "setTimezoneOverride",
- "description": "Overrides default host system timezone with the specified one.",
- "experimental": true,
- "parameters": [
+ "name": "currentSourceURL",
+ "description": "The selected url for nodes with a srcset attribute.",
+ "optional": true,
+ "$ref": "RareStringData"
+ },
{
- "name": "timezoneId",
- "description": "The timezone identifier. If empty, disables the override and\nrestores default host system timezone.",
- "type": "string"
+ "name": "originURL",
+ "description": "The url of the script (if any) that generates this node.",
+ "optional": true,
+ "$ref": "RareStringData"
}
]
},
{
- "name": "setVisibleSize",
- "description": "Resizes the frame/viewport of the page. Note that this does not affect the frame's container\n(e.g. browser window). Can be used to produce screenshots of the specified size. Not supported\non Android.",
- "experimental": true,
- "deprecated": true,
- "parameters": [
+ "id": "LayoutTreeSnapshot",
+ "description": "Table of details of an element in the DOM tree with a LayoutObject.",
+ "type": "object",
+ "properties": [
{
- "name": "width",
- "description": "Frame width (DIP).",
- "type": "integer"
+ "name": "nodeIndex",
+ "description": "Index of the corresponding node in the `NodeTreeSnapshot` array returned by `captureSnapshot`.",
+ "type": "array",
+ "items": {
+ "type": "integer"
+ }
},
{
- "name": "height",
- "description": "Frame height (DIP).",
- "type": "integer"
- }
- ]
- },
- {
- "name": "setUserAgentOverride",
- "description": "Allows overriding user agent with the given string.",
- "parameters": [
+ "name": "styles",
+ "description": "Array of indexes specifying computed style strings, filtered according to the `computedStyles` parameter passed to `captureSnapshot`.",
+ "type": "array",
+ "items": {
+ "$ref": "ArrayOfStrings"
+ }
+ },
{
- "name": "userAgent",
- "description": "User agent to use.",
- "type": "string"
+ "name": "bounds",
+ "description": "The absolute position bounding box.",
+ "type": "array",
+ "items": {
+ "$ref": "Rectangle"
+ }
},
{
- "name": "acceptLanguage",
- "description": "Browser langugage to emulate.",
+ "name": "text",
+ "description": "Contents of the LayoutText, if any.",
+ "type": "array",
+ "items": {
+ "$ref": "StringIndex"
+ }
+ },
+ {
+ "name": "stackingContexts",
+ "description": "Stacking context information.",
+ "$ref": "RareBooleanData"
+ },
+ {
+ "name": "paintOrders",
+ "description": "Global paint order index, which is determined by the stacking order of the nodes. Nodes\nthat are painted together will have the same index. Only provided if includePaintOrder in\ncaptureSnapshot was true.",
"optional": true,
- "type": "string"
+ "type": "array",
+ "items": {
+ "type": "integer"
+ }
},
{
- "name": "platform",
- "description": "The platform navigator.platform should return.",
+ "name": "offsetRects",
+ "description": "The offset rect of nodes. Only available when includeDOMRects is set to true",
"optional": true,
- "type": "string"
+ "type": "array",
+ "items": {
+ "$ref": "Rectangle"
+ }
+ },
+ {
+ "name": "scrollRects",
+ "description": "The scroll rect of nodes. Only available when includeDOMRects is set to true",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "$ref": "Rectangle"
+ }
+ },
+ {
+ "name": "clientRects",
+ "description": "The client rect of nodes. Only available when includeDOMRects is set to true",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "$ref": "Rectangle"
+ }
+ },
+ {
+ "name": "blendedBackgroundColors",
+ "description": "The list of background colors that are blended with colors of overlapping elements.",
+ "experimental": true,
+ "optional": true,
+ "type": "array",
+ "items": {
+ "$ref": "StringIndex"
+ }
+ },
+ {
+ "name": "textColorOpacities",
+ "description": "The list of computed text opacities.",
+ "experimental": true,
+ "optional": true,
+ "type": "array",
+ "items": {
+ "type": "number"
+ }
}
]
- }
- ],
- "events": [
- {
- "name": "virtualTimeBudgetExpired",
- "description": "Notification sent after the virtual time budget for the current VirtualTimePolicy has run out.",
- "experimental": true
- }
- ]
- },
- {
- "domain": "HeadlessExperimental",
- "description": "This domain provides experimental commands only supported in headless mode.",
- "experimental": true,
- "dependencies": [
- "Page",
- "Runtime"
- ],
- "types": [
+ },
{
- "id": "ScreenshotParams",
- "description": "Encoding options for a screenshot.",
+ "id": "TextBoxSnapshot",
+ "description": "Table of details of the post layout rendered text positions. The exact layout should not be regarded as\nstable and may change between versions.",
"type": "object",
"properties": [
{
- "name": "format",
- "description": "Image compression format (defaults to png).",
- "optional": true,
- "type": "string",
- "enum": [
- "jpeg",
- "png"
- ]
+ "name": "layoutIndex",
+ "description": "Index of the layout tree node that owns this box collection.",
+ "type": "array",
+ "items": {
+ "type": "integer"
+ }
},
{
- "name": "quality",
- "description": "Compression quality from range [0..100] (jpeg only).",
- "optional": true,
- "type": "integer"
+ "name": "bounds",
+ "description": "The absolute position bounding box.",
+ "type": "array",
+ "items": {
+ "$ref": "Rectangle"
+ }
+ },
+ {
+ "name": "start",
+ "description": "The starting index in characters, for this post layout textbox substring. Characters that\nwould be represented as a surrogate pair in UTF-16 have length 2.",
+ "type": "array",
+ "items": {
+ "type": "integer"
+ }
+ },
+ {
+ "name": "length",
+ "description": "The number of characters in this post layout textbox substring. Characters that would be\nrepresented as a surrogate pair in UTF-16 have length 2.",
+ "type": "array",
+ "items": {
+ "type": "integer"
+ }
}
]
}
],
"commands": [
{
- "name": "beginFrame",
- "description": "Sends a BeginFrame to the target and returns when the frame was completed. Optionally captures a\nscreenshot from the resulting frame. Requires that the target was created with enabled\nBeginFrameControl. Designed for use with --run-all-compositor-stages-before-draw, see also\nhttps://goo.gl/3zHXhB for more background.",
+ "name": "disable",
+ "description": "Disables DOM snapshot agent for the given page."
+ },
+ {
+ "name": "enable",
+ "description": "Enables DOM snapshot agent for the given page."
+ },
+ {
+ "name": "getSnapshot",
+ "description": "Returns a document snapshot, including the full DOM tree of the root node (including iframes,\ntemplate contents, and imported documents) in a flattened array, as well as layout and\nwhite-listed computed style information for the nodes. Shadow DOM in the returned DOM tree is\nflattened.",
+ "deprecated": true,
"parameters": [
{
- "name": "frameTimeTicks",
- "description": "Timestamp of this BeginFrame in Renderer TimeTicks (milliseconds of uptime). If not set,\nthe current time will be used.",
- "optional": true,
- "type": "number"
+ "name": "computedStyleWhitelist",
+ "description": "Whitelist of computed styles to return.",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
},
{
- "name": "interval",
- "description": "The interval between BeginFrames that is reported to the compositor, in milliseconds.\nDefaults to a 60 frames/second interval, i.e. about 16.666 milliseconds.",
+ "name": "includeEventListeners",
+ "description": "Whether or not to retrieve details of DOM listeners (default false).",
"optional": true,
- "type": "number"
+ "type": "boolean"
},
{
- "name": "noDisplayUpdates",
- "description": "Whether updates should not be committed and drawn onto the display. False by default. If\ntrue, only side effects of the BeginFrame will be run, such as layout and animations, but\nany visual updates may not be visible on the display or in screenshots.",
+ "name": "includePaintOrder",
+ "description": "Whether to determine and include the paint order index of LayoutTreeNodes (default false).",
"optional": true,
"type": "boolean"
},
{
- "name": "screenshot",
- "description": "If set, a screenshot of the frame will be captured and returned in the response. Otherwise,\nno screenshot will be captured. Note that capturing a screenshot can fail, for example,\nduring renderer initialization. In such a case, no screenshot data will be returned.",
+ "name": "includeUserAgentShadowTree",
+ "description": "Whether to include UA shadow tree in the snapshot (default false).",
"optional": true,
- "$ref": "ScreenshotParams"
+ "type": "boolean"
}
],
"returns": [
{
- "name": "hasDamage",
- "description": "Whether the BeginFrame resulted in damage and, thus, a new frame was committed to the\ndisplay. Reported for diagnostic uses, may be removed in the future.",
- "type": "boolean"
+ "name": "domNodes",
+ "description": "The nodes in the DOM tree. The DOMNode at index 0 corresponds to the root document.",
+ "type": "array",
+ "items": {
+ "$ref": "DOMNode"
+ }
},
{
- "name": "screenshotData",
- "description": "Base64-encoded image data of the screenshot, if one was requested and successfully taken.",
- "optional": true,
- "type": "string"
+ "name": "layoutTreeNodes",
+ "description": "The nodes in the layout tree.",
+ "type": "array",
+ "items": {
+ "$ref": "LayoutTreeNode"
+ }
+ },
+ {
+ "name": "computedStyles",
+ "description": "Whitelisted ComputedStyle properties for each node in the layout tree.",
+ "type": "array",
+ "items": {
+ "$ref": "ComputedStyle"
+ }
}
]
},
{
- "name": "disable",
- "description": "Disables headless events for the target."
- },
- {
- "name": "enable",
- "description": "Enables headless events for the target."
- }
- ],
- "events": [
- {
- "name": "needsBeginFramesChanged",
- "description": "Issued when the target starts or stops needing BeginFrames.",
+ "name": "captureSnapshot",
+ "description": "Returns a document snapshot, including the full DOM tree of the root node (including iframes,\ntemplate contents, and imported documents) in a flattened array, as well as layout and\nwhite-listed computed style information for the nodes. Shadow DOM in the returned DOM tree is\nflattened.",
"parameters": [
{
- "name": "needsBeginFrames",
- "description": "True if BeginFrames are needed, false otherwise.",
+ "name": "computedStyles",
+ "description": "Whitelist of computed styles to return.",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "includePaintOrder",
+ "description": "Whether to include layout object paint orders into the snapshot.",
+ "optional": true,
"type": "boolean"
- }
- ]
- }
- ]
- },
- {
- "domain": "IO",
- "description": "Input/Output operations for streams produced by DevTools.",
- "types": [
- {
- "id": "StreamHandle",
- "description": "This is either obtained from another method or specifed as `blob:<uuid>` where\n`<uuid>` is an UUID of a Blob.",
- "type": "string"
- }
- ],
- "commands": [
- {
- "name": "close",
- "description": "Close the stream, discard any temporary backing storage.",
- "parameters": [
- {
- "name": "handle",
- "description": "Handle of the stream to close.",
- "$ref": "StreamHandle"
- }
- ]
- },
- {
- "name": "read",
- "description": "Read a chunk of the stream",
- "parameters": [
- {
- "name": "handle",
- "description": "Handle of the stream to read.",
- "$ref": "StreamHandle"
},
{
- "name": "offset",
- "description": "Seek to the specified offset before reading (if not specificed, proceed with offset\nfollowing the last read). Some types of streams may only support sequential reads.",
+ "name": "includeDOMRects",
+ "description": "Whether to include DOM rectangles (offsetRects, clientRects, scrollRects) into the snapshot",
"optional": true,
- "type": "integer"
+ "type": "boolean"
},
{
- "name": "size",
- "description": "Maximum number of bytes to read (left upon the agent discretion if not specified).",
- "optional": true,
- "type": "integer"
- }
- ],
- "returns": [
- {
- "name": "base64Encoded",
- "description": "Set if the data is base64-encoded",
+ "name": "includeBlendedBackgroundColors",
+ "description": "Whether to include blended background colors in the snapshot (default: false).\nBlended background color is achieved by blending background colors of all elements\nthat overlap with the current element.",
+ "experimental": true,
"optional": true,
"type": "boolean"
},
{
- "name": "data",
- "description": "Data that were read.",
- "type": "string"
- },
- {
- "name": "eof",
- "description": "Set if the end-of-file condition occured while reading.",
+ "name": "includeTextColorOpacities",
+ "description": "Whether to include text color opacity in the snapshot (default: false).\nAn element might have the opacity property set that affects the text color of the element.\nThe final text color opacity is computed based on the opacity of all overlapping elements.",
+ "experimental": true,
+ "optional": true,
"type": "boolean"
}
- ]
- },
- {
- "name": "resolveBlob",
- "description": "Return UUID of Blob object specified by a remote object id.",
- "parameters": [
- {
- "name": "objectId",
- "description": "Object id of a Blob object wrapper.",
- "$ref": "Runtime.RemoteObjectId"
- }
],
"returns": [
{
- "name": "uuid",
- "description": "UUID of the specified Blob.",
- "type": "string"
+ "name": "documents",
+ "description": "The nodes in the DOM tree. The DOMNode at index 0 corresponds to the root document.",
+ "type": "array",
+ "items": {
+ "$ref": "DocumentSnapshot"
+ }
+ },
+ {
+ "name": "strings",
+ "description": "Shared string table that all string properties refer to with indexes.",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
}
]
}
]
},
{
- "domain": "IndexedDB",
+ "domain": "DOMStorage",
+ "description": "Query and modify DOM storage.",
"experimental": true,
- "dependencies": [
- "Runtime"
- ],
"types": [
{
- "id": "DatabaseWithObjectStores",
- "description": "Database with an array of object stores.",
+ "id": "StorageId",
+ "description": "DOM Storage identifier.",
"type": "object",
"properties": [
{
- "name": "name",
- "description": "Database name.",
+ "name": "securityOrigin",
+ "description": "Security origin for the storage.",
"type": "string"
},
{
- "name": "version",
- "description": "Database version (type is not 'integer', as the standard\nrequires the version number to be 'unsigned long long')",
- "type": "number"
- },
- {
- "name": "objectStores",
- "description": "Object stores in this database.",
- "type": "array",
- "items": {
- "$ref": "ObjectStore"
- }
+ "name": "isLocalStorage",
+ "description": "Whether the storage is local storage (not session storage).",
+ "type": "boolean"
}
]
},
{
- "id": "ObjectStore",
- "description": "Object store.",
- "type": "object",
- "properties": [
- {
- "name": "name",
- "description": "Object store name.",
- "type": "string"
- },
+ "id": "Item",
+ "description": "DOM Storage item.",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ ],
+ "commands": [
+ {
+ "name": "clear",
+ "parameters": [
{
- "name": "keyPath",
- "description": "Object store key path.",
- "$ref": "KeyPath"
- },
+ "name": "storageId",
+ "$ref": "StorageId"
+ }
+ ]
+ },
+ {
+ "name": "disable",
+ "description": "Disables storage tracking, prevents storage events from being sent to the client."
+ },
+ {
+ "name": "enable",
+ "description": "Enables storage tracking, storage events will now be delivered to the client."
+ },
+ {
+ "name": "getDOMStorageItems",
+ "parameters": [
{
- "name": "autoIncrement",
- "description": "If true, object store has auto increment flag set.",
- "type": "boolean"
- },
+ "name": "storageId",
+ "$ref": "StorageId"
+ }
+ ],
+ "returns": [
{
- "name": "indexes",
- "description": "Indexes in this object store.",
+ "name": "entries",
"type": "array",
"items": {
- "$ref": "ObjectStoreIndex"
+ "$ref": "Item"
}
}
]
},
{
- "id": "ObjectStoreIndex",
- "description": "Object store index.",
- "type": "object",
- "properties": [
+ "name": "removeDOMStorageItem",
+ "parameters": [
{
- "name": "name",
- "description": "Index name.",
- "type": "string"
+ "name": "storageId",
+ "$ref": "StorageId"
},
{
- "name": "keyPath",
- "description": "Index key path.",
- "$ref": "KeyPath"
+ "name": "key",
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "name": "setDOMStorageItem",
+ "parameters": [
+ {
+ "name": "storageId",
+ "$ref": "StorageId"
},
{
- "name": "unique",
- "description": "If true, index is unique.",
- "type": "boolean"
+ "name": "key",
+ "type": "string"
},
{
- "name": "multiEntry",
- "description": "If true, index allows multiple entries for a key.",
- "type": "boolean"
+ "name": "value",
+ "type": "string"
}
]
- },
+ }
+ ],
+ "events": [
{
- "id": "Key",
- "description": "Key.",
- "type": "object",
- "properties": [
+ "name": "domStorageItemAdded",
+ "parameters": [
{
- "name": "type",
- "description": "Key type.",
- "type": "string",
- "enum": [
- "number",
- "string",
- "date",
- "array"
- ]
+ "name": "storageId",
+ "$ref": "StorageId"
},
{
- "name": "number",
- "description": "Number value.",
- "optional": true,
- "type": "number"
+ "name": "key",
+ "type": "string"
},
{
- "name": "string",
- "description": "String value.",
- "optional": true,
+ "name": "newValue",
"type": "string"
- },
+ }
+ ]
+ },
+ {
+ "name": "domStorageItemRemoved",
+ "parameters": [
{
- "name": "date",
- "description": "Date value.",
- "optional": true,
- "type": "number"
+ "name": "storageId",
+ "$ref": "StorageId"
},
{
- "name": "array",
- "description": "Array value.",
- "optional": true,
- "type": "array",
- "items": {
- "$ref": "Key"
- }
+ "name": "key",
+ "type": "string"
}
]
},
{
- "id": "KeyRange",
- "description": "Key range.",
- "type": "object",
- "properties": [
+ "name": "domStorageItemUpdated",
+ "parameters": [
{
- "name": "lower",
- "description": "Lower bound.",
- "optional": true,
- "$ref": "Key"
+ "name": "storageId",
+ "$ref": "StorageId"
},
{
- "name": "upper",
- "description": "Upper bound.",
- "optional": true,
- "$ref": "Key"
+ "name": "key",
+ "type": "string"
},
{
- "name": "lowerOpen",
- "description": "If true lower bound is open.",
- "type": "boolean"
+ "name": "oldValue",
+ "type": "string"
},
{
- "name": "upperOpen",
- "description": "If true upper bound is open.",
- "type": "boolean"
+ "name": "newValue",
+ "type": "string"
}
]
},
{
- "id": "DataEntry",
- "description": "Data entry.",
- "type": "object",
- "properties": [
- {
- "name": "key",
- "description": "Key object.",
- "$ref": "Runtime.RemoteObject"
- },
- {
- "name": "primaryKey",
- "description": "Primary key object.",
- "$ref": "Runtime.RemoteObject"
- },
+ "name": "domStorageItemsCleared",
+ "parameters": [
{
- "name": "value",
- "description": "Value object.",
- "$ref": "Runtime.RemoteObject"
+ "name": "storageId",
+ "$ref": "StorageId"
}
]
+ }
+ ]
+ },
+ {
+ "domain": "Database",
+ "experimental": true,
+ "types": [
+ {
+ "id": "DatabaseId",
+ "description": "Unique identifier of Database object.",
+ "type": "string"
},
{
- "id": "KeyPath",
- "description": "Key path.",
+ "id": "Database",
+ "description": "Database object.",
"type": "object",
"properties": [
{
- "name": "type",
- "description": "Key path type.",
- "type": "string",
- "enum": [
- "null",
- "string",
- "array"
- ]
- },
- {
- "name": "string",
- "description": "String value.",
- "optional": true,
- "type": "string"
+ "name": "id",
+ "description": "Database ID.",
+ "$ref": "DatabaseId"
},
{
- "name": "array",
- "description": "Array value.",
- "optional": true,
- "type": "array",
- "items": {
- "type": "string"
- }
- }
- ]
- }
- ],
- "commands": [
- {
- "name": "clearObjectStore",
- "description": "Clears all entries from an object store.",
- "parameters": [
- {
- "name": "securityOrigin",
- "description": "Security origin.",
+ "name": "domain",
+ "description": "Database domain.",
"type": "string"
},
{
- "name": "databaseName",
+ "name": "name",
"description": "Database name.",
"type": "string"
},
{
- "name": "objectStoreName",
- "description": "Object store name.",
- "type": "string"
- }
- ]
- },
- {
- "name": "deleteDatabase",
- "description": "Deletes a database.",
- "parameters": [
- {
- "name": "securityOrigin",
- "description": "Security origin.",
- "type": "string"
- },
- {
- "name": "databaseName",
- "description": "Database name.",
+ "name": "version",
+ "description": "Database version.",
"type": "string"
}
]
},
{
- "name": "deleteObjectStoreEntries",
- "description": "Delete a range of entries from an object store",
- "parameters": [
- {
- "name": "securityOrigin",
- "type": "string"
- },
- {
- "name": "databaseName",
- "type": "string"
- },
+ "id": "Error",
+ "description": "Database error.",
+ "type": "object",
+ "properties": [
{
- "name": "objectStoreName",
+ "name": "message",
+ "description": "Error message.",
"type": "string"
},
{
- "name": "keyRange",
- "description": "Range of entry keys to delete",
- "$ref": "KeyRange"
+ "name": "code",
+ "description": "Error code.",
+ "type": "integer"
}
]
- },
+ }
+ ],
+ "commands": [
{
"name": "disable",
- "description": "Disables events from backend."
+ "description": "Disables database tracking, prevents database events from being sent to the client."
},
{
"name": "enable",
- "description": "Enables events from backend."
+ "description": "Enables database tracking, database events will now be delivered to the client."
},
{
- "name": "requestData",
- "description": "Requests data from object store or index.",
+ "name": "executeSQL",
"parameters": [
{
- "name": "securityOrigin",
- "description": "Security origin.",
- "type": "string"
- },
- {
- "name": "databaseName",
- "description": "Database name.",
- "type": "string"
- },
- {
- "name": "objectStoreName",
- "description": "Object store name.",
- "type": "string"
+ "name": "databaseId",
+ "$ref": "DatabaseId"
},
{
- "name": "indexName",
- "description": "Index name, empty string for object store data requests.",
+ "name": "query",
"type": "string"
- },
- {
- "name": "skipCount",
- "description": "Number of records to skip.",
- "type": "integer"
- },
- {
- "name": "pageSize",
- "description": "Number of records to fetch.",
- "type": "integer"
- },
- {
- "name": "keyRange",
- "description": "Key range.",
- "optional": true,
- "$ref": "KeyRange"
}
],
"returns": [
{
- "name": "objectStoreDataEntries",
- "description": "Array of object store data entries.",
+ "name": "columnNames",
+ "optional": true,
"type": "array",
"items": {
- "$ref": "DataEntry"
+ "type": "string"
}
},
{
- "name": "hasMore",
- "description": "If true, there are more entries to fetch in the given range.",
- "type": "boolean"
+ "name": "values",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "type": "any"
+ }
+ },
+ {
+ "name": "sqlError",
+ "optional": true,
+ "$ref": "Error"
}
]
},
{
- "name": "getMetadata",
- "description": "Gets metadata of an object store",
+ "name": "getDatabaseTableNames",
"parameters": [
{
- "name": "securityOrigin",
- "description": "Security origin.",
- "type": "string"
- },
- {
- "name": "databaseName",
- "description": "Database name.",
- "type": "string"
- },
- {
- "name": "objectStoreName",
- "description": "Object store name.",
- "type": "string"
+ "name": "databaseId",
+ "$ref": "DatabaseId"
}
],
"returns": [
{
- "name": "entriesCount",
- "description": "the entries count",
- "type": "number"
- },
- {
- "name": "keyGeneratorValue",
- "description": "the current value of key generator, to become the next inserted\nkey into the object store. Valid if objectStore.autoIncrement\nis true.",
- "type": "number"
+ "name": "tableNames",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
}
]
- },
+ }
+ ],
+ "events": [
{
- "name": "requestDatabase",
- "description": "Requests database with given name in given frame.",
+ "name": "addDatabase",
"parameters": [
{
- "name": "securityOrigin",
- "description": "Security origin.",
- "type": "string"
- },
- {
- "name": "databaseName",
- "description": "Database name.",
- "type": "string"
- }
- ],
- "returns": [
- {
- "name": "databaseWithObjectStores",
- "description": "Database with an array of object stores.",
- "$ref": "DatabaseWithObjectStores"
+ "name": "database",
+ "$ref": "Database"
}
]
+ }
+ ]
+ },
+ {
+ "domain": "DeviceOrientation",
+ "experimental": true,
+ "commands": [
+ {
+ "name": "clearDeviceOrientationOverride",
+ "description": "Clears the overridden Device Orientation."
},
{
- "name": "requestDatabaseNames",
- "description": "Requests database names for given security origin.",
+ "name": "setDeviceOrientationOverride",
+ "description": "Overrides the Device Orientation.",
"parameters": [
{
- "name": "securityOrigin",
- "description": "Security origin.",
- "type": "string"
- }
- ],
- "returns": [
+ "name": "alpha",
+ "description": "Mock alpha",
+ "type": "number"
+ },
{
- "name": "databaseNames",
- "description": "Database names for origin.",
- "type": "array",
- "items": {
- "type": "string"
- }
+ "name": "beta",
+ "description": "Mock beta",
+ "type": "number"
+ },
+ {
+ "name": "gamma",
+ "description": "Mock gamma",
+ "type": "number"
}
]
}
]
},
{
- "domain": "Input",
+ "domain": "Emulation",
+ "description": "This domain emulates different environments for the page.",
+ "dependencies": [
+ "DOM",
+ "Page",
+ "Runtime"
+ ],
"types": [
{
- "id": "TouchPoint",
+ "id": "ScreenOrientation",
+ "description": "Screen orientation.",
"type": "object",
"properties": [
{
- "name": "x",
- "description": "X coordinate of the event relative to the main frame's viewport in CSS pixels.",
- "type": "number"
+ "name": "type",
+ "description": "Orientation type.",
+ "type": "string",
+ "enum": [
+ "portraitPrimary",
+ "portraitSecondary",
+ "landscapePrimary",
+ "landscapeSecondary"
+ ]
},
{
- "name": "y",
- "description": "Y coordinate of the event relative to the main frame's viewport in CSS pixels. 0 refers to\nthe top of the viewport and Y increases as it proceeds towards the bottom of the viewport.",
- "type": "number"
- },
+ "name": "angle",
+ "description": "Orientation angle.",
+ "type": "integer"
+ }
+ ]
+ },
+ {
+ "id": "DisplayFeature",
+ "type": "object",
+ "properties": [
{
- "name": "radiusX",
- "description": "X radius of the touch area (default: 1.0).",
- "optional": true,
- "type": "number"
+ "name": "orientation",
+ "description": "Orientation of a display feature in relation to screen",
+ "type": "string",
+ "enum": [
+ "vertical",
+ "horizontal"
+ ]
},
{
- "name": "radiusY",
- "description": "Y radius of the touch area (default: 1.0).",
- "optional": true,
- "type": "number"
+ "name": "offset",
+ "description": "The offset from the screen origin in either the x (for vertical\norientation) or y (for horizontal orientation) direction.",
+ "type": "integer"
},
{
- "name": "rotationAngle",
- "description": "Rotation angle (default: 0.0).",
- "optional": true,
- "type": "number"
- },
+ "name": "maskLength",
+ "description": "A display feature may mask content such that it is not physically\ndisplayed - this length along with the offset describes this area.\nA display feature that only splits content will have a 0 mask_length.",
+ "type": "integer"
+ }
+ ]
+ },
+ {
+ "id": "MediaFeature",
+ "type": "object",
+ "properties": [
{
- "name": "force",
- "description": "Force (default: 1.0).",
- "optional": true,
- "type": "number"
+ "name": "name",
+ "type": "string"
},
{
- "name": "id",
- "description": "Identifier used to track touch sources between events, must be unique within an event.",
- "optional": true,
- "type": "number"
+ "name": "value",
+ "type": "string"
}
]
},
{
- "id": "GestureSourceType",
+ "id": "VirtualTimePolicy",
+ "description": "advance: If the scheduler runs out of immediate work, the virtual time base may fast forward to\nallow the next delayed task (if any) to run; pause: The virtual time base may not advance;\npauseIfNetworkFetchesPending: The virtual time base may not advance if there are any pending\nresource fetches.",
"experimental": true,
"type": "string",
"enum": [
- "default",
- "touch",
- "mouse"
+ "advance",
+ "pause",
+ "pauseIfNetworkFetchesPending"
]
},
{
- "id": "TimeSinceEpoch",
- "description": "UTC time in seconds, counted from January 1, 1970.",
- "type": "number"
- }
- ],
- "commands": [
- {
- "name": "dispatchKeyEvent",
- "description": "Dispatches a key event to the page.",
- "parameters": [
+ "id": "UserAgentBrandVersion",
+ "description": "Used to specify User Agent Cient Hints to emulate. See https://wicg.github.io/ua-client-hints",
+ "experimental": true,
+ "type": "object",
+ "properties": [
{
- "name": "type",
- "description": "Type of the key event.",
- "type": "string",
- "enum": [
- "keyDown",
- "keyUp",
- "rawKeyDown",
- "char"
- ]
+ "name": "brand",
+ "type": "string"
},
{
- "name": "modifiers",
- "description": "Bit field representing pressed modifier keys. Alt=1, Ctrl=2, Meta/Command=4, Shift=8\n(default: 0).",
- "optional": true,
- "type": "integer"
- },
+ "name": "version",
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "id": "UserAgentMetadata",
+ "description": "Used to specify User Agent Cient Hints to emulate. See https://wicg.github.io/ua-client-hints\nMissing optional values will be filled in by the target with what it would normally use.",
+ "experimental": true,
+ "type": "object",
+ "properties": [
{
- "name": "timestamp",
- "description": "Time at which the event occurred.",
+ "name": "brands",
"optional": true,
- "$ref": "TimeSinceEpoch"
+ "type": "array",
+ "items": {
+ "$ref": "UserAgentBrandVersion"
+ }
},
{
- "name": "text",
- "description": "Text as generated by processing a virtual key code with a keyboard layout. Not needed for\nfor `keyUp` and `rawKeyDown` events (default: \"\")",
+ "name": "fullVersionList",
"optional": true,
- "type": "string"
+ "type": "array",
+ "items": {
+ "$ref": "UserAgentBrandVersion"
+ }
},
{
- "name": "unmodifiedText",
- "description": "Text that would have been generated by the keyboard if no modifiers were pressed (except for\nshift). Useful for shortcut (accelerator) key handling (default: \"\").",
+ "name": "fullVersion",
+ "deprecated": true,
"optional": true,
"type": "string"
},
{
- "name": "keyIdentifier",
- "description": "Unique key identifier (e.g., 'U+0041') (default: \"\").",
- "optional": true,
+ "name": "platform",
"type": "string"
},
{
- "name": "code",
- "description": "Unique DOM defined string value for each physical key (e.g., 'KeyA') (default: \"\").",
- "optional": true,
+ "name": "platformVersion",
"type": "string"
},
{
- "name": "key",
- "description": "Unique DOM defined string value describing the meaning of the key in the context of active\nmodifiers, keyboard layout, etc (e.g., 'AltGr') (default: \"\").",
- "optional": true,
+ "name": "architecture",
"type": "string"
},
{
- "name": "windowsVirtualKeyCode",
- "description": "Windows virtual key code (default: 0).",
- "optional": true,
- "type": "integer"
- },
- {
- "name": "nativeVirtualKeyCode",
- "description": "Native virtual key code (default: 0).",
- "optional": true,
- "type": "integer"
- },
- {
- "name": "autoRepeat",
- "description": "Whether the event was generated from auto repeat (default: false).",
- "optional": true,
- "type": "boolean"
- },
- {
- "name": "isKeypad",
- "description": "Whether the event was generated from the keypad (default: false).",
- "optional": true,
- "type": "boolean"
+ "name": "model",
+ "type": "string"
},
{
- "name": "isSystemKey",
- "description": "Whether the event was a system key event (default: false).",
- "optional": true,
+ "name": "mobile",
"type": "boolean"
- },
- {
- "name": "location",
- "description": "Whether the event was from the left or right side of the keyboard. 1=Left, 2=Right (default:\n0).",
- "optional": true,
- "type": "integer"
}
]
},
{
- "name": "insertText",
- "description": "This method emulates inserting text that doesn't come from a key press,\nfor example an emoji keyboard or an IME.",
+ "id": "DisabledImageType",
+ "description": "Enum of image types that can be disabled.",
"experimental": true,
- "parameters": [
+ "type": "string",
+ "enum": [
+ "avif",
+ "jxl",
+ "webp"
+ ]
+ }
+ ],
+ "commands": [
+ {
+ "name": "canEmulate",
+ "description": "Tells whether emulation is supported.",
+ "returns": [
{
- "name": "text",
- "description": "The text to insert.",
- "type": "string"
+ "name": "result",
+ "description": "True if emulation is supported.",
+ "type": "boolean"
}
]
},
{
- "name": "dispatchMouseEvent",
- "description": "Dispatches a mouse event to the page.",
- "parameters": [
- {
- "name": "type",
- "description": "Type of the mouse event.",
- "type": "string",
- "enum": [
- "mousePressed",
- "mouseReleased",
- "mouseMoved",
- "mouseWheel"
- ]
+ "name": "clearDeviceMetricsOverride",
+ "description": "Clears the overridden device metrics."
+ },
+ {
+ "name": "clearGeolocationOverride",
+ "description": "Clears the overridden Geolocation Position and Error."
+ },
+ {
+ "name": "resetPageScaleFactor",
+ "description": "Requests that page scale factor is reset to initial values.",
+ "experimental": true
+ },
+ {
+ "name": "setFocusEmulationEnabled",
+ "description": "Enables or disables simulating a focused and active page.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "enabled",
+ "description": "Whether to enable to disable focus emulation.",
+ "type": "boolean"
+ }
+ ]
+ },
+ {
+ "name": "setAutoDarkModeOverride",
+ "description": "Automatically render all web contents using a dark theme.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "enabled",
+ "description": "Whether to enable or disable automatic dark mode.\nIf not specified, any existing override will be cleared.",
+ "optional": true,
+ "type": "boolean"
+ }
+ ]
+ },
+ {
+ "name": "setCPUThrottlingRate",
+ "description": "Enables CPU throttling to emulate slow CPUs.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "rate",
+ "description": "Throttling rate as a slowdown factor (1 is no throttle, 2 is 2x slowdown, etc).",
+ "type": "number"
+ }
+ ]
+ },
+ {
+ "name": "setDefaultBackgroundColorOverride",
+ "description": "Sets or clears an override of the default background color of the frame. This override is used\nif the content does not specify one.",
+ "parameters": [
+ {
+ "name": "color",
+ "description": "RGBA of the default background color. If not specified, any existing override will be\ncleared.",
+ "optional": true,
+ "$ref": "DOM.RGBA"
+ }
+ ]
+ },
+ {
+ "name": "setDeviceMetricsOverride",
+ "description": "Overrides the values of device screen dimensions (window.screen.width, window.screen.height,\nwindow.innerWidth, window.innerHeight, and \"device-width\"/\"device-height\"-related CSS media\nquery results).",
+ "parameters": [
+ {
+ "name": "width",
+ "description": "Overriding width value in pixels (minimum 0, maximum 10000000). 0 disables the override.",
+ "type": "integer"
},
{
- "name": "x",
- "description": "X coordinate of the event relative to the main frame's viewport in CSS pixels.",
+ "name": "height",
+ "description": "Overriding height value in pixels (minimum 0, maximum 10000000). 0 disables the override.",
+ "type": "integer"
+ },
+ {
+ "name": "deviceScaleFactor",
+ "description": "Overriding device scale factor value. 0 disables the override.",
"type": "number"
},
{
- "name": "y",
- "description": "Y coordinate of the event relative to the main frame's viewport in CSS pixels. 0 refers to\nthe top of the viewport and Y increases as it proceeds towards the bottom of the viewport.",
+ "name": "mobile",
+ "description": "Whether to emulate mobile device. This includes viewport meta tag, overlay scrollbars, text\nautosizing and more.",
+ "type": "boolean"
+ },
+ {
+ "name": "scale",
+ "description": "Scale to apply to resulting view image.",
+ "experimental": true,
+ "optional": true,
"type": "number"
},
{
- "name": "modifiers",
- "description": "Bit field representing pressed modifier keys. Alt=1, Ctrl=2, Meta/Command=4, Shift=8\n(default: 0).",
+ "name": "screenWidth",
+ "description": "Overriding screen width value in pixels (minimum 0, maximum 10000000).",
+ "experimental": true,
"optional": true,
"type": "integer"
},
{
- "name": "timestamp",
- "description": "Time at which the event occurred.",
+ "name": "screenHeight",
+ "description": "Overriding screen height value in pixels (minimum 0, maximum 10000000).",
+ "experimental": true,
"optional": true,
- "$ref": "TimeSinceEpoch"
+ "type": "integer"
},
{
- "name": "button",
- "description": "Mouse button (default: \"none\").",
+ "name": "positionX",
+ "description": "Overriding view X position on screen in pixels (minimum 0, maximum 10000000).",
+ "experimental": true,
"optional": true,
- "type": "string",
- "enum": [
- "none",
- "left",
- "middle",
- "right",
- "back",
- "forward"
- ]
+ "type": "integer"
},
{
- "name": "buttons",
- "description": "A number indicating which buttons are pressed on the mouse when a mouse event is triggered.\nLeft=1, Right=2, Middle=4, Back=8, Forward=16, None=0.",
+ "name": "positionY",
+ "description": "Overriding view Y position on screen in pixels (minimum 0, maximum 10000000).",
+ "experimental": true,
"optional": true,
"type": "integer"
},
{
- "name": "clickCount",
- "description": "Number of times the mouse button was clicked (default: 0).",
+ "name": "dontSetVisibleSize",
+ "description": "Do not set visible view size, rely upon explicit setVisibleSize call.",
+ "experimental": true,
"optional": true,
- "type": "integer"
+ "type": "boolean"
},
{
- "name": "deltaX",
- "description": "X delta in CSS pixels for mouse wheel event (default: 0).",
+ "name": "screenOrientation",
+ "description": "Screen orientation override.",
"optional": true,
- "type": "number"
+ "$ref": "ScreenOrientation"
},
{
- "name": "deltaY",
- "description": "Y delta in CSS pixels for mouse wheel event (default: 0).",
+ "name": "viewport",
+ "description": "If set, the visible area of the page will be overridden to this viewport. This viewport\nchange is not observed by the page, e.g. viewport-relative elements do not change positions.",
+ "experimental": true,
"optional": true,
- "type": "number"
+ "$ref": "Page.Viewport"
},
{
- "name": "pointerType",
- "description": "Pointer type (default: \"mouse\").",
+ "name": "displayFeature",
+ "description": "If set, the display feature of a multi-segment screen. If not set, multi-segment support\nis turned-off.",
+ "experimental": true,
"optional": true,
- "type": "string",
- "enum": [
- "mouse",
- "pen"
- ]
+ "$ref": "DisplayFeature"
}
]
},
{
- "name": "dispatchTouchEvent",
- "description": "Dispatches a touch event to the page.",
+ "name": "setScrollbarsHidden",
+ "experimental": true,
"parameters": [
{
- "name": "type",
- "description": "Type of the touch event. TouchEnd and TouchCancel must not contain any touch points, while\nTouchStart and TouchMove must contains at least one.",
+ "name": "hidden",
+ "description": "Whether scrollbars should be always hidden.",
+ "type": "boolean"
+ }
+ ]
+ },
+ {
+ "name": "setDocumentCookieDisabled",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "disabled",
+ "description": "Whether document.coookie API should be disabled.",
+ "type": "boolean"
+ }
+ ]
+ },
+ {
+ "name": "setEmitTouchEventsForMouse",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "enabled",
+ "description": "Whether touch emulation based on mouse input should be enabled.",
+ "type": "boolean"
+ },
+ {
+ "name": "configuration",
+ "description": "Touch/gesture events configuration. Default: current platform.",
+ "optional": true,
"type": "string",
"enum": [
- "touchStart",
- "touchEnd",
- "touchMove",
- "touchCancel"
+ "mobile",
+ "desktop"
]
- },
- {
- "name": "touchPoints",
- "description": "Active touch points on the touch device. One event per any changed point (compared to\nprevious touch event in a sequence) is generated, emulating pressing/moving/releasing points\none by one.",
- "type": "array",
- "items": {
- "$ref": "TouchPoint"
- }
- },
+ }
+ ]
+ },
+ {
+ "name": "setEmulatedMedia",
+ "description": "Emulates the given media type or media feature for CSS media queries.",
+ "parameters": [
{
- "name": "modifiers",
- "description": "Bit field representing pressed modifier keys. Alt=1, Ctrl=2, Meta/Command=4, Shift=8\n(default: 0).",
+ "name": "media",
+ "description": "Media type to emulate. Empty string disables the override.",
"optional": true,
- "type": "integer"
+ "type": "string"
},
{
- "name": "timestamp",
- "description": "Time at which the event occurred.",
+ "name": "features",
+ "description": "Media features to emulate.",
"optional": true,
- "$ref": "TimeSinceEpoch"
+ "type": "array",
+ "items": {
+ "$ref": "MediaFeature"
+ }
}
]
},
{
- "name": "emulateTouchFromMouseEvent",
- "description": "Emulates touch event from the mouse event parameters.",
+ "name": "setEmulatedVisionDeficiency",
+ "description": "Emulates the given vision deficiency.",
"experimental": true,
"parameters": [
{
"name": "type",
- "description": "Type of the mouse event.",
- "type": "string",
- "enum": [
- "mousePressed",
- "mouseReleased",
- "mouseMoved",
- "mouseWheel"
- ]
- },
- {
- "name": "x",
- "description": "X coordinate of the mouse pointer in DIP.",
- "type": "integer"
- },
- {
- "name": "y",
- "description": "Y coordinate of the mouse pointer in DIP.",
- "type": "integer"
- },
- {
- "name": "button",
- "description": "Mouse button.",
+ "description": "Vision deficiency to emulate.",
"type": "string",
"enum": [
"none",
- "left",
- "middle",
- "right"
+ "achromatopsia",
+ "blurredVision",
+ "deuteranopia",
+ "protanopia",
+ "tritanopia"
]
- },
+ }
+ ]
+ },
+ {
+ "name": "setGeolocationOverride",
+ "description": "Overrides the Geolocation Position or Error. Omitting any of the parameters emulates position\nunavailable.",
+ "parameters": [
{
- "name": "timestamp",
- "description": "Time at which the event occurred (default: current time).",
+ "name": "latitude",
+ "description": "Mock latitude",
"optional": true,
- "$ref": "TimeSinceEpoch"
+ "type": "number"
},
{
- "name": "deltaX",
- "description": "X delta in DIP for mouse wheel event (default: 0).",
+ "name": "longitude",
+ "description": "Mock longitude",
"optional": true,
"type": "number"
},
{
- "name": "deltaY",
- "description": "Y delta in DIP for mouse wheel event (default: 0).",
+ "name": "accuracy",
+ "description": "Mock accuracy",
"optional": true,
"type": "number"
- },
+ }
+ ]
+ },
+ {
+ "name": "setIdleOverride",
+ "description": "Overrides the Idle state.",
+ "experimental": true,
+ "parameters": [
{
- "name": "modifiers",
- "description": "Bit field representing pressed modifier keys. Alt=1, Ctrl=2, Meta/Command=4, Shift=8\n(default: 0).",
- "optional": true,
- "type": "integer"
+ "name": "isUserActive",
+ "description": "Mock isUserActive",
+ "type": "boolean"
},
{
- "name": "clickCount",
- "description": "Number of times the mouse button was clicked (default: 0).",
- "optional": true,
- "type": "integer"
+ "name": "isScreenUnlocked",
+ "description": "Mock isScreenUnlocked",
+ "type": "boolean"
}
]
},
{
- "name": "setIgnoreInputEvents",
- "description": "Ignores input events (useful while auditing page).",
+ "name": "clearIdleOverride",
+ "description": "Clears Idle state overrides.",
+ "experimental": true
+ },
+ {
+ "name": "setNavigatorOverrides",
+ "description": "Overrides value returned by the javascript navigator object.",
+ "experimental": true,
+ "deprecated": true,
"parameters": [
{
- "name": "ignore",
- "description": "Ignores input events processing when set to true.",
- "type": "boolean"
+ "name": "platform",
+ "description": "The platform navigator.platform should return.",
+ "type": "string"
}
]
},
{
- "name": "synthesizePinchGesture",
- "description": "Synthesizes a pinch gesture over a time period by issuing appropriate touch events.",
+ "name": "setPageScaleFactor",
+ "description": "Sets a specified page scale factor.",
"experimental": true,
"parameters": [
{
- "name": "x",
- "description": "X coordinate of the start of the gesture in CSS pixels.",
+ "name": "pageScaleFactor",
+ "description": "Page scale factor.",
"type": "number"
- },
+ }
+ ]
+ },
+ {
+ "name": "setScriptExecutionDisabled",
+ "description": "Switches script execution in the page.",
+ "parameters": [
{
- "name": "y",
- "description": "Y coordinate of the start of the gesture in CSS pixels.",
- "type": "number"
- },
+ "name": "value",
+ "description": "Whether script execution should be disabled in the page.",
+ "type": "boolean"
+ }
+ ]
+ },
+ {
+ "name": "setTouchEmulationEnabled",
+ "description": "Enables touch on platforms which do not support them.",
+ "parameters": [
{
- "name": "scaleFactor",
- "description": "Relative scale factor after zooming (>1.0 zooms in, <1.0 zooms out).",
- "type": "number"
+ "name": "enabled",
+ "description": "Whether the touch event emulation should be enabled.",
+ "type": "boolean"
},
{
- "name": "relativeSpeed",
- "description": "Relative pointer speed in pixels per second (default: 800).",
+ "name": "maxTouchPoints",
+ "description": "Maximum touch points supported. Defaults to one.",
"optional": true,
"type": "integer"
- },
- {
- "name": "gestureSourceType",
- "description": "Which type of input events to be generated (default: 'default', which queries the platform\nfor the preferred input type).",
- "optional": true,
- "$ref": "GestureSourceType"
}
]
},
{
- "name": "synthesizeScrollGesture",
- "description": "Synthesizes a scroll gesture over a time period by issuing appropriate touch events.",
+ "name": "setVirtualTimePolicy",
+ "description": "Turns on virtual time for all frames (replacing real-time with a synthetic time source) and sets\nthe current virtual time policy. Note this supersedes any previous time budget.",
"experimental": true,
"parameters": [
{
- "name": "x",
- "description": "X coordinate of the start of the gesture in CSS pixels.",
- "type": "number"
- },
- {
- "name": "y",
- "description": "Y coordinate of the start of the gesture in CSS pixels.",
- "type": "number"
+ "name": "policy",
+ "$ref": "VirtualTimePolicy"
},
{
- "name": "xDistance",
- "description": "The distance to scroll along the X axis (positive to scroll left).",
+ "name": "budget",
+ "description": "If set, after this many virtual milliseconds have elapsed virtual time will be paused and a\nvirtualTimeBudgetExpired event is sent.",
"optional": true,
"type": "number"
},
{
- "name": "yDistance",
- "description": "The distance to scroll along the Y axis (positive to scroll up).",
+ "name": "maxVirtualTimeTaskStarvationCount",
+ "description": "If set this specifies the maximum number of tasks that can be run before virtual is forced\nforwards to prevent deadlock.",
"optional": true,
- "type": "number"
+ "type": "integer"
},
{
- "name": "xOverscroll",
- "description": "The number of additional pixels to scroll back along the X axis, in addition to the given\ndistance.",
+ "name": "initialVirtualTime",
+ "description": "If set, base::Time::Now will be overridden to initially return this value.",
"optional": true,
- "type": "number"
- },
+ "$ref": "Network.TimeSinceEpoch"
+ }
+ ],
+ "returns": [
{
- "name": "yOverscroll",
- "description": "The number of additional pixels to scroll back along the Y axis, in addition to the given\ndistance.",
- "optional": true,
+ "name": "virtualTimeTicksBase",
+ "description": "Absolute timestamp at which virtual time was first enabled (up time in milliseconds).",
"type": "number"
- },
- {
- "name": "preventFling",
- "description": "Prevent fling (default: true).",
- "optional": true,
- "type": "boolean"
- },
+ }
+ ]
+ },
+ {
+ "name": "setLocaleOverride",
+ "description": "Overrides default host system locale with the specified one.",
+ "experimental": true,
+ "parameters": [
{
- "name": "speed",
- "description": "Swipe speed in pixels per second (default: 800).",
+ "name": "locale",
+ "description": "ICU style C locale (e.g. \"en_US\"). If not specified or empty, disables the override and\nrestores default host system locale.",
"optional": true,
- "type": "integer"
- },
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "name": "setTimezoneOverride",
+ "description": "Overrides default host system timezone with the specified one.",
+ "experimental": true,
+ "parameters": [
{
- "name": "gestureSourceType",
- "description": "Which type of input events to be generated (default: 'default', which queries the platform\nfor the preferred input type).",
- "optional": true,
- "$ref": "GestureSourceType"
- },
+ "name": "timezoneId",
+ "description": "The timezone identifier. If empty, disables the override and\nrestores default host system timezone.",
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "name": "setVisibleSize",
+ "description": "Resizes the frame/viewport of the page. Note that this does not affect the frame's container\n(e.g. browser window). Can be used to produce screenshots of the specified size. Not supported\non Android.",
+ "experimental": true,
+ "deprecated": true,
+ "parameters": [
{
- "name": "repeatCount",
- "description": "The number of times to repeat the gesture (default: 0).",
- "optional": true,
+ "name": "width",
+ "description": "Frame width (DIP).",
"type": "integer"
},
{
- "name": "repeatDelayMs",
- "description": "The number of milliseconds delay between each repeat. (default: 250).",
- "optional": true,
+ "name": "height",
+ "description": "Frame height (DIP).",
"type": "integer"
- },
- {
- "name": "interactionMarkerName",
- "description": "The name of the interaction markers to generate, if not empty (default: \"\").",
- "optional": true,
- "type": "string"
}
]
},
{
- "name": "synthesizeTapGesture",
- "description": "Synthesizes a tap gesture over a time period by issuing appropriate touch events.",
+ "name": "setDisabledImageTypes",
"experimental": true,
"parameters": [
{
- "name": "x",
- "description": "X coordinate of the start of the gesture in CSS pixels.",
- "type": "number"
- },
+ "name": "imageTypes",
+ "description": "Image types to disable.",
+ "type": "array",
+ "items": {
+ "$ref": "DisabledImageType"
+ }
+ }
+ ]
+ },
+ {
+ "name": "setUserAgentOverride",
+ "description": "Allows overriding user agent with the given string.",
+ "parameters": [
{
- "name": "y",
- "description": "Y coordinate of the start of the gesture in CSS pixels.",
- "type": "number"
+ "name": "userAgent",
+ "description": "User agent to use.",
+ "type": "string"
},
{
- "name": "duration",
- "description": "Duration between touchdown and touchup events in ms (default: 50).",
+ "name": "acceptLanguage",
+ "description": "Browser language to emulate.",
"optional": true,
- "type": "integer"
+ "type": "string"
},
{
- "name": "tapCount",
- "description": "Number of times to perform the tap (e.g. 2 for double tap, default: 1).",
+ "name": "platform",
+ "description": "The platform navigator.platform should return.",
"optional": true,
- "type": "integer"
+ "type": "string"
},
{
- "name": "gestureSourceType",
- "description": "Which type of input events to be generated (default: 'default', which queries the platform\nfor the preferred input type).",
+ "name": "userAgentMetadata",
+ "description": "To be sent in Sec-CH-UA-* headers and returned in navigator.userAgentData",
+ "experimental": true,
"optional": true,
- "$ref": "GestureSourceType"
+ "$ref": "UserAgentMetadata"
}
]
}
- ]
- },
- {
- "domain": "Inspector",
- "experimental": true,
- "commands": [
- {
- "name": "disable",
- "description": "Disables inspector domain notifications."
- },
- {
- "name": "enable",
- "description": "Enables inspector domain notifications."
- }
],
"events": [
{
- "name": "detached",
- "description": "Fired when remote debugging connection is about to be terminated. Contains detach reason.",
- "parameters": [
- {
- "name": "reason",
- "description": "The reason why connection has been terminated.",
- "type": "string"
- }
- ]
- },
- {
- "name": "targetCrashed",
- "description": "Fired when debugging target has crashed"
- },
- {
- "name": "targetReloadedAfterCrash",
- "description": "Fired when debugging target has reloaded after crash"
+ "name": "virtualTimeBudgetExpired",
+ "description": "Notification sent after the virtual time budget for the current VirtualTimePolicy has run out.",
+ "experimental": true
}
]
},
{
- "domain": "LayerTree",
+ "domain": "HeadlessExperimental",
+ "description": "This domain provides experimental commands only supported in headless mode.",
"experimental": true,
"dependencies": [
- "DOM"
+ "Page",
+ "Runtime"
],
"types": [
{
- "id": "LayerId",
- "description": "Unique Layer identifier.",
- "type": "string"
- },
- {
- "id": "SnapshotId",
- "description": "Unique snapshot identifier.",
- "type": "string"
- },
- {
- "id": "ScrollRect",
- "description": "Rectangle where scrolling happens on the main thread.",
+ "id": "ScreenshotParams",
+ "description": "Encoding options for a screenshot.",
"type": "object",
"properties": [
{
- "name": "rect",
- "description": "Rectangle itself.",
- "$ref": "DOM.Rect"
- },
- {
- "name": "type",
- "description": "Reason for rectangle to force scrolling on the main thread",
+ "name": "format",
+ "description": "Image compression format (defaults to png).",
+ "optional": true,
"type": "string",
"enum": [
- "RepaintsOnScroll",
- "TouchEventHandler",
- "WheelEventHandler"
+ "jpeg",
+ "png"
]
+ },
+ {
+ "name": "quality",
+ "description": "Compression quality from range [0..100] (jpeg only).",
+ "optional": true,
+ "type": "integer"
}
]
- },
+ }
+ ],
+ "commands": [
{
- "id": "StickyPositionConstraint",
- "description": "Sticky position constraints.",
- "type": "object",
- "properties": [
+ "name": "beginFrame",
+ "description": "Sends a BeginFrame to the target and returns when the frame was completed. Optionally captures a\nscreenshot from the resulting frame. Requires that the target was created with enabled\nBeginFrameControl. Designed for use with --run-all-compositor-stages-before-draw, see also\nhttps://goo.gl/3zHXhB for more background.",
+ "parameters": [
{
- "name": "stickyBoxRect",
- "description": "Layout rectangle of the sticky element before being shifted",
- "$ref": "DOM.Rect"
+ "name": "frameTimeTicks",
+ "description": "Timestamp of this BeginFrame in Renderer TimeTicks (milliseconds of uptime). If not set,\nthe current time will be used.",
+ "optional": true,
+ "type": "number"
},
{
- "name": "containingBlockRect",
- "description": "Layout rectangle of the containing block of the sticky element",
- "$ref": "DOM.Rect"
+ "name": "interval",
+ "description": "The interval between BeginFrames that is reported to the compositor, in milliseconds.\nDefaults to a 60 frames/second interval, i.e. about 16.666 milliseconds.",
+ "optional": true,
+ "type": "number"
},
{
- "name": "nearestLayerShiftingStickyBox",
- "description": "The nearest sticky layer that shifts the sticky box",
+ "name": "noDisplayUpdates",
+ "description": "Whether updates should not be committed and drawn onto the display. False by default. If\ntrue, only side effects of the BeginFrame will be run, such as layout and animations, but\nany visual updates may not be visible on the display or in screenshots.",
"optional": true,
- "$ref": "LayerId"
+ "type": "boolean"
},
{
- "name": "nearestLayerShiftingContainingBlock",
- "description": "The nearest sticky layer that shifts the containing block",
+ "name": "screenshot",
+ "description": "If set, a screenshot of the frame will be captured and returned in the response. Otherwise,\nno screenshot will be captured. Note that capturing a screenshot can fail, for example,\nduring renderer initialization. In such a case, no screenshot data will be returned.",
"optional": true,
- "$ref": "LayerId"
+ "$ref": "ScreenshotParams"
}
- ]
- },
- {
- "id": "PictureTile",
- "description": "Serialized fragment of layer picture along with its offset within the layer.",
- "type": "object",
- "properties": [
- {
- "name": "x",
- "description": "Offset from owning layer left boundary",
- "type": "number"
- },
+ ],
+ "returns": [
{
- "name": "y",
- "description": "Offset from owning layer top boundary",
- "type": "number"
+ "name": "hasDamage",
+ "description": "Whether the BeginFrame resulted in damage and, thus, a new frame was committed to the\ndisplay. Reported for diagnostic uses, may be removed in the future.",
+ "type": "boolean"
},
{
- "name": "picture",
- "description": "Base64-encoded snapshot data.",
+ "name": "screenshotData",
+ "description": "Base64-encoded image data of the screenshot, if one was requested and successfully taken. (Encoded as a base64 string when passed over JSON)",
+ "optional": true,
"type": "string"
}
]
},
{
- "id": "Layer",
- "description": "Information about a compositing layer.",
- "type": "object",
- "properties": [
- {
- "name": "layerId",
- "description": "The unique id for this layer.",
- "$ref": "LayerId"
- },
- {
- "name": "parentLayerId",
- "description": "The id of parent (not present for root).",
- "optional": true,
- "$ref": "LayerId"
- },
- {
- "name": "backendNodeId",
- "description": "The backend id for the node associated with this layer.",
- "optional": true,
- "$ref": "DOM.BackendNodeId"
- },
- {
- "name": "offsetX",
- "description": "Offset from parent layer, X coordinate.",
- "type": "number"
- },
- {
- "name": "offsetY",
- "description": "Offset from parent layer, Y coordinate.",
- "type": "number"
- },
- {
- "name": "width",
- "description": "Layer width.",
- "type": "number"
- },
+ "name": "disable",
+ "description": "Disables headless events for the target."
+ },
+ {
+ "name": "enable",
+ "description": "Enables headless events for the target."
+ }
+ ],
+ "events": [
+ {
+ "name": "needsBeginFramesChanged",
+ "description": "Issued when the target starts or stops needing BeginFrames.\nDeprecated. Issue beginFrame unconditionally instead and use result from\nbeginFrame to detect whether the frames were suppressed.",
+ "deprecated": true,
+ "parameters": [
{
- "name": "height",
- "description": "Layer height.",
- "type": "number"
- },
+ "name": "needsBeginFrames",
+ "description": "True if BeginFrames are needed, false otherwise.",
+ "type": "boolean"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "domain": "IO",
+ "description": "Input/Output operations for streams produced by DevTools.",
+ "types": [
+ {
+ "id": "StreamHandle",
+ "description": "This is either obtained from another method or specified as `blob:<uuid>` where\n`<uuid>` is an UUID of a Blob.",
+ "type": "string"
+ }
+ ],
+ "commands": [
+ {
+ "name": "close",
+ "description": "Close the stream, discard any temporary backing storage.",
+ "parameters": [
{
- "name": "transform",
- "description": "Transformation matrix for layer, default is identity matrix",
- "optional": true,
- "type": "array",
- "items": {
- "type": "number"
- }
- },
+ "name": "handle",
+ "description": "Handle of the stream to close.",
+ "$ref": "StreamHandle"
+ }
+ ]
+ },
+ {
+ "name": "read",
+ "description": "Read a chunk of the stream",
+ "parameters": [
{
- "name": "anchorX",
- "description": "Transform anchor point X, absent if no transform specified",
- "optional": true,
- "type": "number"
+ "name": "handle",
+ "description": "Handle of the stream to read.",
+ "$ref": "StreamHandle"
},
{
- "name": "anchorY",
- "description": "Transform anchor point Y, absent if no transform specified",
+ "name": "offset",
+ "description": "Seek to the specified offset before reading (if not specified, proceed with offset\nfollowing the last read). Some types of streams may only support sequential reads.",
"optional": true,
- "type": "number"
+ "type": "integer"
},
{
- "name": "anchorZ",
- "description": "Transform anchor point Z, absent if no transform specified",
+ "name": "size",
+ "description": "Maximum number of bytes to read (left upon the agent discretion if not specified).",
"optional": true,
- "type": "number"
- },
- {
- "name": "paintCount",
- "description": "Indicates how many time this layer has painted.",
"type": "integer"
- },
- {
- "name": "drawsContent",
- "description": "Indicates whether this layer hosts any content, rather than being used for\ntransform/scrolling purposes only.",
- "type": "boolean"
- },
+ }
+ ],
+ "returns": [
{
- "name": "invisible",
- "description": "Set if layer is not visible.",
+ "name": "base64Encoded",
+ "description": "Set if the data is base64-encoded",
"optional": true,
"type": "boolean"
},
{
- "name": "scrollRects",
- "description": "Rectangles scrolling on main thread only.",
- "optional": true,
- "type": "array",
- "items": {
- "$ref": "ScrollRect"
- }
+ "name": "data",
+ "description": "Data that were read.",
+ "type": "string"
},
{
- "name": "stickyPositionConstraint",
- "description": "Sticky position constraint information",
- "optional": true,
- "$ref": "StickyPositionConstraint"
+ "name": "eof",
+ "description": "Set if the end-of-file condition occurred while reading.",
+ "type": "boolean"
}
]
},
{
- "id": "PaintProfile",
- "description": "Array of timings, one per paint step.",
- "type": "array",
- "items": {
- "type": "number"
- }
- }
- ],
- "commands": [
- {
- "name": "compositingReasons",
- "description": "Provides the reasons why the given layer was composited.",
+ "name": "resolveBlob",
+ "description": "Return UUID of Blob object specified by a remote object id.",
"parameters": [
{
- "name": "layerId",
- "description": "The id of the layer for which we want to get the reasons it was composited.",
- "$ref": "LayerId"
+ "name": "objectId",
+ "description": "Object id of a Blob object wrapper.",
+ "$ref": "Runtime.RemoteObjectId"
}
],
"returns": [
{
- "name": "compositingReasons",
- "description": "A list of strings specifying reasons for the given layer to become composited.",
+ "name": "uuid",
+ "description": "UUID of the specified Blob.",
+ "type": "string"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "domain": "IndexedDB",
+ "experimental": true,
+ "dependencies": [
+ "Runtime"
+ ],
+ "types": [
+ {
+ "id": "DatabaseWithObjectStores",
+ "description": "Database with an array of object stores.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "name",
+ "description": "Database name.",
+ "type": "string"
+ },
+ {
+ "name": "version",
+ "description": "Database version (type is not 'integer', as the standard\nrequires the version number to be 'unsigned long long')",
+ "type": "number"
+ },
+ {
+ "name": "objectStores",
+ "description": "Object stores in this database.",
"type": "array",
"items": {
- "type": "string"
+ "$ref": "ObjectStore"
}
}
]
},
{
- "name": "disable",
- "description": "Disables compositing tree inspection."
- },
- {
- "name": "enable",
- "description": "Enables compositing tree inspection."
- },
- {
- "name": "loadSnapshot",
- "description": "Returns the snapshot identifier.",
- "parameters": [
+ "id": "ObjectStore",
+ "description": "Object store.",
+ "type": "object",
+ "properties": [
{
- "name": "tiles",
- "description": "An array of tiles composing the snapshot.",
+ "name": "name",
+ "description": "Object store name.",
+ "type": "string"
+ },
+ {
+ "name": "keyPath",
+ "description": "Object store key path.",
+ "$ref": "KeyPath"
+ },
+ {
+ "name": "autoIncrement",
+ "description": "If true, object store has auto increment flag set.",
+ "type": "boolean"
+ },
+ {
+ "name": "indexes",
+ "description": "Indexes in this object store.",
"type": "array",
"items": {
- "$ref": "PictureTile"
+ "$ref": "ObjectStoreIndex"
}
}
- ],
- "returns": [
- {
- "name": "snapshotId",
- "description": "The id of the snapshot.",
- "$ref": "SnapshotId"
- }
]
},
{
- "name": "makeSnapshot",
- "description": "Returns the layer snapshot identifier.",
- "parameters": [
+ "id": "ObjectStoreIndex",
+ "description": "Object store index.",
+ "type": "object",
+ "properties": [
{
- "name": "layerId",
- "description": "The id of the layer.",
- "$ref": "LayerId"
- }
- ],
- "returns": [
+ "name": "name",
+ "description": "Index name.",
+ "type": "string"
+ },
{
- "name": "snapshotId",
- "description": "The id of the layer snapshot.",
- "$ref": "SnapshotId"
+ "name": "keyPath",
+ "description": "Index key path.",
+ "$ref": "KeyPath"
+ },
+ {
+ "name": "unique",
+ "description": "If true, index is unique.",
+ "type": "boolean"
+ },
+ {
+ "name": "multiEntry",
+ "description": "If true, index allows multiple entries for a key.",
+ "type": "boolean"
}
]
},
{
- "name": "profileSnapshot",
- "parameters": [
+ "id": "Key",
+ "description": "Key.",
+ "type": "object",
+ "properties": [
{
- "name": "snapshotId",
- "description": "The id of the layer snapshot.",
- "$ref": "SnapshotId"
+ "name": "type",
+ "description": "Key type.",
+ "type": "string",
+ "enum": [
+ "number",
+ "string",
+ "date",
+ "array"
+ ]
},
{
- "name": "minRepeatCount",
- "description": "The maximum number of times to replay the snapshot (1, if not specified).",
+ "name": "number",
+ "description": "Number value.",
"optional": true,
- "type": "integer"
+ "type": "number"
},
{
- "name": "minDuration",
- "description": "The minimum duration (in seconds) to replay the snapshot.",
+ "name": "string",
+ "description": "String value.",
"optional": true,
- "type": "number"
+ "type": "string"
},
{
- "name": "clipRect",
- "description": "The clip rectangle to apply when replaying the snapshot.",
+ "name": "date",
+ "description": "Date value.",
"optional": true,
- "$ref": "DOM.Rect"
- }
- ],
- "returns": [
+ "type": "number"
+ },
{
- "name": "timings",
- "description": "The array of paint profiles, one per run.",
+ "name": "array",
+ "description": "Array value.",
+ "optional": true,
"type": "array",
"items": {
- "$ref": "PaintProfile"
+ "$ref": "Key"
}
}
]
},
{
- "name": "releaseSnapshot",
- "description": "Releases layer snapshot captured by the back-end.",
- "parameters": [
- {
- "name": "snapshotId",
- "description": "The id of the layer snapshot.",
- "$ref": "SnapshotId"
- }
- ]
- },
- {
- "name": "replaySnapshot",
- "description": "Replays the layer snapshot and returns the resulting bitmap.",
- "parameters": [
- {
- "name": "snapshotId",
- "description": "The id of the layer snapshot.",
- "$ref": "SnapshotId"
- },
+ "id": "KeyRange",
+ "description": "Key range.",
+ "type": "object",
+ "properties": [
{
- "name": "fromStep",
- "description": "The first step to replay from (replay from the very start if not specified).",
+ "name": "lower",
+ "description": "Lower bound.",
"optional": true,
- "type": "integer"
+ "$ref": "Key"
},
{
- "name": "toStep",
- "description": "The last step to replay to (replay till the end if not specified).",
+ "name": "upper",
+ "description": "Upper bound.",
"optional": true,
- "type": "integer"
+ "$ref": "Key"
},
{
- "name": "scale",
- "description": "The scale to apply while replaying (defaults to 1).",
- "optional": true,
- "type": "number"
- }
- ],
- "returns": [
+ "name": "lowerOpen",
+ "description": "If true lower bound is open.",
+ "type": "boolean"
+ },
{
- "name": "dataURL",
- "description": "A data: URL for resulting image.",
- "type": "string"
+ "name": "upperOpen",
+ "description": "If true upper bound is open.",
+ "type": "boolean"
}
]
},
{
- "name": "snapshotCommandLog",
- "description": "Replays the layer snapshot and returns canvas log.",
- "parameters": [
- {
- "name": "snapshotId",
- "description": "The id of the layer snapshot.",
- "$ref": "SnapshotId"
- }
- ],
- "returns": [
+ "id": "DataEntry",
+ "description": "Data entry.",
+ "type": "object",
+ "properties": [
{
- "name": "commandLog",
- "description": "The array of canvas function calls.",
- "type": "array",
- "items": {
- "type": "object"
- }
- }
- ]
- }
- ],
- "events": [
- {
- "name": "layerPainted",
- "parameters": [
+ "name": "key",
+ "description": "Key object.",
+ "$ref": "Runtime.RemoteObject"
+ },
{
- "name": "layerId",
- "description": "The id of the painted layer.",
- "$ref": "LayerId"
+ "name": "primaryKey",
+ "description": "Primary key object.",
+ "$ref": "Runtime.RemoteObject"
},
{
- "name": "clip",
- "description": "Clip rectangle.",
- "$ref": "DOM.Rect"
+ "name": "value",
+ "description": "Value object.",
+ "$ref": "Runtime.RemoteObject"
}
]
},
{
- "name": "layerTreeDidChange",
- "parameters": [
- {
- "name": "layers",
- "description": "Layer tree, absent if not in the comspositing mode.",
- "optional": true,
- "type": "array",
- "items": {
- "$ref": "Layer"
- }
- }
- ]
- }
- ]
- },
- {
- "domain": "Log",
- "description": "Provides access to log entries.",
- "dependencies": [
- "Runtime",
- "Network"
- ],
- "types": [
- {
- "id": "LogEntry",
- "description": "Log entry.",
+ "id": "KeyPath",
+ "description": "Key path.",
"type": "object",
"properties": [
{
- "name": "source",
- "description": "Log entry source.",
- "type": "string",
- "enum": [
- "xml",
- "javascript",
- "network",
- "storage",
- "appcache",
- "rendering",
- "security",
- "deprecation",
- "worker",
- "violation",
- "intervention",
- "recommendation",
- "other"
- ]
- },
- {
- "name": "level",
- "description": "Log entry severity.",
+ "name": "type",
+ "description": "Key path type.",
"type": "string",
"enum": [
- "verbose",
- "info",
- "warning",
- "error"
+ "null",
+ "string",
+ "array"
]
},
{
- "name": "text",
- "description": "Logged text.",
- "type": "string"
- },
- {
- "name": "timestamp",
- "description": "Timestamp when this entry was added.",
- "$ref": "Runtime.Timestamp"
- },
- {
- "name": "url",
- "description": "URL of the resource if known.",
+ "name": "string",
+ "description": "String value.",
"optional": true,
"type": "string"
},
{
- "name": "lineNumber",
- "description": "Line number in the resource.",
+ "name": "array",
+ "description": "Array value.",
"optional": true,
- "type": "integer"
- },
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ ]
+ }
+ ],
+ "commands": [
+ {
+ "name": "clearObjectStore",
+ "description": "Clears all entries from an object store.",
+ "parameters": [
{
- "name": "stackTrace",
- "description": "JavaScript stack trace.",
- "optional": true,
- "$ref": "Runtime.StackTrace"
+ "name": "securityOrigin",
+ "description": "Security origin.",
+ "type": "string"
},
{
- "name": "networkRequestId",
- "description": "Identifier of the network request associated with this entry.",
- "optional": true,
- "$ref": "Network.RequestId"
+ "name": "databaseName",
+ "description": "Database name.",
+ "type": "string"
},
{
- "name": "workerId",
- "description": "Identifier of the worker associated with this entry.",
- "optional": true,
+ "name": "objectStoreName",
+ "description": "Object store name.",
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "name": "deleteDatabase",
+ "description": "Deletes a database.",
+ "parameters": [
+ {
+ "name": "securityOrigin",
+ "description": "Security origin.",
"type": "string"
},
{
- "name": "args",
- "description": "Call arguments.",
- "optional": true,
- "type": "array",
- "items": {
- "$ref": "Runtime.RemoteObject"
- }
+ "name": "databaseName",
+ "description": "Database name.",
+ "type": "string"
}
]
},
{
- "id": "ViolationSetting",
- "description": "Violation configuration setting.",
- "type": "object",
- "properties": [
+ "name": "deleteObjectStoreEntries",
+ "description": "Delete a range of entries from an object store",
+ "parameters": [
{
- "name": "name",
- "description": "Violation type.",
- "type": "string",
- "enum": [
- "longTask",
- "longLayout",
- "blockedEvent",
- "blockedParser",
- "discouragedAPIUse",
- "handler",
- "recurringHandler"
- ]
+ "name": "securityOrigin",
+ "type": "string"
},
{
- "name": "threshold",
- "description": "Time threshold to trigger upon.",
- "type": "number"
+ "name": "databaseName",
+ "type": "string"
+ },
+ {
+ "name": "objectStoreName",
+ "type": "string"
+ },
+ {
+ "name": "keyRange",
+ "description": "Range of entry keys to delete",
+ "$ref": "KeyRange"
}
]
- }
- ],
- "commands": [
- {
- "name": "clear",
- "description": "Clears the log."
},
{
"name": "disable",
- "description": "Disables log domain, prevents further log entries from being reported to the client."
+ "description": "Disables events from backend."
},
{
"name": "enable",
- "description": "Enables log domain, sends the entries collected so far to the client by means of the\n`entryAdded` notification."
+ "description": "Enables events from backend."
},
{
- "name": "startViolationsReport",
- "description": "start violation reporting.",
+ "name": "requestData",
+ "description": "Requests data from object store or index.",
"parameters": [
{
- "name": "config",
- "description": "Configuration for violations.",
+ "name": "securityOrigin",
+ "description": "Security origin.",
+ "type": "string"
+ },
+ {
+ "name": "databaseName",
+ "description": "Database name.",
+ "type": "string"
+ },
+ {
+ "name": "objectStoreName",
+ "description": "Object store name.",
+ "type": "string"
+ },
+ {
+ "name": "indexName",
+ "description": "Index name, empty string for object store data requests.",
+ "type": "string"
+ },
+ {
+ "name": "skipCount",
+ "description": "Number of records to skip.",
+ "type": "integer"
+ },
+ {
+ "name": "pageSize",
+ "description": "Number of records to fetch.",
+ "type": "integer"
+ },
+ {
+ "name": "keyRange",
+ "description": "Key range.",
+ "optional": true,
+ "$ref": "KeyRange"
+ }
+ ],
+ "returns": [
+ {
+ "name": "objectStoreDataEntries",
+ "description": "Array of object store data entries.",
"type": "array",
"items": {
- "$ref": "ViolationSetting"
+ "$ref": "DataEntry"
}
+ },
+ {
+ "name": "hasMore",
+ "description": "If true, there are more entries to fetch in the given range.",
+ "type": "boolean"
}
]
},
{
- "name": "stopViolationsReport",
- "description": "Stop violation reporting."
- }
- ],
- "events": [
- {
- "name": "entryAdded",
- "description": "Issued when new message was logged.",
+ "name": "getMetadata",
+ "description": "Gets metadata of an object store",
"parameters": [
{
- "name": "entry",
- "description": "The entry.",
- "$ref": "LogEntry"
+ "name": "securityOrigin",
+ "description": "Security origin.",
+ "type": "string"
+ },
+ {
+ "name": "databaseName",
+ "description": "Database name.",
+ "type": "string"
+ },
+ {
+ "name": "objectStoreName",
+ "description": "Object store name.",
+ "type": "string"
}
- ]
- }
- ]
- },
- {
- "domain": "Memory",
- "experimental": true,
- "types": [
- {
- "id": "PressureLevel",
- "description": "Memory pressure level.",
- "type": "string",
- "enum": [
- "moderate",
- "critical"
- ]
- },
- {
- "id": "SamplingProfileNode",
- "description": "Heap profile sample.",
- "type": "object",
- "properties": [
+ ],
+ "returns": [
{
- "name": "size",
- "description": "Size of the sampled allocation.",
+ "name": "entriesCount",
+ "description": "the entries count",
"type": "number"
},
{
- "name": "total",
- "description": "Total bytes attributed to this sample.",
+ "name": "keyGeneratorValue",
+ "description": "the current value of key generator, to become the next inserted\nkey into the object store. Valid if objectStore.autoIncrement\nis true.",
"type": "number"
+ }
+ ]
+ },
+ {
+ "name": "requestDatabase",
+ "description": "Requests database with given name in given frame.",
+ "parameters": [
+ {
+ "name": "securityOrigin",
+ "description": "Security origin.",
+ "type": "string"
},
{
- "name": "stack",
- "description": "Execution stack at the point of allocation.",
- "type": "array",
- "items": {
- "type": "string"
- }
+ "name": "databaseName",
+ "description": "Database name.",
+ "type": "string"
+ }
+ ],
+ "returns": [
+ {
+ "name": "databaseWithObjectStores",
+ "description": "Database with an array of object stores.",
+ "$ref": "DatabaseWithObjectStores"
}
]
},
{
- "id": "SamplingProfile",
- "description": "Array of heap profile samples.",
- "type": "object",
- "properties": [
+ "name": "requestDatabaseNames",
+ "description": "Requests database names for given security origin.",
+ "parameters": [
{
- "name": "samples",
- "type": "array",
- "items": {
- "$ref": "SamplingProfileNode"
- }
- },
+ "name": "securityOrigin",
+ "description": "Security origin.",
+ "type": "string"
+ }
+ ],
+ "returns": [
{
- "name": "modules",
+ "name": "databaseNames",
+ "description": "Database names for origin.",
"type": "array",
"items": {
- "$ref": "Module"
+ "type": "string"
}
}
]
- },
+ }
+ ]
+ },
+ {
+ "domain": "Input",
+ "types": [
{
- "id": "Module",
- "description": "Executable module information",
+ "id": "TouchPoint",
"type": "object",
"properties": [
{
- "name": "name",
- "description": "Name of the module.",
- "type": "string"
+ "name": "x",
+ "description": "X coordinate of the event relative to the main frame's viewport in CSS pixels.",
+ "type": "number"
},
{
- "name": "uuid",
- "description": "UUID of the module.",
- "type": "string"
+ "name": "y",
+ "description": "Y coordinate of the event relative to the main frame's viewport in CSS pixels. 0 refers to\nthe top of the viewport and Y increases as it proceeds towards the bottom of the viewport.",
+ "type": "number"
},
{
- "name": "baseAddress",
- "description": "Base address where the module is loaded into memory. Encoded as a decimal\nor hexadecimal (0x prefixed) string.",
- "type": "string"
+ "name": "radiusX",
+ "description": "X radius of the touch area (default: 1.0).",
+ "optional": true,
+ "type": "number"
},
{
- "name": "size",
- "description": "Size of the module in bytes.",
+ "name": "radiusY",
+ "description": "Y radius of the touch area (default: 1.0).",
+ "optional": true,
"type": "number"
- }
- ]
- }
- ],
- "commands": [
- {
- "name": "getDOMCounters",
- "returns": [
+ },
{
- "name": "documents",
+ "name": "rotationAngle",
+ "description": "Rotation angle (default: 0.0).",
+ "optional": true,
+ "type": "number"
+ },
+ {
+ "name": "force",
+ "description": "Force (default: 1.0).",
+ "optional": true,
+ "type": "number"
+ },
+ {
+ "name": "tangentialPressure",
+ "description": "The normalized tangential pressure, which has a range of [-1,1] (default: 0).",
+ "experimental": true,
+ "optional": true,
+ "type": "number"
+ },
+ {
+ "name": "tiltX",
+ "description": "The plane angle between the Y-Z plane and the plane containing both the stylus axis and the Y axis, in degrees of the range [-90,90], a positive tiltX is to the right (default: 0)",
+ "experimental": true,
+ "optional": true,
"type": "integer"
},
{
- "name": "nodes",
+ "name": "tiltY",
+ "description": "The plane angle between the X-Z plane and the plane containing both the stylus axis and the X axis, in degrees of the range [-90,90], a positive tiltY is towards the user (default: 0).",
+ "experimental": true,
+ "optional": true,
"type": "integer"
},
{
- "name": "jsEventListeners",
+ "name": "twist",
+ "description": "The clockwise rotation of a pen stylus around its own major axis, in degrees in the range [0,359] (default: 0).",
+ "experimental": true,
+ "optional": true,
"type": "integer"
+ },
+ {
+ "name": "id",
+ "description": "Identifier used to track touch sources between events, must be unique within an event.",
+ "optional": true,
+ "type": "number"
}
]
},
{
- "name": "prepareForLeakDetection"
- },
- {
- "name": "forciblyPurgeJavaScriptMemory",
- "description": "Simulate OomIntervention by purging V8 memory."
+ "id": "GestureSourceType",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "default",
+ "touch",
+ "mouse"
+ ]
},
{
- "name": "setPressureNotificationsSuppressed",
- "description": "Enable/disable suppressing memory pressure notifications in all processes.",
- "parameters": [
- {
- "name": "suppressed",
- "description": "If true, memory pressure notifications will be suppressed.",
- "type": "boolean"
- }
+ "id": "MouseButton",
+ "type": "string",
+ "enum": [
+ "none",
+ "left",
+ "middle",
+ "right",
+ "back",
+ "forward"
]
},
{
- "name": "simulatePressureNotification",
- "description": "Simulate a memory pressure notification in all processes.",
- "parameters": [
- {
- "name": "level",
- "description": "Memory pressure level of the notification.",
- "$ref": "PressureLevel"
- }
- ]
+ "id": "TimeSinceEpoch",
+ "description": "UTC time in seconds, counted from January 1, 1970.",
+ "type": "number"
},
{
- "name": "startSampling",
- "description": "Start collecting native memory profile.",
- "parameters": [
+ "id": "DragDataItem",
+ "experimental": true,
+ "type": "object",
+ "properties": [
{
- "name": "samplingInterval",
- "description": "Average number of bytes between samples.",
+ "name": "mimeType",
+ "description": "Mime type of the dragged data.",
+ "type": "string"
+ },
+ {
+ "name": "data",
+ "description": "Depending of the value of `mimeType`, it contains the dragged link,\ntext, HTML markup or any other data.",
+ "type": "string"
+ },
+ {
+ "name": "title",
+ "description": "Title associated with a link. Only valid when `mimeType` == \"text/uri-list\".",
"optional": true,
- "type": "integer"
+ "type": "string"
},
{
- "name": "suppressRandomness",
- "description": "Do not randomize intervals between samples.",
+ "name": "baseURL",
+ "description": "Stores the base URL for the contained markup. Only valid when `mimeType`\n== \"text/html\".",
"optional": true,
- "type": "boolean"
+ "type": "string"
}
]
},
{
- "name": "stopSampling",
- "description": "Stop collecting native memory profile."
- },
- {
- "name": "getAllTimeSamplingProfile",
- "description": "Retrieve native memory allocations profile\ncollected since renderer process startup.",
- "returns": [
+ "id": "DragData",
+ "experimental": true,
+ "type": "object",
+ "properties": [
{
- "name": "profile",
- "$ref": "SamplingProfile"
- }
- ]
- },
- {
- "name": "getBrowserSamplingProfile",
- "description": "Retrieve native memory allocations profile\ncollected since browser process startup.",
- "returns": [
+ "name": "items",
+ "type": "array",
+ "items": {
+ "$ref": "DragDataItem"
+ }
+ },
{
- "name": "profile",
- "$ref": "SamplingProfile"
+ "name": "files",
+ "description": "List of filenames that should be included when dropping",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "dragOperationsMask",
+ "description": "Bit field representing allowed drag operations. Copy = 1, Link = 2, Move = 16",
+ "type": "integer"
}
]
- },
+ }
+ ],
+ "commands": [
{
- "name": "getSamplingProfile",
+ "name": "dispatchDragEvent",
+ "description": "Dispatches a drag event into the page.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "type",
+ "description": "Type of the drag event.",
+ "type": "string",
+ "enum": [
+ "dragEnter",
+ "dragOver",
+ "drop",
+ "dragCancel"
+ ]
+ },
+ {
+ "name": "x",
+ "description": "X coordinate of the event relative to the main frame's viewport in CSS pixels.",
+ "type": "number"
+ },
+ {
+ "name": "y",
+ "description": "Y coordinate of the event relative to the main frame's viewport in CSS pixels. 0 refers to\nthe top of the viewport and Y increases as it proceeds towards the bottom of the viewport.",
+ "type": "number"
+ },
+ {
+ "name": "data",
+ "$ref": "DragData"
+ },
+ {
+ "name": "modifiers",
+ "description": "Bit field representing pressed modifier keys. Alt=1, Ctrl=2, Meta/Command=4, Shift=8\n(default: 0).",
+ "optional": true,
+ "type": "integer"
+ }
+ ]
+ },
+ {
+ "name": "dispatchKeyEvent",
+ "description": "Dispatches a key event to the page.",
+ "parameters": [
+ {
+ "name": "type",
+ "description": "Type of the key event.",
+ "type": "string",
+ "enum": [
+ "keyDown",
+ "keyUp",
+ "rawKeyDown",
+ "char"
+ ]
+ },
+ {
+ "name": "modifiers",
+ "description": "Bit field representing pressed modifier keys. Alt=1, Ctrl=2, Meta/Command=4, Shift=8\n(default: 0).",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "timestamp",
+ "description": "Time at which the event occurred.",
+ "optional": true,
+ "$ref": "TimeSinceEpoch"
+ },
+ {
+ "name": "text",
+ "description": "Text as generated by processing a virtual key code with a keyboard layout. Not needed for\nfor `keyUp` and `rawKeyDown` events (default: \"\")",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "unmodifiedText",
+ "description": "Text that would have been generated by the keyboard if no modifiers were pressed (except for\nshift). Useful for shortcut (accelerator) key handling (default: \"\").",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "keyIdentifier",
+ "description": "Unique key identifier (e.g., 'U+0041') (default: \"\").",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "code",
+ "description": "Unique DOM defined string value for each physical key (e.g., 'KeyA') (default: \"\").",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "key",
+ "description": "Unique DOM defined string value describing the meaning of the key in the context of active\nmodifiers, keyboard layout, etc (e.g., 'AltGr') (default: \"\").",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "windowsVirtualKeyCode",
+ "description": "Windows virtual key code (default: 0).",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "nativeVirtualKeyCode",
+ "description": "Native virtual key code (default: 0).",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "autoRepeat",
+ "description": "Whether the event was generated from auto repeat (default: false).",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "isKeypad",
+ "description": "Whether the event was generated from the keypad (default: false).",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "isSystemKey",
+ "description": "Whether the event was a system key event (default: false).",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "location",
+ "description": "Whether the event was from the left or right side of the keyboard. 1=Left, 2=Right (default:\n0).",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "commands",
+ "description": "Editing commands to send with the key event (e.g., 'selectAll') (default: []).\nThese are related to but not equal the command names used in `document.execCommand` and NSStandardKeyBindingResponding.\nSee https://source.chromium.org/chromium/chromium/src/+/main:third_party/blink/renderer/core/editing/commands/editor_command_names.h for valid command names.",
+ "experimental": true,
+ "optional": true,
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ ]
+ },
+ {
+ "name": "insertText",
+ "description": "This method emulates inserting text that doesn't come from a key press,\nfor example an emoji keyboard or an IME.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "text",
+ "description": "The text to insert.",
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "name": "imeSetComposition",
+ "description": "This method sets the current candidate text for ime.\nUse imeCommitComposition to commit the final text.\nUse imeSetComposition with empty string as text to cancel composition.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "text",
+ "description": "The text to insert",
+ "type": "string"
+ },
+ {
+ "name": "selectionStart",
+ "description": "selection start",
+ "type": "integer"
+ },
+ {
+ "name": "selectionEnd",
+ "description": "selection end",
+ "type": "integer"
+ },
+ {
+ "name": "replacementStart",
+ "description": "replacement start",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "replacementEnd",
+ "description": "replacement end",
+ "optional": true,
+ "type": "integer"
+ }
+ ]
+ },
+ {
+ "name": "dispatchMouseEvent",
+ "description": "Dispatches a mouse event to the page.",
+ "parameters": [
+ {
+ "name": "type",
+ "description": "Type of the mouse event.",
+ "type": "string",
+ "enum": [
+ "mousePressed",
+ "mouseReleased",
+ "mouseMoved",
+ "mouseWheel"
+ ]
+ },
+ {
+ "name": "x",
+ "description": "X coordinate of the event relative to the main frame's viewport in CSS pixels.",
+ "type": "number"
+ },
+ {
+ "name": "y",
+ "description": "Y coordinate of the event relative to the main frame's viewport in CSS pixels. 0 refers to\nthe top of the viewport and Y increases as it proceeds towards the bottom of the viewport.",
+ "type": "number"
+ },
+ {
+ "name": "modifiers",
+ "description": "Bit field representing pressed modifier keys. Alt=1, Ctrl=2, Meta/Command=4, Shift=8\n(default: 0).",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "timestamp",
+ "description": "Time at which the event occurred.",
+ "optional": true,
+ "$ref": "TimeSinceEpoch"
+ },
+ {
+ "name": "button",
+ "description": "Mouse button (default: \"none\").",
+ "optional": true,
+ "$ref": "MouseButton"
+ },
+ {
+ "name": "buttons",
+ "description": "A number indicating which buttons are pressed on the mouse when a mouse event is triggered.\nLeft=1, Right=2, Middle=4, Back=8, Forward=16, None=0.",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "clickCount",
+ "description": "Number of times the mouse button was clicked (default: 0).",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "force",
+ "description": "The normalized pressure, which has a range of [0,1] (default: 0).",
+ "experimental": true,
+ "optional": true,
+ "type": "number"
+ },
+ {
+ "name": "tangentialPressure",
+ "description": "The normalized tangential pressure, which has a range of [-1,1] (default: 0).",
+ "experimental": true,
+ "optional": true,
+ "type": "number"
+ },
+ {
+ "name": "tiltX",
+ "description": "The plane angle between the Y-Z plane and the plane containing both the stylus axis and the Y axis, in degrees of the range [-90,90], a positive tiltX is to the right (default: 0).",
+ "experimental": true,
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "tiltY",
+ "description": "The plane angle between the X-Z plane and the plane containing both the stylus axis and the X axis, in degrees of the range [-90,90], a positive tiltY is towards the user (default: 0).",
+ "experimental": true,
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "twist",
+ "description": "The clockwise rotation of a pen stylus around its own major axis, in degrees in the range [0,359] (default: 0).",
+ "experimental": true,
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "deltaX",
+ "description": "X delta in CSS pixels for mouse wheel event (default: 0).",
+ "optional": true,
+ "type": "number"
+ },
+ {
+ "name": "deltaY",
+ "description": "Y delta in CSS pixels for mouse wheel event (default: 0).",
+ "optional": true,
+ "type": "number"
+ },
+ {
+ "name": "pointerType",
+ "description": "Pointer type (default: \"mouse\").",
+ "optional": true,
+ "type": "string",
+ "enum": [
+ "mouse",
+ "pen"
+ ]
+ }
+ ]
+ },
+ {
+ "name": "dispatchTouchEvent",
+ "description": "Dispatches a touch event to the page.",
+ "parameters": [
+ {
+ "name": "type",
+ "description": "Type of the touch event. TouchEnd and TouchCancel must not contain any touch points, while\nTouchStart and TouchMove must contains at least one.",
+ "type": "string",
+ "enum": [
+ "touchStart",
+ "touchEnd",
+ "touchMove",
+ "touchCancel"
+ ]
+ },
+ {
+ "name": "touchPoints",
+ "description": "Active touch points on the touch device. One event per any changed point (compared to\nprevious touch event in a sequence) is generated, emulating pressing/moving/releasing points\none by one.",
+ "type": "array",
+ "items": {
+ "$ref": "TouchPoint"
+ }
+ },
+ {
+ "name": "modifiers",
+ "description": "Bit field representing pressed modifier keys. Alt=1, Ctrl=2, Meta/Command=4, Shift=8\n(default: 0).",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "timestamp",
+ "description": "Time at which the event occurred.",
+ "optional": true,
+ "$ref": "TimeSinceEpoch"
+ }
+ ]
+ },
+ {
+ "name": "emulateTouchFromMouseEvent",
+ "description": "Emulates touch event from the mouse event parameters.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "type",
+ "description": "Type of the mouse event.",
+ "type": "string",
+ "enum": [
+ "mousePressed",
+ "mouseReleased",
+ "mouseMoved",
+ "mouseWheel"
+ ]
+ },
+ {
+ "name": "x",
+ "description": "X coordinate of the mouse pointer in DIP.",
+ "type": "integer"
+ },
+ {
+ "name": "y",
+ "description": "Y coordinate of the mouse pointer in DIP.",
+ "type": "integer"
+ },
+ {
+ "name": "button",
+ "description": "Mouse button. Only \"none\", \"left\", \"right\" are supported.",
+ "$ref": "MouseButton"
+ },
+ {
+ "name": "timestamp",
+ "description": "Time at which the event occurred (default: current time).",
+ "optional": true,
+ "$ref": "TimeSinceEpoch"
+ },
+ {
+ "name": "deltaX",
+ "description": "X delta in DIP for mouse wheel event (default: 0).",
+ "optional": true,
+ "type": "number"
+ },
+ {
+ "name": "deltaY",
+ "description": "Y delta in DIP for mouse wheel event (default: 0).",
+ "optional": true,
+ "type": "number"
+ },
+ {
+ "name": "modifiers",
+ "description": "Bit field representing pressed modifier keys. Alt=1, Ctrl=2, Meta/Command=4, Shift=8\n(default: 0).",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "clickCount",
+ "description": "Number of times the mouse button was clicked (default: 0).",
+ "optional": true,
+ "type": "integer"
+ }
+ ]
+ },
+ {
+ "name": "setIgnoreInputEvents",
+ "description": "Ignores input events (useful while auditing page).",
+ "parameters": [
+ {
+ "name": "ignore",
+ "description": "Ignores input events processing when set to true.",
+ "type": "boolean"
+ }
+ ]
+ },
+ {
+ "name": "setInterceptDrags",
+ "description": "Prevents default drag and drop behavior and instead emits `Input.dragIntercepted` events.\nDrag and drop behavior can be directly controlled via `Input.dispatchDragEvent`.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "enabled",
+ "type": "boolean"
+ }
+ ]
+ },
+ {
+ "name": "synthesizePinchGesture",
+ "description": "Synthesizes a pinch gesture over a time period by issuing appropriate touch events.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "x",
+ "description": "X coordinate of the start of the gesture in CSS pixels.",
+ "type": "number"
+ },
+ {
+ "name": "y",
+ "description": "Y coordinate of the start of the gesture in CSS pixels.",
+ "type": "number"
+ },
+ {
+ "name": "scaleFactor",
+ "description": "Relative scale factor after zooming (>1.0 zooms in, <1.0 zooms out).",
+ "type": "number"
+ },
+ {
+ "name": "relativeSpeed",
+ "description": "Relative pointer speed in pixels per second (default: 800).",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "gestureSourceType",
+ "description": "Which type of input events to be generated (default: 'default', which queries the platform\nfor the preferred input type).",
+ "optional": true,
+ "$ref": "GestureSourceType"
+ }
+ ]
+ },
+ {
+ "name": "synthesizeScrollGesture",
+ "description": "Synthesizes a scroll gesture over a time period by issuing appropriate touch events.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "x",
+ "description": "X coordinate of the start of the gesture in CSS pixels.",
+ "type": "number"
+ },
+ {
+ "name": "y",
+ "description": "Y coordinate of the start of the gesture in CSS pixels.",
+ "type": "number"
+ },
+ {
+ "name": "xDistance",
+ "description": "The distance to scroll along the X axis (positive to scroll left).",
+ "optional": true,
+ "type": "number"
+ },
+ {
+ "name": "yDistance",
+ "description": "The distance to scroll along the Y axis (positive to scroll up).",
+ "optional": true,
+ "type": "number"
+ },
+ {
+ "name": "xOverscroll",
+ "description": "The number of additional pixels to scroll back along the X axis, in addition to the given\ndistance.",
+ "optional": true,
+ "type": "number"
+ },
+ {
+ "name": "yOverscroll",
+ "description": "The number of additional pixels to scroll back along the Y axis, in addition to the given\ndistance.",
+ "optional": true,
+ "type": "number"
+ },
+ {
+ "name": "preventFling",
+ "description": "Prevent fling (default: true).",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "speed",
+ "description": "Swipe speed in pixels per second (default: 800).",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "gestureSourceType",
+ "description": "Which type of input events to be generated (default: 'default', which queries the platform\nfor the preferred input type).",
+ "optional": true,
+ "$ref": "GestureSourceType"
+ },
+ {
+ "name": "repeatCount",
+ "description": "The number of times to repeat the gesture (default: 0).",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "repeatDelayMs",
+ "description": "The number of milliseconds delay between each repeat. (default: 250).",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "interactionMarkerName",
+ "description": "The name of the interaction markers to generate, if not empty (default: \"\").",
+ "optional": true,
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "name": "synthesizeTapGesture",
+ "description": "Synthesizes a tap gesture over a time period by issuing appropriate touch events.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "x",
+ "description": "X coordinate of the start of the gesture in CSS pixels.",
+ "type": "number"
+ },
+ {
+ "name": "y",
+ "description": "Y coordinate of the start of the gesture in CSS pixels.",
+ "type": "number"
+ },
+ {
+ "name": "duration",
+ "description": "Duration between touchdown and touchup events in ms (default: 50).",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "tapCount",
+ "description": "Number of times to perform the tap (e.g. 2 for double tap, default: 1).",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "gestureSourceType",
+ "description": "Which type of input events to be generated (default: 'default', which queries the platform\nfor the preferred input type).",
+ "optional": true,
+ "$ref": "GestureSourceType"
+ }
+ ]
+ }
+ ],
+ "events": [
+ {
+ "name": "dragIntercepted",
+ "description": "Emitted only when `Input.setInterceptDrags` is enabled. Use this data with `Input.dispatchDragEvent` to\nrestore normal drag and drop behavior.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "data",
+ "$ref": "DragData"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "domain": "Inspector",
+ "experimental": true,
+ "commands": [
+ {
+ "name": "disable",
+ "description": "Disables inspector domain notifications."
+ },
+ {
+ "name": "enable",
+ "description": "Enables inspector domain notifications."
+ }
+ ],
+ "events": [
+ {
+ "name": "detached",
+ "description": "Fired when remote debugging connection is about to be terminated. Contains detach reason.",
+ "parameters": [
+ {
+ "name": "reason",
+ "description": "The reason why connection has been terminated.",
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "name": "targetCrashed",
+ "description": "Fired when debugging target has crashed"
+ },
+ {
+ "name": "targetReloadedAfterCrash",
+ "description": "Fired when debugging target has reloaded after crash"
+ }
+ ]
+ },
+ {
+ "domain": "LayerTree",
+ "experimental": true,
+ "dependencies": [
+ "DOM"
+ ],
+ "types": [
+ {
+ "id": "LayerId",
+ "description": "Unique Layer identifier.",
+ "type": "string"
+ },
+ {
+ "id": "SnapshotId",
+ "description": "Unique snapshot identifier.",
+ "type": "string"
+ },
+ {
+ "id": "ScrollRect",
+ "description": "Rectangle where scrolling happens on the main thread.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "rect",
+ "description": "Rectangle itself.",
+ "$ref": "DOM.Rect"
+ },
+ {
+ "name": "type",
+ "description": "Reason for rectangle to force scrolling on the main thread",
+ "type": "string",
+ "enum": [
+ "RepaintsOnScroll",
+ "TouchEventHandler",
+ "WheelEventHandler"
+ ]
+ }
+ ]
+ },
+ {
+ "id": "StickyPositionConstraint",
+ "description": "Sticky position constraints.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "stickyBoxRect",
+ "description": "Layout rectangle of the sticky element before being shifted",
+ "$ref": "DOM.Rect"
+ },
+ {
+ "name": "containingBlockRect",
+ "description": "Layout rectangle of the containing block of the sticky element",
+ "$ref": "DOM.Rect"
+ },
+ {
+ "name": "nearestLayerShiftingStickyBox",
+ "description": "The nearest sticky layer that shifts the sticky box",
+ "optional": true,
+ "$ref": "LayerId"
+ },
+ {
+ "name": "nearestLayerShiftingContainingBlock",
+ "description": "The nearest sticky layer that shifts the containing block",
+ "optional": true,
+ "$ref": "LayerId"
+ }
+ ]
+ },
+ {
+ "id": "PictureTile",
+ "description": "Serialized fragment of layer picture along with its offset within the layer.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "x",
+ "description": "Offset from owning layer left boundary",
+ "type": "number"
+ },
+ {
+ "name": "y",
+ "description": "Offset from owning layer top boundary",
+ "type": "number"
+ },
+ {
+ "name": "picture",
+ "description": "Base64-encoded snapshot data. (Encoded as a base64 string when passed over JSON)",
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "id": "Layer",
+ "description": "Information about a compositing layer.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "layerId",
+ "description": "The unique id for this layer.",
+ "$ref": "LayerId"
+ },
+ {
+ "name": "parentLayerId",
+ "description": "The id of parent (not present for root).",
+ "optional": true,
+ "$ref": "LayerId"
+ },
+ {
+ "name": "backendNodeId",
+ "description": "The backend id for the node associated with this layer.",
+ "optional": true,
+ "$ref": "DOM.BackendNodeId"
+ },
+ {
+ "name": "offsetX",
+ "description": "Offset from parent layer, X coordinate.",
+ "type": "number"
+ },
+ {
+ "name": "offsetY",
+ "description": "Offset from parent layer, Y coordinate.",
+ "type": "number"
+ },
+ {
+ "name": "width",
+ "description": "Layer width.",
+ "type": "number"
+ },
+ {
+ "name": "height",
+ "description": "Layer height.",
+ "type": "number"
+ },
+ {
+ "name": "transform",
+ "description": "Transformation matrix for layer, default is identity matrix",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "type": "number"
+ }
+ },
+ {
+ "name": "anchorX",
+ "description": "Transform anchor point X, absent if no transform specified",
+ "optional": true,
+ "type": "number"
+ },
+ {
+ "name": "anchorY",
+ "description": "Transform anchor point Y, absent if no transform specified",
+ "optional": true,
+ "type": "number"
+ },
+ {
+ "name": "anchorZ",
+ "description": "Transform anchor point Z, absent if no transform specified",
+ "optional": true,
+ "type": "number"
+ },
+ {
+ "name": "paintCount",
+ "description": "Indicates how many time this layer has painted.",
+ "type": "integer"
+ },
+ {
+ "name": "drawsContent",
+ "description": "Indicates whether this layer hosts any content, rather than being used for\ntransform/scrolling purposes only.",
+ "type": "boolean"
+ },
+ {
+ "name": "invisible",
+ "description": "Set if layer is not visible.",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "scrollRects",
+ "description": "Rectangles scrolling on main thread only.",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "$ref": "ScrollRect"
+ }
+ },
+ {
+ "name": "stickyPositionConstraint",
+ "description": "Sticky position constraint information",
+ "optional": true,
+ "$ref": "StickyPositionConstraint"
+ }
+ ]
+ },
+ {
+ "id": "PaintProfile",
+ "description": "Array of timings, one per paint step.",
+ "type": "array",
+ "items": {
+ "type": "number"
+ }
+ }
+ ],
+ "commands": [
+ {
+ "name": "compositingReasons",
+ "description": "Provides the reasons why the given layer was composited.",
+ "parameters": [
+ {
+ "name": "layerId",
+ "description": "The id of the layer for which we want to get the reasons it was composited.",
+ "$ref": "LayerId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "compositingReasons",
+ "description": "A list of strings specifying reasons for the given layer to become composited.",
+ "deprecated": true,
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "compositingReasonIds",
+ "description": "A list of strings specifying reason IDs for the given layer to become composited.",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ ]
+ },
+ {
+ "name": "disable",
+ "description": "Disables compositing tree inspection."
+ },
+ {
+ "name": "enable",
+ "description": "Enables compositing tree inspection."
+ },
+ {
+ "name": "loadSnapshot",
+ "description": "Returns the snapshot identifier.",
+ "parameters": [
+ {
+ "name": "tiles",
+ "description": "An array of tiles composing the snapshot.",
+ "type": "array",
+ "items": {
+ "$ref": "PictureTile"
+ }
+ }
+ ],
+ "returns": [
+ {
+ "name": "snapshotId",
+ "description": "The id of the snapshot.",
+ "$ref": "SnapshotId"
+ }
+ ]
+ },
+ {
+ "name": "makeSnapshot",
+ "description": "Returns the layer snapshot identifier.",
+ "parameters": [
+ {
+ "name": "layerId",
+ "description": "The id of the layer.",
+ "$ref": "LayerId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "snapshotId",
+ "description": "The id of the layer snapshot.",
+ "$ref": "SnapshotId"
+ }
+ ]
+ },
+ {
+ "name": "profileSnapshot",
+ "parameters": [
+ {
+ "name": "snapshotId",
+ "description": "The id of the layer snapshot.",
+ "$ref": "SnapshotId"
+ },
+ {
+ "name": "minRepeatCount",
+ "description": "The maximum number of times to replay the snapshot (1, if not specified).",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "minDuration",
+ "description": "The minimum duration (in seconds) to replay the snapshot.",
+ "optional": true,
+ "type": "number"
+ },
+ {
+ "name": "clipRect",
+ "description": "The clip rectangle to apply when replaying the snapshot.",
+ "optional": true,
+ "$ref": "DOM.Rect"
+ }
+ ],
+ "returns": [
+ {
+ "name": "timings",
+ "description": "The array of paint profiles, one per run.",
+ "type": "array",
+ "items": {
+ "$ref": "PaintProfile"
+ }
+ }
+ ]
+ },
+ {
+ "name": "releaseSnapshot",
+ "description": "Releases layer snapshot captured by the back-end.",
+ "parameters": [
+ {
+ "name": "snapshotId",
+ "description": "The id of the layer snapshot.",
+ "$ref": "SnapshotId"
+ }
+ ]
+ },
+ {
+ "name": "replaySnapshot",
+ "description": "Replays the layer snapshot and returns the resulting bitmap.",
+ "parameters": [
+ {
+ "name": "snapshotId",
+ "description": "The id of the layer snapshot.",
+ "$ref": "SnapshotId"
+ },
+ {
+ "name": "fromStep",
+ "description": "The first step to replay from (replay from the very start if not specified).",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "toStep",
+ "description": "The last step to replay to (replay till the end if not specified).",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "scale",
+ "description": "The scale to apply while replaying (defaults to 1).",
+ "optional": true,
+ "type": "number"
+ }
+ ],
+ "returns": [
+ {
+ "name": "dataURL",
+ "description": "A data: URL for resulting image.",
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "name": "snapshotCommandLog",
+ "description": "Replays the layer snapshot and returns canvas log.",
+ "parameters": [
+ {
+ "name": "snapshotId",
+ "description": "The id of the layer snapshot.",
+ "$ref": "SnapshotId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "commandLog",
+ "description": "The array of canvas function calls.",
+ "type": "array",
+ "items": {
+ "type": "object"
+ }
+ }
+ ]
+ }
+ ],
+ "events": [
+ {
+ "name": "layerPainted",
+ "parameters": [
+ {
+ "name": "layerId",
+ "description": "The id of the painted layer.",
+ "$ref": "LayerId"
+ },
+ {
+ "name": "clip",
+ "description": "Clip rectangle.",
+ "$ref": "DOM.Rect"
+ }
+ ]
+ },
+ {
+ "name": "layerTreeDidChange",
+ "parameters": [
+ {
+ "name": "layers",
+ "description": "Layer tree, absent if not in the comspositing mode.",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "$ref": "Layer"
+ }
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "domain": "Log",
+ "description": "Provides access to log entries.",
+ "dependencies": [
+ "Runtime",
+ "Network"
+ ],
+ "types": [
+ {
+ "id": "LogEntry",
+ "description": "Log entry.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "source",
+ "description": "Log entry source.",
+ "type": "string",
+ "enum": [
+ "xml",
+ "javascript",
+ "network",
+ "storage",
+ "appcache",
+ "rendering",
+ "security",
+ "deprecation",
+ "worker",
+ "violation",
+ "intervention",
+ "recommendation",
+ "other"
+ ]
+ },
+ {
+ "name": "level",
+ "description": "Log entry severity.",
+ "type": "string",
+ "enum": [
+ "verbose",
+ "info",
+ "warning",
+ "error"
+ ]
+ },
+ {
+ "name": "text",
+ "description": "Logged text.",
+ "type": "string"
+ },
+ {
+ "name": "category",
+ "optional": true,
+ "type": "string",
+ "enum": [
+ "cors"
+ ]
+ },
+ {
+ "name": "timestamp",
+ "description": "Timestamp when this entry was added.",
+ "$ref": "Runtime.Timestamp"
+ },
+ {
+ "name": "url",
+ "description": "URL of the resource if known.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "lineNumber",
+ "description": "Line number in the resource.",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "stackTrace",
+ "description": "JavaScript stack trace.",
+ "optional": true,
+ "$ref": "Runtime.StackTrace"
+ },
+ {
+ "name": "networkRequestId",
+ "description": "Identifier of the network request associated with this entry.",
+ "optional": true,
+ "$ref": "Network.RequestId"
+ },
+ {
+ "name": "workerId",
+ "description": "Identifier of the worker associated with this entry.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "args",
+ "description": "Call arguments.",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "$ref": "Runtime.RemoteObject"
+ }
+ }
+ ]
+ },
+ {
+ "id": "ViolationSetting",
+ "description": "Violation configuration setting.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "name",
+ "description": "Violation type.",
+ "type": "string",
+ "enum": [
+ "longTask",
+ "longLayout",
+ "blockedEvent",
+ "blockedParser",
+ "discouragedAPIUse",
+ "handler",
+ "recurringHandler"
+ ]
+ },
+ {
+ "name": "threshold",
+ "description": "Time threshold to trigger upon.",
+ "type": "number"
+ }
+ ]
+ }
+ ],
+ "commands": [
+ {
+ "name": "clear",
+ "description": "Clears the log."
+ },
+ {
+ "name": "disable",
+ "description": "Disables log domain, prevents further log entries from being reported to the client."
+ },
+ {
+ "name": "enable",
+ "description": "Enables log domain, sends the entries collected so far to the client by means of the\n`entryAdded` notification."
+ },
+ {
+ "name": "startViolationsReport",
+ "description": "start violation reporting.",
+ "parameters": [
+ {
+ "name": "config",
+ "description": "Configuration for violations.",
+ "type": "array",
+ "items": {
+ "$ref": "ViolationSetting"
+ }
+ }
+ ]
+ },
+ {
+ "name": "stopViolationsReport",
+ "description": "Stop violation reporting."
+ }
+ ],
+ "events": [
+ {
+ "name": "entryAdded",
+ "description": "Issued when new message was logged.",
+ "parameters": [
+ {
+ "name": "entry",
+ "description": "The entry.",
+ "$ref": "LogEntry"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "domain": "Memory",
+ "experimental": true,
+ "types": [
+ {
+ "id": "PressureLevel",
+ "description": "Memory pressure level.",
+ "type": "string",
+ "enum": [
+ "moderate",
+ "critical"
+ ]
+ },
+ {
+ "id": "SamplingProfileNode",
+ "description": "Heap profile sample.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "size",
+ "description": "Size of the sampled allocation.",
+ "type": "number"
+ },
+ {
+ "name": "total",
+ "description": "Total bytes attributed to this sample.",
+ "type": "number"
+ },
+ {
+ "name": "stack",
+ "description": "Execution stack at the point of allocation.",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ ]
+ },
+ {
+ "id": "SamplingProfile",
+ "description": "Array of heap profile samples.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "samples",
+ "type": "array",
+ "items": {
+ "$ref": "SamplingProfileNode"
+ }
+ },
+ {
+ "name": "modules",
+ "type": "array",
+ "items": {
+ "$ref": "Module"
+ }
+ }
+ ]
+ },
+ {
+ "id": "Module",
+ "description": "Executable module information",
+ "type": "object",
+ "properties": [
+ {
+ "name": "name",
+ "description": "Name of the module.",
+ "type": "string"
+ },
+ {
+ "name": "uuid",
+ "description": "UUID of the module.",
+ "type": "string"
+ },
+ {
+ "name": "baseAddress",
+ "description": "Base address where the module is loaded into memory. Encoded as a decimal\nor hexadecimal (0x prefixed) string.",
+ "type": "string"
+ },
+ {
+ "name": "size",
+ "description": "Size of the module in bytes.",
+ "type": "number"
+ }
+ ]
+ }
+ ],
+ "commands": [
+ {
+ "name": "getDOMCounters",
+ "returns": [
+ {
+ "name": "documents",
+ "type": "integer"
+ },
+ {
+ "name": "nodes",
+ "type": "integer"
+ },
+ {
+ "name": "jsEventListeners",
+ "type": "integer"
+ }
+ ]
+ },
+ {
+ "name": "prepareForLeakDetection"
+ },
+ {
+ "name": "forciblyPurgeJavaScriptMemory",
+ "description": "Simulate OomIntervention by purging V8 memory."
+ },
+ {
+ "name": "setPressureNotificationsSuppressed",
+ "description": "Enable/disable suppressing memory pressure notifications in all processes.",
+ "parameters": [
+ {
+ "name": "suppressed",
+ "description": "If true, memory pressure notifications will be suppressed.",
+ "type": "boolean"
+ }
+ ]
+ },
+ {
+ "name": "simulatePressureNotification",
+ "description": "Simulate a memory pressure notification in all processes.",
+ "parameters": [
+ {
+ "name": "level",
+ "description": "Memory pressure level of the notification.",
+ "$ref": "PressureLevel"
+ }
+ ]
+ },
+ {
+ "name": "startSampling",
+ "description": "Start collecting native memory profile.",
+ "parameters": [
+ {
+ "name": "samplingInterval",
+ "description": "Average number of bytes between samples.",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "suppressRandomness",
+ "description": "Do not randomize intervals between samples.",
+ "optional": true,
+ "type": "boolean"
+ }
+ ]
+ },
+ {
+ "name": "stopSampling",
+ "description": "Stop collecting native memory profile."
+ },
+ {
+ "name": "getAllTimeSamplingProfile",
+ "description": "Retrieve native memory allocations profile\ncollected since renderer process startup.",
+ "returns": [
+ {
+ "name": "profile",
+ "$ref": "SamplingProfile"
+ }
+ ]
+ },
+ {
+ "name": "getBrowserSamplingProfile",
+ "description": "Retrieve native memory allocations profile\ncollected since browser process startup.",
+ "returns": [
+ {
+ "name": "profile",
+ "$ref": "SamplingProfile"
+ }
+ ]
+ },
+ {
+ "name": "getSamplingProfile",
"description": "Retrieve native memory allocations profile collected since last\n`startSampling` call.",
"returns": [
{
- "name": "profile",
- "$ref": "SamplingProfile"
+ "name": "profile",
+ "$ref": "SamplingProfile"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "domain": "Network",
+ "description": "Network domain allows tracking network activities of the page. It exposes information about http,\nfile, data and other requests and responses, their headers, bodies, timing, etc.",
+ "dependencies": [
+ "Debugger",
+ "Runtime",
+ "Security"
+ ],
+ "types": [
+ {
+ "id": "ResourceType",
+ "description": "Resource type as it was perceived by the rendering engine.",
+ "type": "string",
+ "enum": [
+ "Document",
+ "Stylesheet",
+ "Image",
+ "Media",
+ "Font",
+ "Script",
+ "TextTrack",
+ "XHR",
+ "Fetch",
+ "EventSource",
+ "WebSocket",
+ "Manifest",
+ "SignedExchange",
+ "Ping",
+ "CSPViolationReport",
+ "Preflight",
+ "Other"
+ ]
+ },
+ {
+ "id": "LoaderId",
+ "description": "Unique loader identifier.",
+ "type": "string"
+ },
+ {
+ "id": "RequestId",
+ "description": "Unique request identifier.",
+ "type": "string"
+ },
+ {
+ "id": "InterceptionId",
+ "description": "Unique intercepted request identifier.",
+ "type": "string"
+ },
+ {
+ "id": "ErrorReason",
+ "description": "Network level fetch failure reason.",
+ "type": "string",
+ "enum": [
+ "Failed",
+ "Aborted",
+ "TimedOut",
+ "AccessDenied",
+ "ConnectionClosed",
+ "ConnectionReset",
+ "ConnectionRefused",
+ "ConnectionAborted",
+ "ConnectionFailed",
+ "NameNotResolved",
+ "InternetDisconnected",
+ "AddressUnreachable",
+ "BlockedByClient",
+ "BlockedByResponse"
+ ]
+ },
+ {
+ "id": "TimeSinceEpoch",
+ "description": "UTC time in seconds, counted from January 1, 1970.",
+ "type": "number"
+ },
+ {
+ "id": "MonotonicTime",
+ "description": "Monotonically increasing time in seconds since an arbitrary point in the past.",
+ "type": "number"
+ },
+ {
+ "id": "Headers",
+ "description": "Request / response headers as keys / values of JSON object.",
+ "type": "object"
+ },
+ {
+ "id": "ConnectionType",
+ "description": "The underlying connection technology that the browser is supposedly using.",
+ "type": "string",
+ "enum": [
+ "none",
+ "cellular2g",
+ "cellular3g",
+ "cellular4g",
+ "bluetooth",
+ "ethernet",
+ "wifi",
+ "wimax",
+ "other"
+ ]
+ },
+ {
+ "id": "CookieSameSite",
+ "description": "Represents the cookie's 'SameSite' status:\nhttps://tools.ietf.org/html/draft-west-first-party-cookies",
+ "type": "string",
+ "enum": [
+ "Strict",
+ "Lax",
+ "None"
+ ]
+ },
+ {
+ "id": "CookiePriority",
+ "description": "Represents the cookie's 'Priority' status:\nhttps://tools.ietf.org/html/draft-west-cookie-priority-00",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "Low",
+ "Medium",
+ "High"
+ ]
+ },
+ {
+ "id": "CookieSourceScheme",
+ "description": "Represents the source scheme of the origin that originally set the cookie.\nA value of \"Unset\" allows protocol clients to emulate legacy cookie scope for the scheme.\nThis is a temporary ability and it will be removed in the future.",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "Unset",
+ "NonSecure",
+ "Secure"
+ ]
+ },
+ {
+ "id": "ResourceTiming",
+ "description": "Timing information for the request.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "requestTime",
+ "description": "Timing's requestTime is a baseline in seconds, while the other numbers are ticks in\nmilliseconds relatively to this requestTime.",
+ "type": "number"
+ },
+ {
+ "name": "proxyStart",
+ "description": "Started resolving proxy.",
+ "type": "number"
+ },
+ {
+ "name": "proxyEnd",
+ "description": "Finished resolving proxy.",
+ "type": "number"
+ },
+ {
+ "name": "dnsStart",
+ "description": "Started DNS address resolve.",
+ "type": "number"
+ },
+ {
+ "name": "dnsEnd",
+ "description": "Finished DNS address resolve.",
+ "type": "number"
+ },
+ {
+ "name": "connectStart",
+ "description": "Started connecting to the remote host.",
+ "type": "number"
+ },
+ {
+ "name": "connectEnd",
+ "description": "Connected to the remote host.",
+ "type": "number"
+ },
+ {
+ "name": "sslStart",
+ "description": "Started SSL handshake.",
+ "type": "number"
+ },
+ {
+ "name": "sslEnd",
+ "description": "Finished SSL handshake.",
+ "type": "number"
+ },
+ {
+ "name": "workerStart",
+ "description": "Started running ServiceWorker.",
+ "experimental": true,
+ "type": "number"
+ },
+ {
+ "name": "workerReady",
+ "description": "Finished Starting ServiceWorker.",
+ "experimental": true,
+ "type": "number"
+ },
+ {
+ "name": "workerFetchStart",
+ "description": "Started fetch event.",
+ "experimental": true,
+ "type": "number"
+ },
+ {
+ "name": "workerRespondWithSettled",
+ "description": "Settled fetch event respondWith promise.",
+ "experimental": true,
+ "type": "number"
+ },
+ {
+ "name": "sendStart",
+ "description": "Started sending request.",
+ "type": "number"
+ },
+ {
+ "name": "sendEnd",
+ "description": "Finished sending request.",
+ "type": "number"
+ },
+ {
+ "name": "pushStart",
+ "description": "Time the server started pushing request.",
+ "experimental": true,
+ "type": "number"
+ },
+ {
+ "name": "pushEnd",
+ "description": "Time the server finished pushing request.",
+ "experimental": true,
+ "type": "number"
+ },
+ {
+ "name": "receiveHeadersEnd",
+ "description": "Finished receiving response headers.",
+ "type": "number"
+ }
+ ]
+ },
+ {
+ "id": "ResourcePriority",
+ "description": "Loading priority of a resource request.",
+ "type": "string",
+ "enum": [
+ "VeryLow",
+ "Low",
+ "Medium",
+ "High",
+ "VeryHigh"
+ ]
+ },
+ {
+ "id": "PostDataEntry",
+ "description": "Post data entry for HTTP request",
+ "type": "object",
+ "properties": [
+ {
+ "name": "bytes",
+ "optional": true,
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "id": "Request",
+ "description": "HTTP request data.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "url",
+ "description": "Request URL (without fragment).",
+ "type": "string"
+ },
+ {
+ "name": "urlFragment",
+ "description": "Fragment of the requested URL starting with hash, if present.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "method",
+ "description": "HTTP request method.",
+ "type": "string"
+ },
+ {
+ "name": "headers",
+ "description": "HTTP request headers.",
+ "$ref": "Headers"
+ },
+ {
+ "name": "postData",
+ "description": "HTTP POST request data.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "hasPostData",
+ "description": "True when the request has POST data. Note that postData might still be omitted when this flag is true when the data is too long.",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "postDataEntries",
+ "description": "Request body elements. This will be converted from base64 to binary",
+ "experimental": true,
+ "optional": true,
+ "type": "array",
+ "items": {
+ "$ref": "PostDataEntry"
+ }
+ },
+ {
+ "name": "mixedContentType",
+ "description": "The mixed content type of the request.",
+ "optional": true,
+ "$ref": "Security.MixedContentType"
+ },
+ {
+ "name": "initialPriority",
+ "description": "Priority of the resource request at the time request is sent.",
+ "$ref": "ResourcePriority"
+ },
+ {
+ "name": "referrerPolicy",
+ "description": "The referrer policy of the request, as defined in https://www.w3.org/TR/referrer-policy/",
+ "type": "string",
+ "enum": [
+ "unsafe-url",
+ "no-referrer-when-downgrade",
+ "no-referrer",
+ "origin",
+ "origin-when-cross-origin",
+ "same-origin",
+ "strict-origin",
+ "strict-origin-when-cross-origin"
+ ]
+ },
+ {
+ "name": "isLinkPreload",
+ "description": "Whether is loaded via link preload.",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "trustTokenParams",
+ "description": "Set for requests when the TrustToken API is used. Contains the parameters\npassed by the developer (e.g. via \"fetch\") as understood by the backend.",
+ "experimental": true,
+ "optional": true,
+ "$ref": "TrustTokenParams"
+ },
+ {
+ "name": "isSameSite",
+              "description": "True if this resource request is considered to be the 'same site' as the\nrequest corresponding to the main frame.",
+ "experimental": true,
+ "optional": true,
+ "type": "boolean"
+ }
+ ]
+ },
+ {
+ "id": "SignedCertificateTimestamp",
+ "description": "Details of a signed certificate timestamp (SCT).",
+ "type": "object",
+ "properties": [
+ {
+ "name": "status",
+ "description": "Validation status.",
+ "type": "string"
+ },
+ {
+ "name": "origin",
+ "description": "Origin.",
+ "type": "string"
+ },
+ {
+ "name": "logDescription",
+ "description": "Log name / description.",
+ "type": "string"
+ },
+ {
+ "name": "logId",
+ "description": "Log ID.",
+ "type": "string"
+ },
+ {
+ "name": "timestamp",
+ "description": "Issuance date. Unlike TimeSinceEpoch, this contains the number of\nmilliseconds since January 1, 1970, UTC, not the number of seconds.",
+ "type": "number"
+ },
+ {
+ "name": "hashAlgorithm",
+ "description": "Hash algorithm.",
+ "type": "string"
+ },
+ {
+ "name": "signatureAlgorithm",
+ "description": "Signature algorithm.",
+ "type": "string"
+ },
+ {
+ "name": "signatureData",
+ "description": "Signature data.",
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "id": "SecurityDetails",
+ "description": "Security details about a request.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "protocol",
+ "description": "Protocol name (e.g. \"TLS 1.2\" or \"QUIC\").",
+ "type": "string"
+ },
+ {
+ "name": "keyExchange",
+ "description": "Key Exchange used by the connection, or the empty string if not applicable.",
+ "type": "string"
+ },
+ {
+ "name": "keyExchangeGroup",
+ "description": "(EC)DH group used by the connection, if applicable.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "cipher",
+ "description": "Cipher name.",
+ "type": "string"
+ },
+ {
+ "name": "mac",
+ "description": "TLS MAC. Note that AEAD ciphers do not have separate MACs.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "certificateId",
+ "description": "Certificate ID value.",
+ "$ref": "Security.CertificateId"
+ },
+ {
+ "name": "subjectName",
+ "description": "Certificate subject name.",
+ "type": "string"
+ },
+ {
+ "name": "sanList",
+ "description": "Subject Alternative Name (SAN) DNS names and IP addresses.",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "issuer",
+ "description": "Name of the issuing CA.",
+ "type": "string"
+ },
+ {
+ "name": "validFrom",
+ "description": "Certificate valid from date.",
+ "$ref": "TimeSinceEpoch"
+ },
+ {
+ "name": "validTo",
+ "description": "Certificate valid to (expiration) date",
+ "$ref": "TimeSinceEpoch"
+ },
+ {
+ "name": "signedCertificateTimestampList",
+ "description": "List of signed certificate timestamps (SCTs).",
+ "type": "array",
+ "items": {
+ "$ref": "SignedCertificateTimestamp"
+ }
+ },
+ {
+ "name": "certificateTransparencyCompliance",
+ "description": "Whether the request complied with Certificate Transparency policy",
+ "$ref": "CertificateTransparencyCompliance"
}
]
- }
- ]
- },
- {
- "domain": "Network",
- "description": "Network domain allows tracking network activities of the page. It exposes information about http,\nfile, data and other requests and responses, their headers, bodies, timing, etc.",
- "dependencies": [
- "Debugger",
- "Runtime",
- "Security"
- ],
- "types": [
+ },
{
- "id": "ResourceType",
- "description": "Resource type as it was perceived by the rendering engine.",
+ "id": "CertificateTransparencyCompliance",
+ "description": "Whether the request complied with Certificate Transparency policy.",
"type": "string",
"enum": [
- "Document",
- "Stylesheet",
- "Image",
- "Media",
- "Font",
- "Script",
- "TextTrack",
- "XHR",
- "Fetch",
- "EventSource",
- "WebSocket",
- "Manifest",
- "SignedExchange",
- "Ping",
- "CSPViolationReport",
- "Other"
+ "unknown",
+ "not-compliant",
+ "compliant"
+ ]
+ },
+ {
+ "id": "BlockedReason",
+ "description": "The reason why request was blocked.",
+ "type": "string",
+ "enum": [
+ "other",
+ "csp",
+ "mixed-content",
+ "origin",
+ "inspector",
+ "subresource-filter",
+ "content-type",
+ "coep-frame-resource-needs-coep-header",
+ "coop-sandboxed-iframe-cannot-navigate-to-coop-page",
+ "corp-not-same-origin",
+ "corp-not-same-origin-after-defaulted-to-same-origin-by-coep",
+ "corp-not-same-site"
+ ]
+ },
+ {
+ "id": "CorsError",
+ "description": "The reason why request was blocked.",
+ "type": "string",
+ "enum": [
+ "DisallowedByMode",
+ "InvalidResponse",
+ "WildcardOriginNotAllowed",
+ "MissingAllowOriginHeader",
+ "MultipleAllowOriginValues",
+ "InvalidAllowOriginValue",
+ "AllowOriginMismatch",
+ "InvalidAllowCredentials",
+ "CorsDisabledScheme",
+ "PreflightInvalidStatus",
+ "PreflightDisallowedRedirect",
+ "PreflightWildcardOriginNotAllowed",
+ "PreflightMissingAllowOriginHeader",
+ "PreflightMultipleAllowOriginValues",
+ "PreflightInvalidAllowOriginValue",
+ "PreflightAllowOriginMismatch",
+ "PreflightInvalidAllowCredentials",
+ "PreflightMissingAllowExternal",
+ "PreflightInvalidAllowExternal",
+ "PreflightMissingAllowPrivateNetwork",
+ "PreflightInvalidAllowPrivateNetwork",
+ "InvalidAllowMethodsPreflightResponse",
+ "InvalidAllowHeadersPreflightResponse",
+ "MethodDisallowedByPreflightResponse",
+ "HeaderDisallowedByPreflightResponse",
+ "RedirectContainsCredentials",
+ "InsecurePrivateNetwork",
+ "InvalidPrivateNetworkAccess",
+ "UnexpectedPrivateNetworkAccess",
+ "NoCorsRedirectModeNotFollow"
+ ]
+ },
+ {
+ "id": "CorsErrorStatus",
+ "type": "object",
+ "properties": [
+ {
+ "name": "corsError",
+ "$ref": "CorsError"
+ },
+ {
+ "name": "failedParameter",
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "id": "ServiceWorkerResponseSource",
+ "description": "Source of serviceworker response.",
+ "type": "string",
+ "enum": [
+ "cache-storage",
+ "http-cache",
+ "fallback-code",
+ "network"
+ ]
+ },
+ {
+ "id": "TrustTokenParams",
+ "description": "Determines what type of Trust Token operation is executed and\ndepending on the type, some additional parameters. The values\nare specified in third_party/blink/renderer/core/fetch/trust_token.idl.",
+ "experimental": true,
+ "type": "object",
+ "properties": [
+ {
+ "name": "type",
+ "$ref": "TrustTokenOperationType"
+ },
+ {
+ "name": "refreshPolicy",
+ "description": "Only set for \"token-redemption\" type and determine whether\nto request a fresh SRR or use a still valid cached SRR.",
+ "type": "string",
+ "enum": [
+ "UseCached",
+ "Refresh"
+ ]
+ },
+ {
+ "name": "issuers",
+ "description": "Origins of issuers from whom to request tokens or redemption\nrecords.",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ ]
+ },
+ {
+ "id": "TrustTokenOperationType",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "Issuance",
+ "Redemption",
+ "Signing"
+ ]
+ },
+ {
+ "id": "Response",
+ "description": "HTTP response data.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "url",
+ "description": "Response URL. This URL can be different from CachedResource.url in case of redirect.",
+ "type": "string"
+ },
+ {
+ "name": "status",
+ "description": "HTTP response status code.",
+ "type": "integer"
+ },
+ {
+ "name": "statusText",
+ "description": "HTTP response status text.",
+ "type": "string"
+ },
+ {
+ "name": "headers",
+ "description": "HTTP response headers.",
+ "$ref": "Headers"
+ },
+ {
+ "name": "headersText",
+ "description": "HTTP response headers text. This has been replaced by the headers in Network.responseReceivedExtraInfo.",
+ "deprecated": true,
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "mimeType",
+ "description": "Resource mimeType as determined by the browser.",
+ "type": "string"
+ },
+ {
+ "name": "requestHeaders",
+ "description": "Refined HTTP request headers that were actually transmitted over the network.",
+ "optional": true,
+ "$ref": "Headers"
+ },
+ {
+ "name": "requestHeadersText",
+ "description": "HTTP request headers text. This has been replaced by the headers in Network.requestWillBeSentExtraInfo.",
+ "deprecated": true,
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "connectionReused",
+ "description": "Specifies whether physical connection was actually reused for this request.",
+ "type": "boolean"
+ },
+ {
+ "name": "connectionId",
+ "description": "Physical connection id that was actually used for this request.",
+ "type": "number"
+ },
+ {
+ "name": "remoteIPAddress",
+ "description": "Remote IP address.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "remotePort",
+ "description": "Remote port.",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "fromDiskCache",
+ "description": "Specifies that the request was served from the disk cache.",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "fromServiceWorker",
+ "description": "Specifies that the request was served from the ServiceWorker.",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "fromPrefetchCache",
+ "description": "Specifies that the request was served from the prefetch cache.",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "encodedDataLength",
+ "description": "Total number of bytes received for this request so far.",
+ "type": "number"
+ },
+ {
+ "name": "timing",
+ "description": "Timing information for the given request.",
+ "optional": true,
+ "$ref": "ResourceTiming"
+ },
+ {
+ "name": "serviceWorkerResponseSource",
+ "description": "Response source of response from ServiceWorker.",
+ "optional": true,
+ "$ref": "ServiceWorkerResponseSource"
+ },
+ {
+ "name": "responseTime",
+ "description": "The time at which the returned response was generated.",
+ "optional": true,
+ "$ref": "TimeSinceEpoch"
+ },
+ {
+ "name": "cacheStorageCacheName",
+ "description": "Cache Storage Cache Name.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "protocol",
+ "description": "Protocol used to fetch this request.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "securityState",
+ "description": "Security state of the request resource.",
+ "$ref": "Security.SecurityState"
+ },
+ {
+ "name": "securityDetails",
+ "description": "Security details for the request.",
+ "optional": true,
+ "$ref": "SecurityDetails"
+ }
]
},
{
- "id": "LoaderId",
- "description": "Unique loader identifier.",
- "type": "string"
- },
- {
- "id": "RequestId",
- "description": "Unique request identifier.",
- "type": "string"
- },
- {
- "id": "InterceptionId",
- "description": "Unique intercepted request identifier.",
- "type": "string"
- },
- {
- "id": "ErrorReason",
- "description": "Network level fetch failure reason.",
- "type": "string",
- "enum": [
- "Failed",
- "Aborted",
- "TimedOut",
- "AccessDenied",
- "ConnectionClosed",
- "ConnectionReset",
- "ConnectionRefused",
- "ConnectionAborted",
- "ConnectionFailed",
- "NameNotResolved",
- "InternetDisconnected",
- "AddressUnreachable",
- "BlockedByClient",
- "BlockedByResponse"
+ "id": "WebSocketRequest",
+ "description": "WebSocket request data.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "headers",
+ "description": "HTTP request headers.",
+ "$ref": "Headers"
+ }
]
},
{
- "id": "TimeSinceEpoch",
- "description": "UTC time in seconds, counted from January 1, 1970.",
- "type": "number"
- },
- {
- "id": "MonotonicTime",
- "description": "Monotonically increasing time in seconds since an arbitrary point in the past.",
- "type": "number"
- },
- {
- "id": "Headers",
- "description": "Request / response headers as keys / values of JSON object.",
- "type": "object"
+ "id": "WebSocketResponse",
+ "description": "WebSocket response data.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "status",
+ "description": "HTTP response status code.",
+ "type": "integer"
+ },
+ {
+ "name": "statusText",
+ "description": "HTTP response status text.",
+ "type": "string"
+ },
+ {
+ "name": "headers",
+ "description": "HTTP response headers.",
+ "$ref": "Headers"
+ },
+ {
+ "name": "headersText",
+ "description": "HTTP response headers text.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "requestHeaders",
+ "description": "HTTP request headers.",
+ "optional": true,
+ "$ref": "Headers"
+ },
+ {
+ "name": "requestHeadersText",
+ "description": "HTTP request headers text.",
+ "optional": true,
+ "type": "string"
+ }
+ ]
},
{
- "id": "ConnectionType",
- "description": "The underlying connection technology that the browser is supposedly using.",
- "type": "string",
- "enum": [
- "none",
- "cellular2g",
- "cellular3g",
- "cellular4g",
- "bluetooth",
- "ethernet",
- "wifi",
- "wimax",
- "other"
+ "id": "WebSocketFrame",
+ "description": "WebSocket message data. This represents an entire WebSocket message, not just a fragmented frame as the name suggests.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "opcode",
+ "description": "WebSocket message opcode.",
+ "type": "number"
+ },
+ {
+ "name": "mask",
+ "description": "WebSocket message mask.",
+ "type": "boolean"
+ },
+ {
+ "name": "payloadData",
+ "description": "WebSocket message payload data.\nIf the opcode is 1, this is a text message and payloadData is a UTF-8 string.\nIf the opcode isn't 1, then payloadData is a base64 encoded string representing binary data.",
+ "type": "string"
+ }
]
},
{
- "id": "CookieSameSite",
- "description": "Represents the cookie's 'SameSite' status:\nhttps://tools.ietf.org/html/draft-west-first-party-cookies",
- "type": "string",
- "enum": [
- "Strict",
- "Lax",
- "Extended",
- "None"
+ "id": "CachedResource",
+ "description": "Information about the cached resource.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "url",
+ "description": "Resource URL. This is the url of the original network request.",
+ "type": "string"
+ },
+ {
+ "name": "type",
+ "description": "Type of this resource.",
+ "$ref": "ResourceType"
+ },
+ {
+ "name": "response",
+ "description": "Cached response data.",
+ "optional": true,
+ "$ref": "Response"
+ },
+ {
+ "name": "bodySize",
+ "description": "Cached response body size.",
+ "type": "number"
+ }
]
},
{
- "id": "ResourceTiming",
- "description": "Timing information for the request.",
+ "id": "Initiator",
+ "description": "Information about the request initiator.",
"type": "object",
"properties": [
{
- "name": "requestTime",
- "description": "Timing's requestTime is a baseline in seconds, while the other numbers are ticks in\nmilliseconds relatively to this requestTime.",
- "type": "number"
+ "name": "type",
+ "description": "Type of this initiator.",
+ "type": "string",
+ "enum": [
+ "parser",
+ "script",
+ "preload",
+ "SignedExchange",
+ "preflight",
+ "other"
+ ]
},
{
- "name": "proxyStart",
- "description": "Started resolving proxy.",
- "type": "number"
+ "name": "stack",
+ "description": "Initiator JavaScript stack trace, set for Script only.",
+ "optional": true,
+ "$ref": "Runtime.StackTrace"
},
{
- "name": "proxyEnd",
- "description": "Finished resolving proxy.",
- "type": "number"
+ "name": "url",
+ "description": "Initiator URL, set for Parser type or for Script type (when script is importing module) or for SignedExchange type.",
+ "optional": true,
+ "type": "string"
},
{
- "name": "dnsStart",
- "description": "Started DNS address resolve.",
+ "name": "lineNumber",
+ "description": "Initiator line number, set for Parser type or for Script type (when script is importing\nmodule) (0-based).",
+ "optional": true,
"type": "number"
},
{
- "name": "dnsEnd",
- "description": "Finished DNS address resolve.",
+ "name": "columnNumber",
+ "description": "Initiator column number, set for Parser type or for Script type (when script is importing\nmodule) (0-based).",
+ "optional": true,
"type": "number"
},
{
- "name": "connectStart",
- "description": "Started connecting to the remote host.",
- "type": "number"
+ "name": "requestId",
+ "description": "Set if another request triggered this request (e.g. preflight).",
+ "optional": true,
+ "$ref": "RequestId"
+ }
+ ]
+ },
+ {
+ "id": "Cookie",
+ "description": "Cookie object",
+ "type": "object",
+ "properties": [
+ {
+ "name": "name",
+ "description": "Cookie name.",
+ "type": "string"
},
{
- "name": "connectEnd",
- "description": "Connected to the remote host.",
- "type": "number"
+ "name": "value",
+ "description": "Cookie value.",
+ "type": "string"
},
{
- "name": "sslStart",
- "description": "Started SSL handshake.",
- "type": "number"
+ "name": "domain",
+ "description": "Cookie domain.",
+ "type": "string"
},
{
- "name": "sslEnd",
- "description": "Finished SSL handshake.",
+ "name": "path",
+ "description": "Cookie path.",
+ "type": "string"
+ },
+ {
+ "name": "expires",
+ "description": "Cookie expiration date as the number of seconds since the UNIX epoch.",
"type": "number"
},
{
- "name": "workerStart",
- "description": "Started running ServiceWorker.",
- "experimental": true,
- "type": "number"
+ "name": "size",
+ "description": "Cookie size.",
+ "type": "integer"
+ },
+ {
+ "name": "httpOnly",
+ "description": "True if cookie is http-only.",
+ "type": "boolean"
+ },
+ {
+ "name": "secure",
+ "description": "True if cookie is secure.",
+ "type": "boolean"
+ },
+ {
+ "name": "session",
+ "description": "True in case of session cookie.",
+ "type": "boolean"
+ },
+ {
+ "name": "sameSite",
+ "description": "Cookie SameSite type.",
+ "optional": true,
+ "$ref": "CookieSameSite"
},
{
- "name": "workerReady",
- "description": "Finished Starting ServiceWorker.",
+ "name": "priority",
+ "description": "Cookie Priority",
"experimental": true,
- "type": "number"
+ "$ref": "CookiePriority"
},
{
- "name": "sendStart",
- "description": "Started sending request.",
- "type": "number"
+ "name": "sameParty",
+ "description": "True if cookie is SameParty.",
+ "experimental": true,
+ "type": "boolean"
},
{
- "name": "sendEnd",
- "description": "Finished sending request.",
- "type": "number"
+ "name": "sourceScheme",
+ "description": "Cookie source scheme type.",
+ "experimental": true,
+ "$ref": "CookieSourceScheme"
},
{
- "name": "pushStart",
- "description": "Time the server started pushing request.",
+ "name": "sourcePort",
+ "description": "Cookie source port. Valid values are {-1, [1, 65535]}, -1 indicates an unspecified port.\nAn unspecified port value allows protocol clients to emulate legacy cookie scope for the port.\nThis is a temporary ability and it will be removed in the future.",
"experimental": true,
- "type": "number"
+ "type": "integer"
},
{
- "name": "pushEnd",
- "description": "Time the server finished pushing request.",
+ "name": "partitionKey",
+ "description": "Cookie partition key. The site of the top-level URL the browser was visiting at the start\nof the request to the endpoint that set the cookie.",
"experimental": true,
- "type": "number"
+ "optional": true,
+ "type": "string"
},
{
- "name": "receiveHeadersEnd",
- "description": "Finished receiving response headers.",
- "type": "number"
+ "name": "partitionKeyOpaque",
+ "description": "True if cookie partition key is opaque.",
+ "experimental": true,
+ "optional": true,
+ "type": "boolean"
}
]
},
{
- "id": "ResourcePriority",
- "description": "Loading priority of a resource request.",
+ "id": "SetCookieBlockedReason",
+ "description": "Types of reasons why a cookie may not be stored from a response.",
+ "experimental": true,
"type": "string",
"enum": [
- "VeryLow",
- "Low",
- "Medium",
- "High",
- "VeryHigh"
+ "SecureOnly",
+ "SameSiteStrict",
+ "SameSiteLax",
+ "SameSiteUnspecifiedTreatedAsLax",
+ "SameSiteNoneInsecure",
+ "UserPreferences",
+ "SyntaxError",
+ "SchemeNotSupported",
+ "OverwriteSecure",
+ "InvalidDomain",
+ "InvalidPrefix",
+ "UnknownError",
+ "SchemefulSameSiteStrict",
+ "SchemefulSameSiteLax",
+ "SchemefulSameSiteUnspecifiedTreatedAsLax",
+ "SamePartyFromCrossPartyContext",
+ "SamePartyConflictsWithOtherAttributes",
+ "NameValuePairExceedsMaxSize"
]
},
{
- "id": "Request",
- "description": "HTTP request data.",
+ "id": "CookieBlockedReason",
+ "description": "Types of reasons why a cookie may not be sent with a request.",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "SecureOnly",
+ "NotOnPath",
+ "DomainMismatch",
+ "SameSiteStrict",
+ "SameSiteLax",
+ "SameSiteUnspecifiedTreatedAsLax",
+ "SameSiteNoneInsecure",
+ "UserPreferences",
+ "UnknownError",
+ "SchemefulSameSiteStrict",
+ "SchemefulSameSiteLax",
+ "SchemefulSameSiteUnspecifiedTreatedAsLax",
+ "SamePartyFromCrossPartyContext",
+ "NameValuePairExceedsMaxSize"
+ ]
+ },
+ {
+ "id": "BlockedSetCookieWithReason",
+ "description": "A cookie which was not stored from a response with the corresponding reason.",
+ "experimental": true,
"type": "object",
"properties": [
{
- "name": "url",
- "description": "Request URL (without fragment).",
- "type": "string"
- },
- {
- "name": "urlFragment",
- "description": "Fragment of the requested URL starting with hash, if present.",
- "optional": true,
- "type": "string"
- },
- {
- "name": "method",
- "description": "HTTP request method.",
- "type": "string"
- },
- {
- "name": "headers",
- "description": "HTTP request headers.",
- "$ref": "Headers"
+ "name": "blockedReasons",
+ "description": "The reason(s) this cookie was blocked.",
+ "type": "array",
+ "items": {
+ "$ref": "SetCookieBlockedReason"
+ }
},
{
- "name": "postData",
- "description": "HTTP POST request data.",
- "optional": true,
+ "name": "cookieLine",
+ "description": "The string representing this individual cookie as it would appear in the header.\nThis is not the entire \"cookie\" or \"set-cookie\" header which could have multiple cookies.",
"type": "string"
},
{
- "name": "hasPostData",
- "description": "True when the request has POST data. Note that postData might still be omitted when this flag is true when the data is too long.",
- "optional": true,
- "type": "boolean"
- },
- {
- "name": "mixedContentType",
- "description": "The mixed content type of the request.",
+ "name": "cookie",
+ "description": "The cookie object which represents the cookie which was not stored. It is optional because\nsometimes complete cookie information is not available, such as in the case of parsing\nerrors.",
"optional": true,
- "$ref": "Security.MixedContentType"
- },
- {
- "name": "initialPriority",
- "description": "Priority of the resource request at the time request is sent.",
- "$ref": "ResourcePriority"
- },
+ "$ref": "Cookie"
+ }
+ ]
+ },
+ {
+ "id": "BlockedCookieWithReason",
            "description": "A cookie which was not sent with a request with the corresponding reason.",
+ "experimental": true,
+ "type": "object",
+ "properties": [
{
- "name": "referrerPolicy",
- "description": "The referrer policy of the request, as defined in https://www.w3.org/TR/referrer-policy/",
- "type": "string",
- "enum": [
- "unsafe-url",
- "no-referrer-when-downgrade",
- "no-referrer",
- "origin",
- "origin-when-cross-origin",
- "same-origin",
- "strict-origin",
- "strict-origin-when-cross-origin"
- ]
+ "name": "blockedReasons",
+ "description": "The reason(s) the cookie was blocked.",
+ "type": "array",
+ "items": {
+ "$ref": "CookieBlockedReason"
+ }
},
{
- "name": "isLinkPreload",
- "description": "Whether is loaded via link preload.",
- "optional": true,
- "type": "boolean"
+ "name": "cookie",
+ "description": "The cookie object representing the cookie which was not sent.",
+ "$ref": "Cookie"
}
]
},
{
- "id": "SignedCertificateTimestamp",
- "description": "Details of a signed certificate timestamp (SCT).",
+ "id": "CookieParam",
+ "description": "Cookie parameter object",
"type": "object",
"properties": [
{
- "name": "status",
- "description": "Validation status.",
+ "name": "name",
+ "description": "Cookie name.",
"type": "string"
},
{
- "name": "origin",
- "description": "Origin.",
+ "name": "value",
+ "description": "Cookie value.",
"type": "string"
},
{
- "name": "logDescription",
- "description": "Log name / description.",
+ "name": "url",
+ "description": "The request-URI to associate with the setting of the cookie. This value can affect the\ndefault domain, path, source port, and source scheme values of the created cookie.",
+ "optional": true,
"type": "string"
},
{
- "name": "logId",
- "description": "Log ID.",
+ "name": "domain",
+ "description": "Cookie domain.",
+ "optional": true,
"type": "string"
},
{
- "name": "timestamp",
- "description": "Issuance date.",
- "$ref": "TimeSinceEpoch"
+ "name": "path",
+ "description": "Cookie path.",
+ "optional": true,
+ "type": "string"
},
{
- "name": "hashAlgorithm",
- "description": "Hash algorithm.",
- "type": "string"
+ "name": "secure",
+ "description": "True if cookie is secure.",
+ "optional": true,
+ "type": "boolean"
},
{
- "name": "signatureAlgorithm",
- "description": "Signature algorithm.",
- "type": "string"
+ "name": "httpOnly",
+ "description": "True if cookie is http-only.",
+ "optional": true,
+ "type": "boolean"
},
{
- "name": "signatureData",
- "description": "Signature data.",
- "type": "string"
- }
- ]
- },
- {
- "id": "SecurityDetails",
- "description": "Security details about a request.",
- "type": "object",
- "properties": [
+ "name": "sameSite",
+ "description": "Cookie SameSite type.",
+ "optional": true,
+ "$ref": "CookieSameSite"
+ },
{
- "name": "protocol",
- "description": "Protocol name (e.g. \"TLS 1.2\" or \"QUIC\").",
- "type": "string"
+ "name": "expires",
+ "description": "Cookie expiration date, session cookie if not set",
+ "optional": true,
+ "$ref": "TimeSinceEpoch"
},
{
- "name": "keyExchange",
- "description": "Key Exchange used by the connection, or the empty string if not applicable.",
- "type": "string"
+ "name": "priority",
+ "description": "Cookie Priority.",
+ "experimental": true,
+ "optional": true,
+ "$ref": "CookiePriority"
},
{
- "name": "keyExchangeGroup",
- "description": "(EC)DH group used by the connection, if applicable.",
+ "name": "sameParty",
+ "description": "True if cookie is SameParty.",
+ "experimental": true,
"optional": true,
- "type": "string"
+ "type": "boolean"
},
{
- "name": "cipher",
- "description": "Cipher name.",
- "type": "string"
+ "name": "sourceScheme",
+ "description": "Cookie source scheme type.",
+ "experimental": true,
+ "optional": true,
+ "$ref": "CookieSourceScheme"
},
{
- "name": "mac",
- "description": "TLS MAC. Note that AEAD ciphers do not have separate MACs.",
+ "name": "sourcePort",
+ "description": "Cookie source port. Valid values are {-1, [1, 65535]}, -1 indicates an unspecified port.\nAn unspecified port value allows protocol clients to emulate legacy cookie scope for the port.\nThis is a temporary ability and it will be removed in the future.",
+ "experimental": true,
"optional": true,
- "type": "string"
+ "type": "integer"
},
{
- "name": "certificateId",
- "description": "Certificate ID value.",
- "$ref": "Security.CertificateId"
+ "name": "partitionKey",
+ "description": "Cookie partition key. The site of the top-level URL the browser was visiting at the start\nof the request to the endpoint that set the cookie.\nIf not set, the cookie will be set as not partitioned.",
+ "experimental": true,
+ "optional": true,
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "id": "AuthChallenge",
+ "description": "Authorization challenge for HTTP status code 401 or 407.",
+ "experimental": true,
+ "type": "object",
+ "properties": [
+ {
+ "name": "source",
+ "description": "Source of the authentication challenge.",
+ "optional": true,
+ "type": "string",
+ "enum": [
+ "Server",
+ "Proxy"
+ ]
},
{
- "name": "subjectName",
- "description": "Certificate subject name.",
+ "name": "origin",
+ "description": "Origin of the challenger.",
"type": "string"
},
{
- "name": "sanList",
- "description": "Subject Alternative Name (SAN) DNS names and IP addresses.",
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- {
- "name": "issuer",
- "description": "Name of the issuing CA.",
+ "name": "scheme",
+ "description": "The authentication scheme used, such as basic or digest",
"type": "string"
},
{
- "name": "validFrom",
- "description": "Certificate valid from date.",
- "$ref": "TimeSinceEpoch"
- },
+ "name": "realm",
+ "description": "The realm of the challenge. May be empty.",
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "id": "AuthChallengeResponse",
+ "description": "Response to an AuthChallenge.",
+ "experimental": true,
+ "type": "object",
+ "properties": [
{
- "name": "validTo",
- "description": "Certificate valid to (expiration) date",
- "$ref": "TimeSinceEpoch"
+ "name": "response",
+ "description": "The decision on what to do in response to the authorization challenge. Default means\ndeferring to the default behavior of the net stack, which will likely either the Cancel\nauthentication or display a popup dialog box.",
+ "type": "string",
+ "enum": [
+ "Default",
+ "CancelAuth",
+ "ProvideCredentials"
+ ]
},
{
- "name": "signedCertificateTimestampList",
- "description": "List of signed certificate timestamps (SCTs).",
- "type": "array",
- "items": {
- "$ref": "SignedCertificateTimestamp"
- }
+ "name": "username",
+ "description": "The username to provide, possibly empty. Should only be set if response is\nProvideCredentials.",
+ "optional": true,
+ "type": "string"
},
{
- "name": "certificateTransparencyCompliance",
- "description": "Whether the request complied with Certificate Transparency policy",
- "$ref": "CertificateTransparencyCompliance"
+ "name": "password",
+ "description": "The password to provide, possibly empty. Should only be set if response is\nProvideCredentials.",
+ "optional": true,
+ "type": "string"
}
]
},
{
- "id": "CertificateTransparencyCompliance",
- "description": "Whether the request complied with Certificate Transparency policy.",
- "type": "string",
- "enum": [
- "unknown",
- "not-compliant",
- "compliant"
- ]
- },
- {
- "id": "BlockedReason",
- "description": "The reason why request was blocked.",
+ "id": "InterceptionStage",
+ "description": "Stages of the interception to begin intercepting. Request will intercept before the request is\nsent. Response will intercept after the response is received.",
+ "experimental": true,
"type": "string",
"enum": [
- "other",
- "csp",
- "mixed-content",
- "origin",
- "inspector",
- "subresource-filter",
- "content-type",
- "collapsed-by-client"
+ "Request",
+ "HeadersReceived"
]
},
{
- "id": "Response",
- "description": "HTTP response data.",
+ "id": "RequestPattern",
+ "description": "Request pattern for interception.",
+ "experimental": true,
"type": "object",
"properties": [
{
- "name": "url",
- "description": "Response URL. This URL can be different from CachedResource.url in case of redirect.",
+ "name": "urlPattern",
+ "description": "Wildcards (`'*'` -> zero or more, `'?'` -> exactly one) are allowed. Escape character is\nbackslash. Omitting is equivalent to `\"*\"`.",
+ "optional": true,
"type": "string"
},
{
- "name": "status",
- "description": "HTTP response status code.",
- "type": "integer"
+ "name": "resourceType",
+ "description": "If set, only requests for matching resource types will be intercepted.",
+ "optional": true,
+ "$ref": "ResourceType"
},
{
- "name": "statusText",
- "description": "HTTP response status text.",
- "type": "string"
- },
+ "name": "interceptionStage",
+ "description": "Stage at which to begin intercepting requests. Default is Request.",
+ "optional": true,
+ "$ref": "InterceptionStage"
+ }
+ ]
+ },
+ {
+ "id": "SignedExchangeSignature",
+ "description": "Information about a signed exchange signature.\nhttps://wicg.github.io/webpackage/draft-yasskin-httpbis-origin-signed-exchanges-impl.html#rfc.section.3.1",
+ "experimental": true,
+ "type": "object",
+ "properties": [
{
- "name": "headers",
- "description": "HTTP response headers.",
- "$ref": "Headers"
+ "name": "label",
+ "description": "Signed exchange signature label.",
+ "type": "string"
},
{
- "name": "headersText",
- "description": "HTTP response headers text.",
- "optional": true,
+ "name": "signature",
+ "description": "The hex string of signed exchange signature.",
"type": "string"
},
{
- "name": "mimeType",
- "description": "Resource mimeType as determined by the browser.",
+ "name": "integrity",
+ "description": "Signed exchange signature integrity.",
"type": "string"
},
{
- "name": "requestHeaders",
- "description": "Refined HTTP request headers that were actually transmitted over the network.",
+ "name": "certUrl",
+ "description": "Signed exchange signature cert Url.",
"optional": true,
- "$ref": "Headers"
+ "type": "string"
},
{
- "name": "requestHeadersText",
- "description": "HTTP request headers text.",
+ "name": "certSha256",
+ "description": "The hex string of signed exchange signature cert sha256.",
"optional": true,
"type": "string"
},
{
- "name": "connectionReused",
- "description": "Specifies whether physical connection was actually reused for this request.",
- "type": "boolean"
- },
- {
- "name": "connectionId",
- "description": "Physical connection id that was actually used for this request.",
- "type": "number"
- },
- {
- "name": "remoteIPAddress",
- "description": "Remote IP address.",
- "optional": true,
+ "name": "validityUrl",
+ "description": "Signed exchange signature validity Url.",
"type": "string"
},
{
- "name": "remotePort",
- "description": "Remote port.",
- "optional": true,
+ "name": "date",
+ "description": "Signed exchange signature date.",
"type": "integer"
},
{
- "name": "fromDiskCache",
- "description": "Specifies that the request was served from the disk cache.",
- "optional": true,
- "type": "boolean"
- },
- {
- "name": "fromServiceWorker",
- "description": "Specifies that the request was served from the ServiceWorker.",
- "optional": true,
- "type": "boolean"
+ "name": "expires",
+ "description": "Signed exchange signature expires.",
+ "type": "integer"
},
{
- "name": "fromPrefetchCache",
- "description": "Specifies that the request was served from the prefetch cache.",
+ "name": "certificates",
+ "description": "The encoded certificates.",
"optional": true,
- "type": "boolean"
- },
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ ]
+ },
+ {
+ "id": "SignedExchangeHeader",
+ "description": "Information about a signed exchange header.\nhttps://wicg.github.io/webpackage/draft-yasskin-httpbis-origin-signed-exchanges-impl.html#cbor-representation",
+ "experimental": true,
+ "type": "object",
+ "properties": [
{
- "name": "encodedDataLength",
- "description": "Total number of bytes received for this request so far.",
- "type": "number"
+ "name": "requestUrl",
+ "description": "Signed exchange request URL.",
+ "type": "string"
},
{
- "name": "timing",
- "description": "Timing information for the given request.",
- "optional": true,
- "$ref": "ResourceTiming"
+ "name": "responseCode",
+ "description": "Signed exchange response code.",
+ "type": "integer"
},
{
- "name": "protocol",
- "description": "Protocol used to fetch this request.",
- "optional": true,
- "type": "string"
+ "name": "responseHeaders",
+ "description": "Signed exchange response headers.",
+ "$ref": "Headers"
},
{
- "name": "securityState",
- "description": "Security state of the request resource.",
- "$ref": "Security.SecurityState"
+ "name": "signatures",
+ "description": "Signed exchange response signature.",
+ "type": "array",
+ "items": {
+ "$ref": "SignedExchangeSignature"
+ }
},
{
- "name": "securityDetails",
- "description": "Security details for the request.",
- "optional": true,
- "$ref": "SecurityDetails"
+ "name": "headerIntegrity",
+ "description": "Signed exchange header integrity hash in the form of \"sha256-\".",
+ "type": "string"
}
]
},
{
- "id": "WebSocketRequest",
- "description": "WebSocket request data.",
+ "id": "SignedExchangeErrorField",
+ "description": "Field type for a signed exchange related error.",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "signatureSig",
+ "signatureIntegrity",
+ "signatureCertUrl",
+ "signatureCertSha256",
+ "signatureValidityUrl",
+ "signatureTimestamps"
+ ]
+ },
+ {
+ "id": "SignedExchangeError",
+ "description": "Information about a signed exchange response.",
+ "experimental": true,
"type": "object",
"properties": [
{
- "name": "headers",
- "description": "HTTP request headers.",
- "$ref": "Headers"
+ "name": "message",
+ "description": "Error message.",
+ "type": "string"
+ },
+ {
+ "name": "signatureIndex",
+ "description": "The index of the signature which caused the error.",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "errorField",
+ "description": "The field which caused the error.",
+ "optional": true,
+ "$ref": "SignedExchangeErrorField"
}
]
},
{
- "id": "WebSocketResponse",
- "description": "WebSocket response data.",
+ "id": "SignedExchangeInfo",
+ "description": "Information about a signed exchange response.",
+ "experimental": true,
"type": "object",
"properties": [
{
- "name": "status",
- "description": "HTTP response status code.",
- "type": "integer"
- },
- {
- "name": "statusText",
- "description": "HTTP response status text.",
- "type": "string"
- },
- {
- "name": "headers",
- "description": "HTTP response headers.",
- "$ref": "Headers"
+ "name": "outerResponse",
+ "description": "The outer response of signed HTTP exchange which was received from network.",
+ "$ref": "Response"
},
{
- "name": "headersText",
- "description": "HTTP response headers text.",
+ "name": "header",
+ "description": "Information about the signed exchange header.",
"optional": true,
- "type": "string"
+ "$ref": "SignedExchangeHeader"
},
{
- "name": "requestHeaders",
- "description": "HTTP request headers.",
+ "name": "securityDetails",
+ "description": "Security details for the signed exchange header.",
"optional": true,
- "$ref": "Headers"
+ "$ref": "SecurityDetails"
},
{
- "name": "requestHeadersText",
- "description": "HTTP request headers text.",
+ "name": "errors",
+          "description": "Errors occurred while handling the signed exchange.",
"optional": true,
- "type": "string"
+ "type": "array",
+ "items": {
+ "$ref": "SignedExchangeError"
+ }
}
]
},
{
- "id": "WebSocketFrame",
- "description": "WebSocket message data. This represents an entire WebSocket message, not just a fragmented frame as the name suggests.",
+ "id": "ContentEncoding",
+ "description": "List of content encodings supported by the backend.",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "deflate",
+ "gzip",
+ "br"
+ ]
+ },
+ {
+ "id": "PrivateNetworkRequestPolicy",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "Allow",
+ "BlockFromInsecureToMorePrivate",
+ "WarnFromInsecureToMorePrivate",
+ "PreflightBlock",
+ "PreflightWarn"
+ ]
+ },
+ {
+ "id": "IPAddressSpace",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "Local",
+ "Private",
+ "Public",
+ "Unknown"
+ ]
+ },
+ {
+ "id": "ConnectTiming",
+ "experimental": true,
"type": "object",
"properties": [
{
- "name": "opcode",
- "description": "WebSocket message opcode.",
+ "name": "requestTime",
+ "description": "Timing's requestTime is a baseline in seconds, while the other numbers are ticks in\nmilliseconds relatively to this requestTime. Matches ResourceTiming's requestTime for\nthe same request (but not for redirected requests).",
"type": "number"
- },
- {
- "name": "mask",
- "description": "WebSocket message mask.",
- "type": "boolean"
- },
- {
- "name": "payloadData",
- "description": "WebSocket message payload data.\nIf the opcode is 1, this is a text message and payloadData is a UTF-8 string.\nIf the opcode isn't 1, then payloadData is a base64 encoded string representing binary data.",
- "type": "string"
}
]
},
{
- "id": "CachedResource",
- "description": "Information about the cached resource.",
+ "id": "ClientSecurityState",
+ "experimental": true,
"type": "object",
"properties": [
{
- "name": "url",
- "description": "Resource URL. This is the url of the original network request.",
- "type": "string"
- },
- {
- "name": "type",
- "description": "Type of this resource.",
- "$ref": "ResourceType"
+ "name": "initiatorIsSecureContext",
+ "type": "boolean"
},
{
- "name": "response",
- "description": "Cached response data.",
- "optional": true,
- "$ref": "Response"
+ "name": "initiatorIPAddressSpace",
+ "$ref": "IPAddressSpace"
},
{
- "name": "bodySize",
- "description": "Cached response body size.",
- "type": "number"
+ "name": "privateNetworkRequestPolicy",
+ "$ref": "PrivateNetworkRequestPolicy"
}
]
},
{
- "id": "Initiator",
- "description": "Information about the request initiator.",
+ "id": "CrossOriginOpenerPolicyValue",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "SameOrigin",
+ "SameOriginAllowPopups",
+ "UnsafeNone",
+ "SameOriginPlusCoep",
+ "SameOriginAllowPopupsPlusCoep"
+ ]
+ },
+ {
+ "id": "CrossOriginOpenerPolicyStatus",
+ "experimental": true,
"type": "object",
"properties": [
{
- "name": "type",
- "description": "Type of this initiator.",
- "type": "string",
- "enum": [
- "parser",
- "script",
- "preload",
- "SignedExchange",
- "other"
- ]
+ "name": "value",
+ "$ref": "CrossOriginOpenerPolicyValue"
},
{
- "name": "stack",
- "description": "Initiator JavaScript stack trace, set for Script only.",
- "optional": true,
- "$ref": "Runtime.StackTrace"
+ "name": "reportOnlyValue",
+ "$ref": "CrossOriginOpenerPolicyValue"
},
{
- "name": "url",
- "description": "Initiator URL, set for Parser type or for Script type (when script is importing module) or for SignedExchange type.",
+ "name": "reportingEndpoint",
"optional": true,
"type": "string"
},
{
- "name": "lineNumber",
- "description": "Initiator line number, set for Parser type or for Script type (when script is importing\nmodule) (0-based).",
+ "name": "reportOnlyReportingEndpoint",
"optional": true,
- "type": "number"
+ "type": "string"
}
]
},
{
- "id": "Cookie",
- "description": "Cookie object",
+ "id": "CrossOriginEmbedderPolicyValue",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "None",
+ "Credentialless",
+ "RequireCorp"
+ ]
+ },
+ {
+ "id": "CrossOriginEmbedderPolicyStatus",
+ "experimental": true,
"type": "object",
"properties": [
- {
- "name": "name",
- "description": "Cookie name.",
- "type": "string"
- },
{
"name": "value",
- "description": "Cookie value.",
- "type": "string"
+ "$ref": "CrossOriginEmbedderPolicyValue"
},
{
- "name": "domain",
- "description": "Cookie domain.",
- "type": "string"
+ "name": "reportOnlyValue",
+ "$ref": "CrossOriginEmbedderPolicyValue"
},
{
- "name": "path",
- "description": "Cookie path.",
+ "name": "reportingEndpoint",
+ "optional": true,
"type": "string"
},
{
- "name": "expires",
- "description": "Cookie expiration date as the number of seconds since the UNIX epoch.",
- "type": "number"
- },
- {
- "name": "size",
- "description": "Cookie size.",
- "type": "integer"
- },
- {
- "name": "httpOnly",
- "description": "True if cookie is http-only.",
- "type": "boolean"
- },
- {
- "name": "secure",
- "description": "True if cookie is secure.",
- "type": "boolean"
- },
+ "name": "reportOnlyReportingEndpoint",
+ "optional": true,
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "id": "SecurityIsolationStatus",
+ "experimental": true,
+ "type": "object",
+ "properties": [
{
- "name": "session",
- "description": "True in case of session cookie.",
- "type": "boolean"
+ "name": "coop",
+ "optional": true,
+ "$ref": "CrossOriginOpenerPolicyStatus"
},
{
- "name": "sameSite",
- "description": "Cookie SameSite type.",
+ "name": "coep",
"optional": true,
- "$ref": "CookieSameSite"
+ "$ref": "CrossOriginEmbedderPolicyStatus"
}
]
},
{
- "id": "SetCookieBlockedReason",
- "description": "Types of reasons why a cookie may not be stored from a response.",
+ "id": "ReportStatus",
+ "description": "The status of a Reporting API report.",
"experimental": true,
"type": "string",
"enum": [
- "SecureOnly",
- "SameSiteStrict",
- "SameSiteLax",
- "SameSiteExtended",
- "SameSiteUnspecifiedTreatedAsLax",
- "SameSiteNoneInsecure",
- "UserPreferences",
- "SyntaxError",
- "SchemeNotSupported",
- "OverwriteSecure",
- "InvalidDomain",
- "InvalidPrefix",
- "UnknownError"
+ "Queued",
+ "Pending",
+ "MarkedForRemoval",
+ "Success"
]
},
{
- "id": "CookieBlockedReason",
- "description": "Types of reasons why a cookie may not be sent with a request.",
+ "id": "ReportId",
"experimental": true,
- "type": "string",
- "enum": [
- "SecureOnly",
- "NotOnPath",
- "DomainMismatch",
- "SameSiteStrict",
- "SameSiteLax",
- "SameSiteExtended",
- "SameSiteUnspecifiedTreatedAsLax",
- "SameSiteNoneInsecure",
- "UserPreferences",
- "UnknownError"
- ]
+ "type": "string"
},
{
- "id": "BlockedSetCookieWithReason",
- "description": "A cookie which was not stored from a response with the corresponding reason.",
+ "id": "ReportingApiReport",
+ "description": "An object representing a report generated by the Reporting API.",
"experimental": true,
"type": "object",
"properties": [
{
- "name": "blockedReason",
- "description": "The reason this cookie was blocked.",
- "$ref": "SetCookieBlockedReason"
+ "name": "id",
+ "$ref": "ReportId"
},
{
- "name": "cookieLine",
- "description": "The string representing this individual cookie as it would appear in the header.\nThis is not the entire \"cookie\" or \"set-cookie\" header which could have multiple cookies.",
+ "name": "initiatorUrl",
+ "description": "The URL of the document that triggered the report.",
"type": "string"
},
{
- "name": "cookie",
- "description": "The cookie object which represents the cookie which was not stored. It is optional because\nsometimes complete cookie information is not available, such as in the case of parsing\nerrors.",
- "optional": true,
- "$ref": "Cookie"
+ "name": "destination",
+ "description": "The name of the endpoint group that should be used to deliver the report.",
+ "type": "string"
+ },
+ {
+ "name": "type",
+ "description": "The type of the report (specifies the set of data that is contained in the report body).",
+ "type": "string"
+ },
+ {
+ "name": "timestamp",
+ "description": "When the report was generated.",
+ "$ref": "Network.TimeSinceEpoch"
+ },
+ {
+ "name": "depth",
+ "description": "How many uploads deep the related request was.",
+ "type": "integer"
+ },
+ {
+ "name": "completedAttempts",
+ "description": "The number of delivery attempts made so far, not including an active attempt.",
+ "type": "integer"
+ },
+ {
+ "name": "body",
+ "type": "object"
+ },
+ {
+ "name": "status",
+ "$ref": "ReportStatus"
}
]
},
{
- "id": "BlockedCookieWithReason",
- "description": "A cookie with was not sent with a request with the corresponding reason.",
+ "id": "ReportingApiEndpoint",
"experimental": true,
"type": "object",
"properties": [
{
- "name": "blockedReason",
- "description": "The reason the cookie was blocked.",
- "$ref": "CookieBlockedReason"
+ "name": "url",
+ "description": "The URL of the endpoint to which reports may be delivered.",
+ "type": "string"
},
{
- "name": "cookie",
- "description": "The cookie object representing the cookie which was not sent.",
- "$ref": "Cookie"
+ "name": "groupName",
+ "description": "Name of the endpoint group.",
+ "type": "string"
}
]
},
{
- "id": "CookieParam",
- "description": "Cookie parameter object",
+ "id": "LoadNetworkResourcePageResult",
+ "description": "An object providing the result of a network resource load.",
+ "experimental": true,
"type": "object",
"properties": [
{
- "name": "name",
- "description": "Cookie name.",
- "type": "string"
- },
- {
- "name": "value",
- "description": "Cookie value.",
- "type": "string"
- },
- {
- "name": "url",
- "description": "The request-URI to associate with the setting of the cookie. This value can affect the\ndefault domain and path values of the created cookie.",
- "optional": true,
- "type": "string"
+ "name": "success",
+ "type": "boolean"
},
{
- "name": "domain",
- "description": "Cookie domain.",
+ "name": "netError",
+ "description": "Optional values used for error reporting.",
"optional": true,
- "type": "string"
+ "type": "number"
},
{
- "name": "path",
- "description": "Cookie path.",
+ "name": "netErrorName",
"optional": true,
"type": "string"
},
{
- "name": "secure",
- "description": "True if cookie is secure.",
- "optional": true,
- "type": "boolean"
- },
- {
- "name": "httpOnly",
- "description": "True if cookie is http-only.",
+ "name": "httpStatusCode",
"optional": true,
- "type": "boolean"
+ "type": "number"
},
{
- "name": "sameSite",
- "description": "Cookie SameSite type.",
+ "name": "stream",
+ "description": "If successful, one of the following two fields holds the result.",
"optional": true,
- "$ref": "CookieSameSite"
+ "$ref": "IO.StreamHandle"
},
{
- "name": "expires",
- "description": "Cookie expiration date, session cookie if not set",
+ "name": "headers",
+ "description": "Response headers.",
"optional": true,
- "$ref": "TimeSinceEpoch"
+ "$ref": "Network.Headers"
}
]
},
{
- "id": "AuthChallenge",
- "description": "Authorization challenge for HTTP status code 401 or 407.",
+ "id": "LoadNetworkResourceOptions",
+ "description": "An options object that may be extended later to better support CORS,\nCORB and streaming.",
"experimental": true,
"type": "object",
"properties": [
{
- "name": "source",
- "description": "Source of the authentication challenge.",
- "optional": true,
- "type": "string",
- "enum": [
- "Server",
- "Proxy"
- ]
+ "name": "disableCache",
+ "type": "boolean"
},
{
- "name": "origin",
- "description": "Origin of the challenger.",
- "type": "string"
- },
+ "name": "includeCredentials",
+ "type": "boolean"
+ }
+ ]
+ }
+ ],
+ "commands": [
+ {
+ "name": "setAcceptedEncodings",
+ "description": "Sets a list of content encodings that will be accepted. Empty list means no encoding is accepted.",
+ "experimental": true,
+ "parameters": [
{
- "name": "scheme",
- "description": "The authentication scheme used, such as basic or digest",
- "type": "string"
- },
+ "name": "encodings",
+ "description": "List of accepted content encodings.",
+ "type": "array",
+ "items": {
+ "$ref": "ContentEncoding"
+ }
+ }
+ ]
+ },
+ {
+ "name": "clearAcceptedEncodingsOverride",
+ "description": "Clears accepted encodings set by setAcceptedEncodings",
+ "experimental": true
+ },
+ {
+ "name": "canClearBrowserCache",
+ "description": "Tells whether clearing browser cache is supported.",
+ "deprecated": true,
+ "returns": [
{
- "name": "realm",
- "description": "The realm of the challenge. May be empty.",
- "type": "string"
+ "name": "result",
+ "description": "True if browser cache can be cleared.",
+ "type": "boolean"
}
]
},
{
- "id": "AuthChallengeResponse",
- "description": "Response to an AuthChallenge.",
+ "name": "canClearBrowserCookies",
+ "description": "Tells whether clearing browser cookies is supported.",
+ "deprecated": true,
+ "returns": [
+ {
+ "name": "result",
+ "description": "True if browser cookies can be cleared.",
+ "type": "boolean"
+ }
+ ]
+ },
+ {
+ "name": "canEmulateNetworkConditions",
+ "description": "Tells whether emulation of network conditions is supported.",
+ "deprecated": true,
+ "returns": [
+ {
+ "name": "result",
+ "description": "True if emulation of network conditions is supported.",
+ "type": "boolean"
+ }
+ ]
+ },
+ {
+ "name": "clearBrowserCache",
+ "description": "Clears browser cache."
+ },
+ {
+ "name": "clearBrowserCookies",
+ "description": "Clears browser cookies."
+ },
+ {
+ "name": "continueInterceptedRequest",
+ "description": "Response to Network.requestIntercepted which either modifies the request to continue with any\nmodifications, or blocks it, or completes it with the provided response bytes. If a network\nfetch occurs as a result which encounters a redirect an additional Network.requestIntercepted\nevent will be sent with the same InterceptionId.\nDeprecated, use Fetch.continueRequest, Fetch.fulfillRequest and Fetch.failRequest instead.",
"experimental": true,
- "type": "object",
- "properties": [
+ "deprecated": true,
+ "parameters": [
{
- "name": "response",
- "description": "The decision on what to do in response to the authorization challenge. Default means\ndeferring to the default behavior of the net stack, which will likely either the Cancel\nauthentication or display a popup dialog box.",
- "type": "string",
- "enum": [
- "Default",
- "CancelAuth",
- "ProvideCredentials"
- ]
+ "name": "interceptionId",
+ "$ref": "InterceptionId"
},
{
- "name": "username",
- "description": "The username to provide, possibly empty. Should only be set if response is\nProvideCredentials.",
+ "name": "errorReason",
+ "description": "If set this causes the request to fail with the given reason. Passing `Aborted` for requests\nmarked with `isNavigationRequest` also cancels the navigation. Must not be set in response\nto an authChallenge.",
+ "optional": true,
+ "$ref": "ErrorReason"
+ },
+ {
+ "name": "rawResponse",
+ "description": "If set the requests completes using with the provided base64 encoded raw response, including\nHTTP status line and headers etc... Must not be set in response to an authChallenge. (Encoded as a base64 string when passed over JSON)",
"optional": true,
"type": "string"
},
{
- "name": "password",
- "description": "The password to provide, possibly empty. Should only be set if response is\nProvideCredentials.",
+ "name": "url",
+ "description": "If set the request url will be modified in a way that's not observable by page. Must not be\nset in response to an authChallenge.",
"optional": true,
"type": "string"
- }
- ]
- },
- {
- "id": "InterceptionStage",
- "description": "Stages of the interception to begin intercepting. Request will intercept before the request is\nsent. Response will intercept after the response is received.",
- "experimental": true,
- "type": "string",
- "enum": [
- "Request",
- "HeadersReceived"
- ]
- },
- {
- "id": "RequestPattern",
- "description": "Request pattern for interception.",
- "experimental": true,
- "type": "object",
- "properties": [
+ },
{
- "name": "urlPattern",
- "description": "Wildcards ('*' -> zero or more, '?' -> exactly one) are allowed. Escape character is\nbackslash. Omitting is equivalent to \"*\".",
+ "name": "method",
+ "description": "If set this allows the request method to be overridden. Must not be set in response to an\nauthChallenge.",
"optional": true,
"type": "string"
},
{
- "name": "resourceType",
- "description": "If set, only requests for matching resource types will be intercepted.",
+ "name": "postData",
+ "description": "If set this allows postData to be set. Must not be set in response to an authChallenge.",
"optional": true,
- "$ref": "ResourceType"
+ "type": "string"
},
{
- "name": "interceptionStage",
- "description": "Stage at wich to begin intercepting requests. Default is Request.",
+ "name": "headers",
+ "description": "If set this allows the request headers to be changed. Must not be set in response to an\nauthChallenge.",
"optional": true,
- "$ref": "InterceptionStage"
+ "$ref": "Headers"
+ },
+ {
+ "name": "authChallengeResponse",
+ "description": "Response to a requestIntercepted with an authChallenge. Must not be set otherwise.",
+ "optional": true,
+ "$ref": "AuthChallengeResponse"
}
]
},
{
- "id": "SignedExchangeSignature",
- "description": "Information about a signed exchange signature.\nhttps://wicg.github.io/webpackage/draft-yasskin-httpbis-origin-signed-exchanges-impl.html#rfc.section.3.1",
- "experimental": true,
- "type": "object",
- "properties": [
+ "name": "deleteCookies",
+ "description": "Deletes browser cookies with matching name and url or domain/path pair.",
+ "parameters": [
{
- "name": "label",
- "description": "Signed exchange signature label.",
+ "name": "name",
+ "description": "Name of the cookies to remove.",
"type": "string"
},
{
- "name": "signature",
- "description": "The hex string of signed exchange signature.",
+ "name": "url",
+ "description": "If specified, deletes all the cookies with the given name where domain and path match\nprovided URL.",
+ "optional": true,
"type": "string"
},
{
- "name": "integrity",
- "description": "Signed exchange signature integrity.",
+ "name": "domain",
+ "description": "If specified, deletes only cookies with the exact domain.",
+ "optional": true,
"type": "string"
},
{
- "name": "certUrl",
- "description": "Signed exchange signature cert Url.",
+ "name": "path",
+ "description": "If specified, deletes only cookies with the exact path.",
"optional": true,
"type": "string"
+ }
+ ]
+ },
+ {
+ "name": "disable",
+ "description": "Disables network tracking, prevents network events from being sent to the client."
+ },
+ {
+ "name": "emulateNetworkConditions",
+ "description": "Activates emulation of network conditions.",
+ "parameters": [
+ {
+ "name": "offline",
+ "description": "True to emulate internet disconnection.",
+ "type": "boolean"
},
{
- "name": "certSha256",
- "description": "The hex string of signed exchange signature cert sha256.",
- "optional": true,
- "type": "string"
+ "name": "latency",
+ "description": "Minimum latency from request sent to response headers received (ms).",
+ "type": "number"
+ },
+ {
+ "name": "downloadThroughput",
+ "description": "Maximal aggregated download throughput (bytes/sec). -1 disables download throttling.",
+ "type": "number"
},
{
- "name": "validityUrl",
- "description": "Signed exchange signature validity Url.",
- "type": "string"
+ "name": "uploadThroughput",
+ "description": "Maximal aggregated upload throughput (bytes/sec). -1 disables upload throttling.",
+ "type": "number"
},
{
- "name": "date",
- "description": "Signed exchange signature date.",
+ "name": "connectionType",
+ "description": "Connection type if known.",
+ "optional": true,
+ "$ref": "ConnectionType"
+ }
+ ]
+ },
+ {
+ "name": "enable",
+ "description": "Enables network tracking, network events will now be delivered to the client.",
+ "parameters": [
+ {
+ "name": "maxTotalBufferSize",
+ "description": "Buffer size in bytes to use when preserving network payloads (XHRs, etc).",
+ "experimental": true,
+ "optional": true,
"type": "integer"
},
{
- "name": "expires",
- "description": "Signed exchange signature expires.",
+ "name": "maxResourceBufferSize",
+ "description": "Per-resource buffer size in bytes to use when preserving network payloads (XHRs, etc).",
+ "experimental": true,
+ "optional": true,
"type": "integer"
},
{
- "name": "certificates",
- "description": "The encoded certificates.",
+ "name": "maxPostDataSize",
+ "description": "Longest post body size (in bytes) that would be included in requestWillBeSent notification",
"optional": true,
+ "type": "integer"
+ }
+ ]
+ },
+ {
+ "name": "getAllCookies",
+ "description": "Returns all browser cookies. Depending on the backend support, will return detailed cookie\ninformation in the `cookies` field.",
+ "returns": [
+ {
+ "name": "cookies",
+ "description": "Array of cookie objects.",
"type": "array",
"items": {
- "type": "string"
+ "$ref": "Cookie"
}
}
]
},
{
- "id": "SignedExchangeHeader",
- "description": "Information about a signed exchange header.\nhttps://wicg.github.io/webpackage/draft-yasskin-httpbis-origin-signed-exchanges-impl.html#cbor-representation",
+ "name": "getCertificate",
+ "description": "Returns the DER-encoded certificate.",
"experimental": true,
- "type": "object",
- "properties": [
+ "parameters": [
{
- "name": "requestUrl",
- "description": "Signed exchange request URL.",
+ "name": "origin",
+ "description": "Origin to get certificate for.",
"type": "string"
- },
+ }
+ ],
+ "returns": [
{
- "name": "responseCode",
- "description": "Signed exchange response code.",
- "type": "integer"
- },
+ "name": "tableNames",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ ]
+ },
+ {
+ "name": "getCookies",
+ "description": "Returns all browser cookies for the current URL. Depending on the backend support, will return\ndetailed cookie information in the `cookies` field.",
+ "parameters": [
{
- "name": "responseHeaders",
- "description": "Signed exchange response headers.",
- "$ref": "Headers"
- },
+ "name": "urls",
+ "description": "The list of URLs for which applicable cookies will be fetched.\nIf not specified, it's assumed to be set to the list containing\nthe URLs of the page and all of its subframes.",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ ],
+ "returns": [
{
- "name": "signatures",
- "description": "Signed exchange response signature.",
+ "name": "cookies",
+ "description": "Array of cookie objects.",
"type": "array",
"items": {
- "$ref": "SignedExchangeSignature"
+ "$ref": "Cookie"
}
- },
+ }
+ ]
+ },
+ {
+ "name": "getResponseBody",
+ "description": "Returns content served for the given request.",
+ "parameters": [
{
- "name": "headerIntegrity",
- "description": "Signed exchange header integrity hash in the form of \"sha256-\".",
+ "name": "requestId",
+ "description": "Identifier of the network request to get content for.",
+ "$ref": "RequestId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "body",
+ "description": "Response body.",
"type": "string"
+ },
+ {
+ "name": "base64Encoded",
+ "description": "True, if content was sent as base64.",
+ "type": "boolean"
}
]
},
{
- "id": "SignedExchangeErrorField",
- "description": "Field type for a signed exchange related error.",
- "experimental": true,
- "type": "string",
- "enum": [
- "signatureSig",
- "signatureIntegrity",
- "signatureCertUrl",
- "signatureCertSha256",
- "signatureValidityUrl",
- "signatureTimestamps"
+ "name": "getRequestPostData",
+ "description": "Returns post data sent with the request. Returns an error when no data was sent with the request.",
+ "parameters": [
+ {
+ "name": "requestId",
+ "description": "Identifier of the network request to get content for.",
+ "$ref": "RequestId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "postData",
+ "description": "Request body string, omitting files from multipart requests",
+ "type": "string"
+ }
]
},
{
- "id": "SignedExchangeError",
- "description": "Information about a signed exchange response.",
+ "name": "getResponseBodyForInterception",
+ "description": "Returns content served for the given currently intercepted request.",
"experimental": true,
- "type": "object",
- "properties": [
+ "parameters": [
{
- "name": "message",
- "description": "Error message.",
+ "name": "interceptionId",
+ "description": "Identifier for the intercepted request to get body for.",
+ "$ref": "InterceptionId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "body",
+ "description": "Response body.",
"type": "string"
},
{
- "name": "signatureIndex",
- "description": "The index of the signature which caused the error.",
- "optional": true,
- "type": "integer"
- },
+ "name": "base64Encoded",
+ "description": "True, if content was sent as base64.",
+ "type": "boolean"
+ }
+ ]
+ },
+ {
+ "name": "takeResponseBodyForInterceptionAsStream",
+ "description": "Returns a handle to the stream representing the response body. Note that after this command,\nthe intercepted request can't be continued as is -- you either need to cancel it or to provide\nthe response body. The stream only supports sequential read, IO.read will fail if the position\nis specified.",
+ "experimental": true,
+ "parameters": [
{
- "name": "errorField",
- "description": "The field which caused the error.",
- "optional": true,
- "$ref": "SignedExchangeErrorField"
+ "name": "interceptionId",
+ "$ref": "InterceptionId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "stream",
+ "$ref": "IO.StreamHandle"
}
]
},
{
- "id": "SignedExchangeInfo",
- "description": "Information about a signed exchange response.",
+ "name": "replayXHR",
+ "description": "This method sends a new XMLHttpRequest which is identical to the original one. The following\nparameters should be identical: method, url, async, request body, extra headers, withCredentials\nattribute, user, password.",
"experimental": true,
- "type": "object",
- "properties": [
+ "parameters": [
{
- "name": "outerResponse",
- "description": "The outer response of signed HTTP exchange which was received from network.",
- "$ref": "Response"
+ "name": "requestId",
+ "description": "Identifier of XHR to replay.",
+ "$ref": "RequestId"
+ }
+ ]
+ },
+ {
+ "name": "searchInResponseBody",
+ "description": "Searches for given string in response content.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "requestId",
+ "description": "Identifier of the network response to search.",
+ "$ref": "RequestId"
},
{
- "name": "header",
- "description": "Information about the signed exchange header.",
- "optional": true,
- "$ref": "SignedExchangeHeader"
+ "name": "query",
+ "description": "String to search for.",
+ "type": "string"
},
{
- "name": "securityDetails",
- "description": "Security details for the signed exchange header.",
+ "name": "caseSensitive",
+ "description": "If true, search is case sensitive.",
"optional": true,
- "$ref": "SecurityDetails"
+ "type": "boolean"
},
{
- "name": "errors",
- "description": "Errors occurred while handling the signed exchagne.",
+ "name": "isRegex",
+ "description": "If true, treats string parameter as regex.",
"optional": true,
+ "type": "boolean"
+ }
+ ],
+ "returns": [
+ {
+ "name": "result",
+ "description": "List of search matches.",
"type": "array",
"items": {
- "$ref": "SignedExchangeError"
+ "$ref": "Debugger.SearchMatch"
}
}
]
- }
- ],
- "commands": [
+ },
{
- "name": "canClearBrowserCache",
- "description": "Tells whether clearing browser cache is supported.",
- "deprecated": true,
- "returns": [
+ "name": "setBlockedURLs",
+ "description": "Blocks URLs from loading.",
+ "experimental": true,
+ "parameters": [
{
- "name": "result",
- "description": "True if browser cache can be cleared.",
- "type": "boolean"
+ "name": "urls",
+ "description": "URL patterns to block. Wildcards ('*') are allowed.",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
}
]
},
{
- "name": "canClearBrowserCookies",
- "description": "Tells whether clearing browser cookies is supported.",
- "deprecated": true,
- "returns": [
+ "name": "setBypassServiceWorker",
+ "description": "Toggles ignoring of service worker for each request.",
+ "experimental": true,
+ "parameters": [
{
- "name": "result",
- "description": "True if browser cookies can be cleared.",
+ "name": "bypass",
+ "description": "Bypass service worker and load from network.",
"type": "boolean"
}
]
},
{
- "name": "canEmulateNetworkConditions",
- "description": "Tells whether emulation of network conditions is supported.",
- "deprecated": true,
- "returns": [
+ "name": "setCacheDisabled",
+ "description": "Toggles ignoring cache for each request. If `true`, cache will not be used.",
+ "parameters": [
{
- "name": "result",
- "description": "True if emulation of network conditions is supported.",
+ "name": "cacheDisabled",
+ "description": "Cache disabled state.",
"type": "boolean"
}
]
},
{
- "name": "clearBrowserCache",
- "description": "Clears browser cache."
- },
- {
- "name": "clearBrowserCookies",
- "description": "Clears browser cookies."
- },
- {
- "name": "continueInterceptedRequest",
- "description": "Response to Network.requestIntercepted which either modifies the request to continue with any\nmodifications, or blocks it, or completes it with the provided response bytes. If a network\nfetch occurs as a result which encounters a redirect an additional Network.requestIntercepted\nevent will be sent with the same InterceptionId.\nDeprecated, use Fetch.continueRequest, Fetch.fulfillRequest and Fetch.failRequest instead.",
- "experimental": true,
- "deprecated": true,
+ "name": "setCookie",
+ "description": "Sets a cookie with the given cookie data; may overwrite equivalent cookies if they exist.",
"parameters": [
{
- "name": "interceptionId",
- "$ref": "InterceptionId"
- },
- {
- "name": "errorReason",
- "description": "If set this causes the request to fail with the given reason. Passing `Aborted` for requests\nmarked with `isNavigationRequest` also cancels the navigation. Must not be set in response\nto an authChallenge.",
- "optional": true,
- "$ref": "ErrorReason"
+ "name": "name",
+ "description": "Cookie name.",
+ "type": "string"
},
{
- "name": "rawResponse",
- "description": "If set the requests completes using with the provided base64 encoded raw response, including\nHTTP status line and headers etc... Must not be set in response to an authChallenge.",
- "optional": true,
+ "name": "value",
+ "description": "Cookie value.",
"type": "string"
},
{
"name": "url",
- "description": "If set the request url will be modified in a way that's not observable by page. Must not be\nset in response to an authChallenge.",
+ "description": "The request-URI to associate with the setting of the cookie. This value can affect the\ndefault domain, path, source port, and source scheme values of the created cookie.",
"optional": true,
"type": "string"
},
{
- "name": "method",
- "description": "If set this allows the request method to be overridden. Must not be set in response to an\nauthChallenge.",
+ "name": "domain",
+ "description": "Cookie domain.",
"optional": true,
"type": "string"
},
{
- "name": "postData",
- "description": "If set this allows postData to be set. Must not be set in response to an authChallenge.",
+ "name": "path",
+ "description": "Cookie path.",
"optional": true,
"type": "string"
},
{
- "name": "headers",
- "description": "If set this allows the request headers to be changed. Must not be set in response to an\nauthChallenge.",
+ "name": "secure",
+ "description": "True if cookie is secure.",
"optional": true,
- "$ref": "Headers"
+ "type": "boolean"
},
{
- "name": "authChallengeResponse",
- "description": "Response to a requestIntercepted with an authChallenge. Must not be set otherwise.",
+ "name": "httpOnly",
+ "description": "True if cookie is http-only.",
"optional": true,
- "$ref": "AuthChallengeResponse"
- }
- ]
- },
- {
- "name": "deleteCookies",
- "description": "Deletes browser cookies with matching name and url or domain/path pair.",
- "parameters": [
- {
- "name": "name",
- "description": "Name of the cookies to remove.",
- "type": "string"
+ "type": "boolean"
},
{
- "name": "url",
- "description": "If specified, deletes all the cookies with the given name where domain and path match\nprovided URL.",
+ "name": "sameSite",
+ "description": "Cookie SameSite type.",
"optional": true,
- "type": "string"
+ "$ref": "CookieSameSite"
},
{
- "name": "domain",
- "description": "If specified, deletes only cookies with the exact domain.",
+ "name": "expires",
+ "description": "Cookie expiration date, session cookie if not set",
"optional": true,
- "type": "string"
+ "$ref": "TimeSinceEpoch"
},
{
- "name": "path",
- "description": "If specified, deletes only cookies with the exact path.",
+ "name": "priority",
+ "description": "Cookie Priority type.",
+ "experimental": true,
"optional": true,
- "type": "string"
- }
- ]
- },
- {
- "name": "disable",
- "description": "Disables network tracking, prevents network events from being sent to the client."
- },
- {
- "name": "emulateNetworkConditions",
- "description": "Activates emulation of network conditions.",
- "parameters": [
- {
- "name": "offline",
- "description": "True to emulate internet disconnection.",
- "type": "boolean"
- },
- {
- "name": "latency",
- "description": "Minimum latency from request sent to response headers received (ms).",
- "type": "number"
- },
- {
- "name": "downloadThroughput",
- "description": "Maximal aggregated download throughput (bytes/sec). -1 disables download throttling.",
- "type": "number"
- },
- {
- "name": "uploadThroughput",
- "description": "Maximal aggregated upload throughput (bytes/sec). -1 disables upload throttling.",
- "type": "number"
+ "$ref": "CookiePriority"
},
{
- "name": "connectionType",
- "description": "Connection type if known.",
+ "name": "sameParty",
+ "description": "True if cookie is SameParty.",
+ "experimental": true,
"optional": true,
- "$ref": "ConnectionType"
- }
- ]
- },
- {
- "name": "enable",
- "description": "Enables network tracking, network events will now be delivered to the client.",
- "parameters": [
+ "type": "boolean"
+ },
{
- "name": "maxTotalBufferSize",
- "description": "Buffer size in bytes to use when preserving network payloads (XHRs, etc).",
+ "name": "sourceScheme",
+ "description": "Cookie source scheme type.",
"experimental": true,
"optional": true,
- "type": "integer"
+ "$ref": "CookieSourceScheme"
},
{
- "name": "maxResourceBufferSize",
- "description": "Per-resource buffer size in bytes to use when preserving network payloads (XHRs, etc).",
+ "name": "sourcePort",
+ "description": "Cookie source port. Valid values are {-1, [1, 65535]}, -1 indicates an unspecified port.\nAn unspecified port value allows protocol clients to emulate legacy cookie scope for the port.\nThis is a temporary ability and it will be removed in the future.",
"experimental": true,
"optional": true,
"type": "integer"
},
{
- "name": "maxPostDataSize",
- "description": "Longest post body size (in bytes) that would be included in requestWillBeSent notification",
+ "name": "partitionKey",
+ "description": "Cookie partition key. The site of the top-level URL the browser was visiting at the start\nof the request to the endpoint that set the cookie.\nIf not set, the cookie will be set as not partitioned.",
+ "experimental": true,
"optional": true,
- "type": "integer"
+ "type": "string"
+ }
+ ],
+ "returns": [
+ {
+ "name": "success",
+ "description": "Always set to true. If an error occurs, the response indicates protocol error.",
+ "deprecated": true,
+ "type": "boolean"
}
]
},
{
- "name": "getAllCookies",
- "description": "Returns all browser cookies. Depending on the backend support, will return detailed cookie\ninformation in the `cookies` field.",
- "returns": [
+ "name": "setCookies",
+ "description": "Sets given cookies.",
+ "parameters": [
{
"name": "cookies",
- "description": "Array of cookie objects.",
+ "description": "Cookies to be set.",
"type": "array",
"items": {
- "$ref": "Cookie"
+ "$ref": "CookieParam"
}
}
]
},
{
- "name": "getCertificate",
- "description": "Returns the DER-encoded certificate.",
- "experimental": true,
+ "name": "setExtraHTTPHeaders",
+ "description": "Specifies whether to always send extra HTTP headers with the requests from this page.",
"parameters": [
{
- "name": "origin",
- "description": "Origin to get certificate for.",
- "type": "string"
- }
- ],
- "returns": [
- {
- "name": "tableNames",
- "type": "array",
- "items": {
- "type": "string"
- }
+ "name": "headers",
+ "description": "Map with extra HTTP headers.",
+ "$ref": "Headers"
}
]
},
{
- "name": "getCookies",
- "description": "Returns all browser cookies for the current URL. Depending on the backend support, will return\ndetailed cookie information in the `cookies` field.",
+ "name": "setAttachDebugStack",
+ "description": "Specifies whether to attach a page script stack id in requests",
+ "experimental": true,
"parameters": [
{
- "name": "urls",
- "description": "The list of URLs for which applicable cookies will be fetched",
- "optional": true,
- "type": "array",
- "items": {
- "type": "string"
- }
+ "name": "enabled",
+ "description": "Whether to attach a page script stack for debugging purpose.",
+ "type": "boolean"
}
- ],
- "returns": [
+ ]
+ },
+ {
+ "name": "setRequestInterception",
+ "description": "Sets the requests to intercept that match the provided patterns and optionally resource types.\nDeprecated, please use Fetch.enable instead.",
+ "experimental": true,
+ "deprecated": true,
+ "parameters": [
{
- "name": "cookies",
- "description": "Array of cookie objects.",
+ "name": "patterns",
+ "description": "Requests matching any of these patterns will be forwarded and wait for the corresponding\ncontinueInterceptedRequest call.",
"type": "array",
"items": {
- "$ref": "Cookie"
+ "$ref": "RequestPattern"
}
}
]
},
{
- "name": "getResponseBody",
- "description": "Returns content served for the given request.",
+ "name": "setUserAgentOverride",
+ "description": "Allows overriding user agent with the given string.",
+ "redirect": "Emulation",
"parameters": [
{
- "name": "requestId",
- "description": "Identifier of the network request to get content for.",
- "$ref": "RequestId"
- }
- ],
- "returns": [
+ "name": "userAgent",
+ "description": "User agent to use.",
+ "type": "string"
+ },
{
- "name": "body",
- "description": "Response body.",
+ "name": "acceptLanguage",
+ "description": "Browser langugage to emulate.",
+ "optional": true,
"type": "string"
},
{
- "name": "base64Encoded",
- "description": "True, if content was sent as base64.",
- "type": "boolean"
+ "name": "platform",
+ "description": "The platform navigator.platform should return.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "userAgentMetadata",
+ "description": "To be sent in Sec-CH-UA-* headers and returned in navigator.userAgentData",
+ "experimental": true,
+ "optional": true,
+ "$ref": "Emulation.UserAgentMetadata"
}
]
},
{
- "name": "getRequestPostData",
- "description": "Returns post data sent with the request. Returns an error when no data was sent with the request.",
+ "name": "getSecurityIsolationStatus",
+ "description": "Returns information about the COEP/COOP isolation status.",
+ "experimental": true,
"parameters": [
{
- "name": "requestId",
- "description": "Identifier of the network request to get content for.",
- "$ref": "RequestId"
+ "name": "frameId",
+ "description": "If no frameId is provided, the status of the target is provided.",
+ "optional": true,
+ "$ref": "Page.FrameId"
}
],
"returns": [
{
- "name": "postData",
- "description": "Request body string, omitting files from multipart requests",
- "type": "string"
+ "name": "status",
+ "$ref": "SecurityIsolationStatus"
}
]
},
{
- "name": "getResponseBodyForInterception",
- "description": "Returns content served for the given currently intercepted request.",
+ "name": "enableReportingApi",
+ "description": "Enables tracking for the Reporting API, events generated by the Reporting API will now be delivered to the client.\nEnabling triggers 'reportingApiReportAdded' for all existing reports.",
"experimental": true,
"parameters": [
{
- "name": "interceptionId",
- "description": "Identifier for the intercepted request to get body for.",
- "$ref": "InterceptionId"
- }
- ],
- "returns": [
- {
- "name": "body",
- "description": "Response body.",
- "type": "string"
- },
- {
- "name": "base64Encoded",
- "description": "True, if content was sent as base64.",
+ "name": "enable",
+ "description": "Whether to enable or disable events for the Reporting API",
"type": "boolean"
}
]
},
{
- "name": "takeResponseBodyForInterceptionAsStream",
- "description": "Returns a handle to the stream representing the response body. Note that after this command,\nthe intercepted request can't be continued as is -- you either need to cancel it or to provide\nthe response body. The stream only supports sequential read, IO.read will fail if the position\nis specified.",
+ "name": "loadNetworkResource",
+ "description": "Fetches the resource and returns the content.",
"experimental": true,
"parameters": [
{
- "name": "interceptionId",
- "$ref": "InterceptionId"
+ "name": "frameId",
+ "description": "Frame id to get the resource for. Mandatory for frame targets, and\nshould be omitted for worker targets.",
+ "optional": true,
+ "$ref": "Page.FrameId"
+ },
+ {
+ "name": "url",
+ "description": "URL of the resource to get content for.",
+ "type": "string"
+ },
+ {
+ "name": "options",
+ "description": "Options for the request.",
+ "$ref": "LoadNetworkResourceOptions"
}
],
"returns": [
{
- "name": "stream",
- "$ref": "IO.StreamHandle"
+ "name": "resource",
+ "$ref": "LoadNetworkResourcePageResult"
+ }
+ ]
+ }
+ ],
+ "events": [
+ {
+ "name": "dataReceived",
+ "description": "Fired when data chunk was received over the network.",
+ "parameters": [
+ {
+ "name": "requestId",
+ "description": "Request identifier.",
+ "$ref": "RequestId"
+ },
+ {
+ "name": "timestamp",
+ "description": "Timestamp.",
+ "$ref": "MonotonicTime"
+ },
+ {
+ "name": "dataLength",
+ "description": "Data chunk length.",
+ "type": "integer"
+ },
+ {
+ "name": "encodedDataLength",
+ "description": "Actual bytes received (might be less than dataLength for compressed encodings).",
+ "type": "integer"
}
]
},
{
- "name": "replayXHR",
- "description": "This method sends a new XMLHttpRequest which is identical to the original one. The following\nparameters should be identical: method, url, async, request body, extra headers, withCredentials\nattribute, user, password.",
- "experimental": true,
+ "name": "eventSourceMessageReceived",
+ "description": "Fired when EventSource message is received.",
"parameters": [
{
"name": "requestId",
- "description": "Identifier of XHR to replay.",
+ "description": "Request identifier.",
"$ref": "RequestId"
+ },
+ {
+ "name": "timestamp",
+ "description": "Timestamp.",
+ "$ref": "MonotonicTime"
+ },
+ {
+ "name": "eventName",
+ "description": "Message type.",
+ "type": "string"
+ },
+ {
+ "name": "eventId",
+ "description": "Message identifier.",
+ "type": "string"
+ },
+ {
+ "name": "data",
+ "description": "Message content.",
+ "type": "string"
}
]
},
{
- "name": "searchInResponseBody",
- "description": "Searches for given string in response content.",
- "experimental": true,
+ "name": "loadingFailed",
+ "description": "Fired when HTTP request has failed to load.",
"parameters": [
{
"name": "requestId",
- "description": "Identifier of the network response to search.",
+ "description": "Request identifier.",
"$ref": "RequestId"
},
{
- "name": "query",
- "description": "String to search for.",
+ "name": "timestamp",
+ "description": "Timestamp.",
+ "$ref": "MonotonicTime"
+ },
+ {
+ "name": "type",
+ "description": "Resource type.",
+ "$ref": "ResourceType"
+ },
+ {
+ "name": "errorText",
+ "description": "User friendly error message.",
"type": "string"
},
{
- "name": "caseSensitive",
- "description": "If true, search is case sensitive.",
+ "name": "canceled",
+ "description": "True if loading was canceled.",
"optional": true,
"type": "boolean"
},
{
- "name": "isRegex",
- "description": "If true, treats string parameter as regex.",
+ "name": "blockedReason",
+ "description": "The reason why loading was blocked, if any.",
"optional": true,
- "type": "boolean"
- }
- ],
- "returns": [
+ "$ref": "BlockedReason"
+ },
{
- "name": "result",
- "description": "List of search matches.",
- "type": "array",
- "items": {
- "$ref": "Debugger.SearchMatch"
- }
+ "name": "corsErrorStatus",
+ "description": "The reason why loading was blocked by CORS, if any.",
+ "optional": true,
+ "$ref": "CorsErrorStatus"
}
]
},
{
- "name": "setBlockedURLs",
- "description": "Blocks URLs from loading.",
- "experimental": true,
+ "name": "loadingFinished",
+ "description": "Fired when HTTP request has finished loading.",
"parameters": [
{
- "name": "urls",
- "description": "URL patterns to block. Wildcards ('*') are allowed.",
- "type": "array",
- "items": {
- "type": "string"
- }
+ "name": "requestId",
+ "description": "Request identifier.",
+ "$ref": "RequestId"
+ },
+ {
+ "name": "timestamp",
+ "description": "Timestamp.",
+ "$ref": "MonotonicTime"
+ },
+ {
+ "name": "encodedDataLength",
+ "description": "Total number of bytes received for this request.",
+ "type": "number"
+ },
+ {
+ "name": "shouldReportCorbBlocking",
+ "description": "Set when 1) response was blocked by Cross-Origin Read Blocking and also\n2) this needs to be reported to the DevTools console.",
+ "optional": true,
+ "type": "boolean"
}
]
},
{
- "name": "setBypassServiceWorker",
- "description": "Toggles ignoring of service worker for each request.",
+ "name": "requestIntercepted",
+ "description": "Details of an intercepted HTTP request, which must be either allowed, blocked, modified or\nmocked.\nDeprecated, use Fetch.requestPaused instead.",
"experimental": true,
+ "deprecated": true,
"parameters": [
{
- "name": "bypass",
- "description": "Bypass service worker and load from network.",
+ "name": "interceptionId",
+ "description": "Each request the page makes will have a unique id, however if any redirects are encountered\nwhile processing that fetch, they will be reported with the same id as the original fetch.\nLikewise if HTTP authentication is needed then the same fetch id will be used.",
+ "$ref": "InterceptionId"
+ },
+ {
+ "name": "request",
+ "$ref": "Request"
+ },
+ {
+ "name": "frameId",
+ "description": "The id of the frame that initiated the request.",
+ "$ref": "Page.FrameId"
+ },
+ {
+ "name": "resourceType",
+ "description": "How the requested resource will be used.",
+ "$ref": "ResourceType"
+ },
+ {
+ "name": "isNavigationRequest",
+ "description": "Whether this is a navigation request, which can abort the navigation completely.",
+ "type": "boolean"
+ },
+ {
+ "name": "isDownload",
+ "description": "Set if the request is a navigation that will result in a download.\nOnly present after response is received from the server (i.e. HeadersReceived stage).",
+ "optional": true,
"type": "boolean"
+ },
+ {
+ "name": "redirectUrl",
+ "description": "Redirect location, only sent if a redirect was intercepted.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "authChallenge",
+ "description": "Details of the Authorization Challenge encountered. If this is set then\ncontinueInterceptedRequest must contain an authChallengeResponse.",
+ "optional": true,
+ "$ref": "AuthChallenge"
+ },
+ {
+ "name": "responseErrorReason",
+ "description": "Response error if intercepted at response stage or if redirect occurred while intercepting\nrequest.",
+ "optional": true,
+ "$ref": "ErrorReason"
+ },
+ {
+ "name": "responseStatusCode",
+ "description": "Response code if intercepted at response stage or if redirect occurred while intercepting\nrequest or auth retry occurred.",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "responseHeaders",
+ "description": "Response headers if intercepted at the response stage or if redirect occurred while\nintercepting request or auth retry occurred.",
+ "optional": true,
+ "$ref": "Headers"
+ },
+ {
+ "name": "requestId",
+ "description": "If the intercepted request had a corresponding requestWillBeSent event fired for it, then\nthis requestId will be the same as the requestId present in the requestWillBeSent event.",
+ "optional": true,
+ "$ref": "RequestId"
}
]
},
{
- "name": "setCacheDisabled",
- "description": "Toggles ignoring cache for each request. If `true`, cache will not be used.",
+ "name": "requestServedFromCache",
+ "description": "Fired if request ended up loading from cache.",
"parameters": [
{
- "name": "cacheDisabled",
- "description": "Cache disabled state.",
- "type": "boolean"
+ "name": "requestId",
+ "description": "Request identifier.",
+ "$ref": "RequestId"
}
]
},
{
- "name": "setCookie",
- "description": "Sets a cookie with the given cookie data; may overwrite equivalent cookies if they exist.",
+ "name": "requestWillBeSent",
+ "description": "Fired when page is about to send HTTP request.",
"parameters": [
{
- "name": "name",
- "description": "Cookie name.",
- "type": "string"
+ "name": "requestId",
+ "description": "Request identifier.",
+ "$ref": "RequestId"
},
{
- "name": "value",
- "description": "Cookie value.",
- "type": "string"
+ "name": "loaderId",
+ "description": "Loader identifier. Empty string if the request is fetched from worker.",
+ "$ref": "LoaderId"
},
{
- "name": "url",
- "description": "The request-URI to associate with the setting of the cookie. This value can affect the\ndefault domain and path values of the created cookie.",
- "optional": true,
+ "name": "documentURL",
+ "description": "URL of the document this request is loaded for.",
"type": "string"
},
{
- "name": "domain",
- "description": "Cookie domain.",
- "optional": true,
- "type": "string"
+ "name": "request",
+ "description": "Request data.",
+ "$ref": "Request"
},
{
- "name": "path",
- "description": "Cookie path.",
- "optional": true,
- "type": "string"
+ "name": "timestamp",
+ "description": "Timestamp.",
+ "$ref": "MonotonicTime"
},
{
- "name": "secure",
- "description": "True if cookie is secure.",
- "optional": true,
+ "name": "wallTime",
+ "description": "Timestamp.",
+ "$ref": "TimeSinceEpoch"
+ },
+ {
+ "name": "initiator",
+ "description": "Request initiator.",
+ "$ref": "Initiator"
+ },
+ {
+ "name": "redirectHasExtraInfo",
+ "description": "In the case that redirectResponse is populated, this flag indicates whether\nrequestWillBeSentExtraInfo and responseReceivedExtraInfo events will be or were emitted\nfor the request which was just redirected.",
+ "experimental": true,
"type": "boolean"
},
{
- "name": "httpOnly",
- "description": "True if cookie is http-only.",
+ "name": "redirectResponse",
+ "description": "Redirect response data.",
"optional": true,
- "type": "boolean"
+ "$ref": "Response"
},
{
- "name": "sameSite",
- "description": "Cookie SameSite type.",
+ "name": "type",
+ "description": "Type of this resource.",
"optional": true,
- "$ref": "CookieSameSite"
+ "$ref": "ResourceType"
},
{
- "name": "expires",
- "description": "Cookie expiration date, session cookie if not set",
+ "name": "frameId",
+ "description": "Frame identifier.",
"optional": true,
- "$ref": "TimeSinceEpoch"
- }
- ],
- "returns": [
+ "$ref": "Page.FrameId"
+ },
{
- "name": "success",
- "description": "True if successfully set cookie.",
+ "name": "hasUserGesture",
+ "description": "Whether the request is initiated by a user gesture. Defaults to false.",
+ "optional": true,
"type": "boolean"
}
]
},
{
- "name": "setCookies",
- "description": "Sets given cookies.",
+ "name": "resourceChangedPriority",
+ "description": "Fired when resource loading priority is changed",
+ "experimental": true,
"parameters": [
{
- "name": "cookies",
- "description": "Cookies to be set.",
- "type": "array",
- "items": {
- "$ref": "CookieParam"
- }
+ "name": "requestId",
+ "description": "Request identifier.",
+ "$ref": "RequestId"
+ },
+ {
+ "name": "newPriority",
+ "description": "New priority",
+ "$ref": "ResourcePriority"
+ },
+ {
+ "name": "timestamp",
+ "description": "Timestamp.",
+ "$ref": "MonotonicTime"
}
]
},
{
- "name": "setDataSizeLimitsForTest",
- "description": "For testing.",
+ "name": "signedExchangeReceived",
+ "description": "Fired when a signed exchange was received over the network",
"experimental": true,
"parameters": [
{
- "name": "maxTotalSize",
- "description": "Maximum total buffer size.",
- "type": "integer"
+ "name": "requestId",
+ "description": "Request identifier.",
+ "$ref": "RequestId"
},
{
- "name": "maxResourceSize",
- "description": "Maximum per-resource size.",
- "type": "integer"
+ "name": "info",
+ "description": "Information about the signed exchange response.",
+ "$ref": "SignedExchangeInfo"
}
]
},
{
- "name": "setExtraHTTPHeaders",
- "description": "Specifies whether to always send extra HTTP headers with the requests from this page.",
+ "name": "responseReceived",
+ "description": "Fired when HTTP response is available.",
"parameters": [
{
- "name": "headers",
- "description": "Map with extra HTTP headers.",
- "$ref": "Headers"
+ "name": "requestId",
+ "description": "Request identifier.",
+ "$ref": "RequestId"
+ },
+ {
+ "name": "loaderId",
+ "description": "Loader identifier. Empty string if the request is fetched from worker.",
+ "$ref": "LoaderId"
+ },
+ {
+ "name": "timestamp",
+ "description": "Timestamp.",
+ "$ref": "MonotonicTime"
+ },
+ {
+ "name": "type",
+ "description": "Resource type.",
+ "$ref": "ResourceType"
+ },
+ {
+ "name": "response",
+ "description": "Response data.",
+ "$ref": "Response"
+ },
+ {
+ "name": "hasExtraInfo",
+ "description": "Indicates whether requestWillBeSentExtraInfo and responseReceivedExtraInfo events will be\nor were emitted for this request.",
+ "experimental": true,
+ "type": "boolean"
+ },
+ {
+ "name": "frameId",
+ "description": "Frame identifier.",
+ "optional": true,
+ "$ref": "Page.FrameId"
}
]
},
{
- "name": "setRequestInterception",
- "description": "Sets the requests to intercept that match the provided patterns and optionally resource types.\nDeprecated, please use Fetch.enable instead.",
- "experimental": true,
- "deprecated": true,
+ "name": "webSocketClosed",
+ "description": "Fired when WebSocket is closed.",
"parameters": [
{
- "name": "patterns",
- "description": "Requests matching any of these patterns will be forwarded and wait for the corresponding\ncontinueInterceptedRequest call.",
- "type": "array",
- "items": {
- "$ref": "RequestPattern"
- }
+ "name": "requestId",
+ "description": "Request identifier.",
+ "$ref": "RequestId"
+ },
+ {
+ "name": "timestamp",
+ "description": "Timestamp.",
+ "$ref": "MonotonicTime"
}
]
},
{
- "name": "setUserAgentOverride",
- "description": "Allows overriding user agent with the given string.",
- "redirect": "Emulation",
+ "name": "webSocketCreated",
+ "description": "Fired upon WebSocket creation.",
"parameters": [
{
- "name": "userAgent",
- "description": "User agent to use.",
- "type": "string"
+ "name": "requestId",
+ "description": "Request identifier.",
+ "$ref": "RequestId"
},
{
- "name": "acceptLanguage",
- "description": "Browser langugage to emulate.",
- "optional": true,
+ "name": "url",
+ "description": "WebSocket request URL.",
"type": "string"
},
{
- "name": "platform",
- "description": "The platform navigator.platform should return.",
+ "name": "initiator",
+ "description": "Request initiator.",
"optional": true,
- "type": "string"
+ "$ref": "Initiator"
}
]
- }
- ],
- "events": [
+ },
{
- "name": "dataReceived",
- "description": "Fired when data chunk was received over the network.",
+ "name": "webSocketFrameError",
+ "description": "Fired when WebSocket message error occurs.",
"parameters": [
{
"name": "requestId",
@@ -9924,20 +13196,15 @@
"$ref": "MonotonicTime"
},
{
- "name": "dataLength",
- "description": "Data chunk length.",
- "type": "integer"
- },
- {
- "name": "encodedDataLength",
- "description": "Actual bytes received (might be less than dataLength for compressed encodings).",
- "type": "integer"
+ "name": "errorMessage",
+ "description": "WebSocket error message.",
+ "type": "string"
}
]
},
{
- "name": "eventSourceMessageReceived",
- "description": "Fired when EventSource message is received.",
+ "name": "webSocketFrameReceived",
+ "description": "Fired when WebSocket message is received.",
"parameters": [
{
"name": "requestId",
@@ -9950,25 +13217,15 @@
"$ref": "MonotonicTime"
},
{
- "name": "eventName",
- "description": "Message type.",
- "type": "string"
- },
- {
- "name": "eventId",
- "description": "Message identifier.",
- "type": "string"
- },
- {
- "name": "data",
- "description": "Message content.",
- "type": "string"
+ "name": "response",
+ "description": "WebSocket response data.",
+ "$ref": "WebSocketFrame"
}
]
},
{
- "name": "loadingFailed",
- "description": "Fired when HTTP request has failed to load.",
+ "name": "webSocketFrameSent",
+ "description": "Fired when WebSocket message is sent.",
"parameters": [
{
"name": "requestId",
@@ -9981,32 +13238,15 @@
"$ref": "MonotonicTime"
},
{
- "name": "type",
- "description": "Resource type.",
- "$ref": "ResourceType"
- },
- {
- "name": "errorText",
- "description": "User friendly error message.",
- "type": "string"
- },
- {
- "name": "canceled",
- "description": "True if loading was canceled.",
- "optional": true,
- "type": "boolean"
- },
- {
- "name": "blockedReason",
- "description": "The reason why loading was blocked, if any.",
- "optional": true,
- "$ref": "BlockedReason"
+ "name": "response",
+ "description": "WebSocket response data.",
+ "$ref": "WebSocketFrame"
}
]
},
{
- "name": "loadingFinished",
- "description": "Fired when HTTP request has finished loading.",
+ "name": "webSocketHandshakeResponseReceived",
+ "description": "Fired when WebSocket handshake response becomes available.",
"parameters": [
{
"name": "requestId",
@@ -10019,460 +13259,641 @@
"$ref": "MonotonicTime"
},
{
- "name": "encodedDataLength",
- "description": "Total number of bytes received for this request.",
- "type": "number"
- },
- {
- "name": "shouldReportCorbBlocking",
- "description": "Set when 1) response was blocked by Cross-Origin Read Blocking and also\n2) this needs to be reported to the DevTools console.",
- "optional": true,
- "type": "boolean"
+ "name": "response",
+ "description": "WebSocket response data.",
+ "$ref": "WebSocketResponse"
}
]
},
{
- "name": "requestIntercepted",
- "description": "Details of an intercepted HTTP request, which must be either allowed, blocked, modified or\nmocked.\nDeprecated, use Fetch.requestPaused instead.",
- "experimental": true,
- "deprecated": true,
+ "name": "webSocketWillSendHandshakeRequest",
+ "description": "Fired when WebSocket is about to initiate handshake.",
"parameters": [
{
- "name": "interceptionId",
- "description": "Each request the page makes will have a unique id, however if any redirects are encountered\nwhile processing that fetch, they will be reported with the same id as the original fetch.\nLikewise if HTTP authentication is needed then the same fetch id will be used.",
- "$ref": "InterceptionId"
- },
- {
- "name": "request",
- "$ref": "Request"
+ "name": "requestId",
+ "description": "Request identifier.",
+ "$ref": "RequestId"
},
{
- "name": "frameId",
- "description": "The id of the frame that initiated the request.",
- "$ref": "Page.FrameId"
+ "name": "timestamp",
+ "description": "Timestamp.",
+ "$ref": "MonotonicTime"
},
{
- "name": "resourceType",
- "description": "How the requested resource will be used.",
- "$ref": "ResourceType"
+ "name": "wallTime",
+ "description": "UTC Timestamp.",
+ "$ref": "TimeSinceEpoch"
},
{
- "name": "isNavigationRequest",
- "description": "Whether this is a navigation request, which can abort the navigation completely.",
- "type": "boolean"
- },
+ "name": "request",
+ "description": "WebSocket request data.",
+ "$ref": "WebSocketRequest"
+ }
+ ]
+ },
+ {
+ "name": "webTransportCreated",
+ "description": "Fired upon WebTransport creation.",
+ "parameters": [
{
- "name": "isDownload",
- "description": "Set if the request is a navigation that will result in a download.\nOnly present after response is received from the server (i.e. HeadersReceived stage).",
- "optional": true,
- "type": "boolean"
+ "name": "transportId",
+ "description": "WebTransport identifier.",
+ "$ref": "RequestId"
},
{
- "name": "redirectUrl",
- "description": "Redirect location, only sent if a redirect was intercepted.",
- "optional": true,
+ "name": "url",
+ "description": "WebTransport request URL.",
"type": "string"
},
{
- "name": "authChallenge",
- "description": "Details of the Authorization Challenge encountered. If this is set then\ncontinueInterceptedRequest must contain an authChallengeResponse.",
- "optional": true,
- "$ref": "AuthChallenge"
- },
- {
- "name": "responseErrorReason",
- "description": "Response error if intercepted at response stage or if redirect occurred while intercepting\nrequest.",
- "optional": true,
- "$ref": "ErrorReason"
- },
- {
- "name": "responseStatusCode",
- "description": "Response code if intercepted at response stage or if redirect occurred while intercepting\nrequest or auth retry occurred.",
- "optional": true,
- "type": "integer"
- },
- {
- "name": "responseHeaders",
- "description": "Response headers if intercepted at the response stage or if redirect occurred while\nintercepting request or auth retry occurred.",
- "optional": true,
- "$ref": "Headers"
+ "name": "timestamp",
+ "description": "Timestamp.",
+ "$ref": "MonotonicTime"
},
{
- "name": "requestId",
- "description": "If the intercepted request had a corresponding requestWillBeSent event fired for it, then\nthis requestId will be the same as the requestId present in the requestWillBeSent event.",
+ "name": "initiator",
+ "description": "Request initiator.",
"optional": true,
- "$ref": "RequestId"
+ "$ref": "Initiator"
}
]
},
{
- "name": "requestServedFromCache",
- "description": "Fired if request ended up loading from cache.",
+ "name": "webTransportConnectionEstablished",
+ "description": "Fired when WebTransport handshake is finished.",
"parameters": [
{
- "name": "requestId",
- "description": "Request identifier.",
+ "name": "transportId",
+ "description": "WebTransport identifier.",
"$ref": "RequestId"
+ },
+ {
+ "name": "timestamp",
+ "description": "Timestamp.",
+ "$ref": "MonotonicTime"
}
]
},
{
- "name": "requestWillBeSent",
- "description": "Fired when page is about to send HTTP request.",
+ "name": "webTransportClosed",
+ "description": "Fired when WebTransport is disposed.",
"parameters": [
{
- "name": "requestId",
- "description": "Request identifier.",
+ "name": "transportId",
+ "description": "WebTransport identifier.",
"$ref": "RequestId"
},
- {
- "name": "loaderId",
- "description": "Loader identifier. Empty string if the request is fetched from worker.",
- "$ref": "LoaderId"
- },
- {
- "name": "documentURL",
- "description": "URL of the document this request is loaded for.",
- "type": "string"
- },
- {
- "name": "request",
- "description": "Request data.",
- "$ref": "Request"
- },
{
"name": "timestamp",
"description": "Timestamp.",
"$ref": "MonotonicTime"
- },
- {
- "name": "wallTime",
- "description": "Timestamp.",
- "$ref": "TimeSinceEpoch"
- },
+ }
+ ]
+ },
+ {
+ "name": "requestWillBeSentExtraInfo",
+ "description": "Fired when additional information about a requestWillBeSent event is available from the\nnetwork stack. Not every requestWillBeSent event will have an additional\nrequestWillBeSentExtraInfo fired for it, and there is no guarantee whether requestWillBeSent\nor requestWillBeSentExtraInfo will be fired first for the same request.",
+ "experimental": true,
+ "parameters": [
{
- "name": "initiator",
- "description": "Request initiator.",
- "$ref": "Initiator"
+ "name": "requestId",
+ "description": "Request identifier. Used to match this information to an existing requestWillBeSent event.",
+ "$ref": "RequestId"
},
{
- "name": "redirectResponse",
- "description": "Redirect response data.",
- "optional": true,
- "$ref": "Response"
+ "name": "associatedCookies",
+ "description": "A list of cookies potentially associated to the requested URL. This includes both cookies sent with\nthe request and the ones not sent; the latter are distinguished by having blockedReason field set.",
+ "type": "array",
+ "items": {
+ "$ref": "BlockedCookieWithReason"
+ }
},
{
- "name": "type",
- "description": "Type of this resource.",
- "optional": true,
- "$ref": "ResourceType"
+ "name": "headers",
+ "description": "Raw request headers as they will be sent over the wire.",
+ "$ref": "Headers"
},
{
- "name": "frameId",
- "description": "Frame identifier.",
- "optional": true,
- "$ref": "Page.FrameId"
+ "name": "connectTiming",
+ "description": "Connection timing information for the request.",
+ "experimental": true,
+ "$ref": "ConnectTiming"
},
{
- "name": "hasUserGesture",
- "description": "Whether the request is initiated by a user gesture. Defaults to false.",
+ "name": "clientSecurityState",
+ "description": "The client security state set for the request.",
"optional": true,
- "type": "boolean"
+ "$ref": "ClientSecurityState"
}
]
},
{
- "name": "resourceChangedPriority",
- "description": "Fired when resource loading priority is changed",
+ "name": "responseReceivedExtraInfo",
+ "description": "Fired when additional information about a responseReceived event is available from the network\nstack. Not every responseReceived event will have an additional responseReceivedExtraInfo for\nit, and responseReceivedExtraInfo may be fired before or after responseReceived.",
"experimental": true,
"parameters": [
{
"name": "requestId",
- "description": "Request identifier.",
+ "description": "Request identifier. Used to match this information to another responseReceived event.",
"$ref": "RequestId"
},
{
- "name": "newPriority",
- "description": "New priority",
- "$ref": "ResourcePriority"
+ "name": "blockedCookies",
+ "description": "A list of cookies which were not stored from the response along with the corresponding\nreasons for blocking. The cookies here may not be valid due to syntax errors, which\nare represented by the invalid cookie line string instead of a proper cookie.",
+ "type": "array",
+ "items": {
+ "$ref": "BlockedSetCookieWithReason"
+ }
},
{
- "name": "timestamp",
- "description": "Timestamp.",
- "$ref": "MonotonicTime"
+ "name": "headers",
+ "description": "Raw response headers as they were received over the wire.",
+ "$ref": "Headers"
+ },
+ {
+ "name": "resourceIPAddressSpace",
+ "description": "The IP address space of the resource. The address space can only be determined once the transport\nestablished the connection, so we can't send it in `requestWillBeSentExtraInfo`.",
+ "$ref": "IPAddressSpace"
+ },
+ {
+ "name": "statusCode",
+ "description": "The status code of the response. This is useful in cases the request failed and no responseReceived\nevent is triggered, which is the case for, e.g., CORS errors. This is also the correct status code\nfor cached requests, where the status in responseReceived is a 200 and this will be 304.",
+ "type": "integer"
+ },
+ {
+ "name": "headersText",
+ "description": "Raw response header text as it was received over the wire. The raw text may not always be\navailable, such as in the case of HTTP/2 or QUIC.",
+ "optional": true,
+ "type": "string"
}
]
},
{
- "name": "signedExchangeReceived",
- "description": "Fired when a signed exchange was received over the network",
+ "name": "trustTokenOperationDone",
+ "description": "Fired exactly once for each Trust Token operation. Depending on\nthe type of the operation and whether the operation succeeded or\nfailed, the event is fired before the corresponding request was sent\nor after the response was received.",
"experimental": true,
"parameters": [
{
- "name": "requestId",
- "description": "Request identifier.",
- "$ref": "RequestId"
+ "name": "status",
+ "description": "Detailed success or error status of the operation.\n'AlreadyExists' also signifies a successful operation, as the result\nof the operation already exists und thus, the operation was abort\npreemptively (e.g. a cache hit).",
+ "type": "string",
+ "enum": [
+ "Ok",
+ "InvalidArgument",
+ "FailedPrecondition",
+ "ResourceExhausted",
+ "AlreadyExists",
+ "Unavailable",
+ "BadResponse",
+ "InternalError",
+ "UnknownError",
+ "FulfilledLocally"
+ ]
},
{
- "name": "info",
- "description": "Information about the signed exchange response.",
- "$ref": "SignedExchangeInfo"
- }
- ]
- },
- {
- "name": "responseReceived",
- "description": "Fired when HTTP response is available.",
- "parameters": [
+ "name": "type",
+ "$ref": "TrustTokenOperationType"
+ },
{
"name": "requestId",
- "description": "Request identifier.",
"$ref": "RequestId"
},
{
- "name": "loaderId",
- "description": "Loader identifier. Empty string if the request is fetched from worker.",
- "$ref": "LoaderId"
+ "name": "topLevelOrigin",
+ "description": "Top level origin. The context in which the operation was attempted.",
+ "optional": true,
+ "type": "string"
},
{
- "name": "timestamp",
- "description": "Timestamp.",
- "$ref": "MonotonicTime"
+ "name": "issuerOrigin",
+ "description": "Origin of the issuer in case of a \"Issuance\" or \"Redemption\" operation.",
+ "optional": true,
+ "type": "string"
},
{
- "name": "type",
- "description": "Resource type.",
- "$ref": "ResourceType"
- },
+ "name": "issuedTokenCount",
+ "description": "The number of obtained Trust Tokens on a successful \"Issuance\" operation.",
+ "optional": true,
+ "type": "integer"
+ }
+ ]
+ },
+ {
+ "name": "subresourceWebBundleMetadataReceived",
+ "description": "Fired once when parsing the .wbn file has succeeded.\nThe event contains the information about the web bundle contents.",
+ "experimental": true,
+ "parameters": [
{
- "name": "response",
- "description": "Response data.",
- "$ref": "Response"
+ "name": "requestId",
+ "description": "Request identifier. Used to match this information to another event.",
+ "$ref": "RequestId"
},
{
- "name": "frameId",
- "description": "Frame identifier.",
- "optional": true,
- "$ref": "Page.FrameId"
+ "name": "urls",
+ "description": "A list of URLs of resources in the subresource Web Bundle.",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
}
]
},
{
- "name": "webSocketClosed",
- "description": "Fired when WebSocket is closed.",
+ "name": "subresourceWebBundleMetadataError",
+ "description": "Fired once when parsing the .wbn file has failed.",
+ "experimental": true,
"parameters": [
{
"name": "requestId",
- "description": "Request identifier.",
+ "description": "Request identifier. Used to match this information to another event.",
"$ref": "RequestId"
},
{
- "name": "timestamp",
- "description": "Timestamp.",
- "$ref": "MonotonicTime"
+ "name": "errorMessage",
+ "description": "Error message",
+ "type": "string"
}
]
},
{
- "name": "webSocketCreated",
- "description": "Fired upon WebSocket creation.",
+ "name": "subresourceWebBundleInnerResponseParsed",
+ "description": "Fired when handling requests for resources within a .wbn file.\nNote: this will only be fired for resources that are requested by the webpage.",
+ "experimental": true,
"parameters": [
{
- "name": "requestId",
- "description": "Request identifier.",
+ "name": "innerRequestId",
+ "description": "Request identifier of the subresource request",
"$ref": "RequestId"
},
{
- "name": "url",
- "description": "WebSocket request URL.",
+ "name": "innerRequestURL",
+ "description": "URL of the subresource resource.",
"type": "string"
},
{
- "name": "initiator",
- "description": "Request initiator.",
+ "name": "bundleRequestId",
+ "description": "Bundle request identifier. Used to match this information to another event.\nThis made be absent in case when the instrumentation was enabled only\nafter webbundle was parsed.",
"optional": true,
- "$ref": "Initiator"
+ "$ref": "RequestId"
}
]
},
{
- "name": "webSocketFrameError",
- "description": "Fired when WebSocket message error occurs.",
+ "name": "subresourceWebBundleInnerResponseError",
+ "description": "Fired when request for resources within a .wbn file failed.",
+ "experimental": true,
"parameters": [
{
- "name": "requestId",
- "description": "Request identifier.",
+ "name": "innerRequestId",
+ "description": "Request identifier of the subresource request",
"$ref": "RequestId"
},
{
- "name": "timestamp",
- "description": "Timestamp.",
- "$ref": "MonotonicTime"
+ "name": "innerRequestURL",
+ "description": "URL of the subresource resource.",
+ "type": "string"
},
{
"name": "errorMessage",
- "description": "WebSocket error message.",
+ "description": "Error message",
"type": "string"
+ },
+ {
+ "name": "bundleRequestId",
+ "description": "Bundle request identifier. Used to match this information to another event.\nThis made be absent in case when the instrumentation was enabled only\nafter webbundle was parsed.",
+ "optional": true,
+ "$ref": "RequestId"
}
]
},
{
- "name": "webSocketFrameReceived",
- "description": "Fired when WebSocket message is received.",
+ "name": "reportingApiReportAdded",
+ "description": "Is sent whenever a new report is added.\nAnd after 'enableReportingApi' for all existing reports.",
+ "experimental": true,
"parameters": [
{
- "name": "requestId",
- "description": "Request identifier.",
- "$ref": "RequestId"
- },
- {
- "name": "timestamp",
- "description": "Timestamp.",
- "$ref": "MonotonicTime"
- },
+ "name": "report",
+ "$ref": "ReportingApiReport"
+ }
+ ]
+ },
+ {
+ "name": "reportingApiReportUpdated",
+ "experimental": true,
+ "parameters": [
{
- "name": "response",
- "description": "WebSocket response data.",
- "$ref": "WebSocketFrame"
+ "name": "report",
+ "$ref": "ReportingApiReport"
}
]
},
{
- "name": "webSocketFrameSent",
- "description": "Fired when WebSocket message is sent.",
+ "name": "reportingApiEndpointsChangedForOrigin",
+ "experimental": true,
"parameters": [
{
- "name": "requestId",
- "description": "Request identifier.",
- "$ref": "RequestId"
+ "name": "origin",
+ "description": "Origin of the document(s) which configured the endpoints.",
+ "type": "string"
},
{
- "name": "timestamp",
- "description": "Timestamp.",
- "$ref": "MonotonicTime"
+ "name": "endpoints",
+ "type": "array",
+ "items": {
+ "$ref": "ReportingApiEndpoint"
+ }
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "domain": "Overlay",
+ "description": "This domain provides various functionality related to drawing atop the inspected page.",
+ "experimental": true,
+ "dependencies": [
+ "DOM",
+ "Page",
+ "Runtime"
+ ],
+ "types": [
+ {
+ "id": "SourceOrderConfig",
+ "description": "Configuration data for drawing the source order of an elements children.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "parentOutlineColor",
+ "description": "the color to outline the givent element in.",
+ "$ref": "DOM.RGBA"
},
{
- "name": "response",
- "description": "WebSocket response data.",
- "$ref": "WebSocketFrame"
+ "name": "childOutlineColor",
+ "description": "the color to outline the child elements in.",
+ "$ref": "DOM.RGBA"
}
]
},
{
- "name": "webSocketHandshakeResponseReceived",
- "description": "Fired when WebSocket handshake response becomes available.",
- "parameters": [
+ "id": "GridHighlightConfig",
+ "description": "Configuration data for the highlighting of Grid elements.",
+ "type": "object",
+ "properties": [
{
- "name": "requestId",
- "description": "Request identifier.",
- "$ref": "RequestId"
+ "name": "showGridExtensionLines",
+ "description": "Whether the extension lines from grid cells to the rulers should be shown (default: false).",
+ "optional": true,
+ "type": "boolean"
},
{
- "name": "timestamp",
- "description": "Timestamp.",
- "$ref": "MonotonicTime"
+ "name": "showPositiveLineNumbers",
+ "description": "Show Positive line number labels (default: false).",
+ "optional": true,
+ "type": "boolean"
},
{
- "name": "response",
- "description": "WebSocket response data.",
- "$ref": "WebSocketResponse"
+ "name": "showNegativeLineNumbers",
+ "description": "Show Negative line number labels (default: false).",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "showAreaNames",
+ "description": "Show area name labels (default: false).",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "showLineNames",
+ "description": "Show line name labels (default: false).",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "showTrackSizes",
+ "description": "Show track size labels (default: false).",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "gridBorderColor",
+ "description": "The grid container border highlight color (default: transparent).",
+ "optional": true,
+ "$ref": "DOM.RGBA"
+ },
+ {
+ "name": "cellBorderColor",
+ "description": "The cell border color (default: transparent). Deprecated, please use rowLineColor and columnLineColor instead.",
+ "deprecated": true,
+ "optional": true,
+ "$ref": "DOM.RGBA"
+ },
+ {
+ "name": "rowLineColor",
+ "description": "The row line color (default: transparent).",
+ "optional": true,
+ "$ref": "DOM.RGBA"
+ },
+ {
+ "name": "columnLineColor",
+ "description": "The column line color (default: transparent).",
+ "optional": true,
+ "$ref": "DOM.RGBA"
+ },
+ {
+ "name": "gridBorderDash",
+ "description": "Whether the grid border is dashed (default: false).",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "cellBorderDash",
+ "description": "Whether the cell border is dashed (default: false). Deprecated, please us rowLineDash and columnLineDash instead.",
+ "deprecated": true,
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "rowLineDash",
+ "description": "Whether row lines are dashed (default: false).",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "columnLineDash",
+ "description": "Whether column lines are dashed (default: false).",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "rowGapColor",
+ "description": "The row gap highlight fill color (default: transparent).",
+ "optional": true,
+ "$ref": "DOM.RGBA"
+ },
+ {
+ "name": "rowHatchColor",
+ "description": "The row gap hatching fill color (default: transparent).",
+ "optional": true,
+ "$ref": "DOM.RGBA"
+ },
+ {
+ "name": "columnGapColor",
+ "description": "The column gap highlight fill color (default: transparent).",
+ "optional": true,
+ "$ref": "DOM.RGBA"
+ },
+ {
+ "name": "columnHatchColor",
+ "description": "The column gap hatching fill color (default: transparent).",
+ "optional": true,
+ "$ref": "DOM.RGBA"
+ },
+ {
+ "name": "areaBorderColor",
+ "description": "The named grid areas border color (Default: transparent).",
+ "optional": true,
+ "$ref": "DOM.RGBA"
+ },
+ {
+ "name": "gridBackgroundColor",
+ "description": "The grid container background color (Default: transparent).",
+ "optional": true,
+ "$ref": "DOM.RGBA"
}
]
},
{
- "name": "webSocketWillSendHandshakeRequest",
- "description": "Fired when WebSocket is about to initiate handshake.",
- "parameters": [
+ "id": "FlexContainerHighlightConfig",
+ "description": "Configuration data for the highlighting of Flex container elements.",
+ "type": "object",
+ "properties": [
{
- "name": "requestId",
- "description": "Request identifier.",
- "$ref": "RequestId"
+ "name": "containerBorder",
+ "description": "The style of the container border",
+ "optional": true,
+ "$ref": "LineStyle"
},
{
- "name": "timestamp",
- "description": "Timestamp.",
- "$ref": "MonotonicTime"
+ "name": "lineSeparator",
+ "description": "The style of the separator between lines",
+ "optional": true,
+ "$ref": "LineStyle"
},
{
- "name": "wallTime",
- "description": "UTC Timestamp.",
- "$ref": "TimeSinceEpoch"
+ "name": "itemSeparator",
+ "description": "The style of the separator between items",
+ "optional": true,
+ "$ref": "LineStyle"
},
{
- "name": "request",
- "description": "WebSocket request data.",
- "$ref": "WebSocketRequest"
+ "name": "mainDistributedSpace",
+ "description": "Style of content-distribution space on the main axis (justify-content).",
+ "optional": true,
+ "$ref": "BoxStyle"
+ },
+ {
+ "name": "crossDistributedSpace",
+ "description": "Style of content-distribution space on the cross axis (align-content).",
+ "optional": true,
+ "$ref": "BoxStyle"
+ },
+ {
+ "name": "rowGapSpace",
+ "description": "Style of empty space caused by row gaps (gap/row-gap).",
+ "optional": true,
+ "$ref": "BoxStyle"
+ },
+ {
+ "name": "columnGapSpace",
+ "description": "Style of empty space caused by columns gaps (gap/column-gap).",
+ "optional": true,
+ "$ref": "BoxStyle"
+ },
+ {
+ "name": "crossAlignment",
+ "description": "Style of the self-alignment line (align-items).",
+ "optional": true,
+ "$ref": "LineStyle"
}
]
},
{
- "name": "requestWillBeSentExtraInfo",
- "description": "Fired when additional information about a requestWillBeSent event is available from the\nnetwork stack. Not every requestWillBeSent event will have an additional\nrequestWillBeSentExtraInfo fired for it, and there is no guarantee whether requestWillBeSent\nor requestWillBeSentExtraInfo will be fired first for the same request.",
- "experimental": true,
- "parameters": [
+ "id": "FlexItemHighlightConfig",
+ "description": "Configuration data for the highlighting of Flex item elements.",
+ "type": "object",
+ "properties": [
{
- "name": "requestId",
- "description": "Request identifier. Used to match this information to an existing requestWillBeSent event.",
- "$ref": "RequestId"
+ "name": "baseSizeBox",
+ "description": "Style of the box representing the item's base size",
+ "optional": true,
+ "$ref": "BoxStyle"
},
{
- "name": "blockedCookies",
- "description": "A list of cookies which will not be sent with this request along with corresponding reasons\nfor blocking.",
- "type": "array",
- "items": {
- "$ref": "BlockedCookieWithReason"
- }
+ "name": "baseSizeBorder",
+ "description": "Style of the border around the box representing the item's base size",
+ "optional": true,
+ "$ref": "LineStyle"
},
{
- "name": "headers",
- "description": "Raw request headers as they will be sent over the wire.",
- "$ref": "Headers"
+ "name": "flexibilityArrow",
+ "description": "Style of the arrow representing if the item grew or shrank",
+ "optional": true,
+ "$ref": "LineStyle"
}
]
},
{
- "name": "responseReceivedExtraInfo",
- "description": "Fired when additional information about a responseReceived event is available from the network\nstack. Not every responseReceived event will have an additional responseReceivedExtraInfo for\nit, and responseReceivedExtraInfo may be fired before or after responseReceived.",
- "experimental": true,
- "parameters": [
- {
- "name": "requestId",
- "description": "Request identifier. Used to match this information to another responseReceived event.",
- "$ref": "RequestId"
- },
+ "id": "LineStyle",
+ "description": "Style information for drawing a line.",
+ "type": "object",
+ "properties": [
{
- "name": "blockedCookies",
- "description": "A list of cookies which were not stored from the response along with the corresponding\nreasons for blocking. The cookies here may not be valid due to syntax errors, which\nare represented by the invalid cookie line string instead of a proper cookie.",
- "type": "array",
- "items": {
- "$ref": "BlockedSetCookieWithReason"
- }
+ "name": "color",
+ "description": "The color of the line (default: transparent)",
+ "optional": true,
+ "$ref": "DOM.RGBA"
},
{
- "name": "headers",
- "description": "Raw response headers as they were received over the wire.",
- "$ref": "Headers"
+ "name": "pattern",
+ "description": "The line pattern (default: solid)",
+ "optional": true,
+ "type": "string",
+ "enum": [
+ "dashed",
+ "dotted"
+ ]
+ }
+ ]
+ },
+ {
+ "id": "BoxStyle",
+ "description": "Style information for drawing a box.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "fillColor",
+ "description": "The background color for the box (default: transparent)",
+ "optional": true,
+ "$ref": "DOM.RGBA"
},
{
- "name": "headersText",
- "description": "Raw response header text as it was received over the wire. The raw text may not always be\navailable, such as in the case of HTTP/2 or QUIC.",
+ "name": "hatchColor",
+ "description": "The hatching color for the box (default: transparent)",
"optional": true,
- "type": "string"
+ "$ref": "DOM.RGBA"
}
]
- }
- ]
- },
- {
- "domain": "Overlay",
- "description": "This domain provides various functionality related to drawing atop the inspected page.",
- "experimental": true,
- "dependencies": [
- "DOM",
- "Page",
- "Runtime"
- ],
- "types": [
+ },
+ {
+ "id": "ContrastAlgorithm",
+ "type": "string",
+ "enum": [
+ "aa",
+ "aaa",
+ "apca"
+ ]
+ },
{
"id": "HighlightConfig",
"description": "Configuration data for the highlighting of page elements.",
@@ -10496,6 +13917,12 @@
"optional": true,
"type": "boolean"
},
+ {
+ "name": "showAccessibilityInfo",
+ "description": "Whether the a11y info should be shown (default: true).",
+ "optional": true,
+ "type": "boolean"
+ },
{
"name": "showExtensionLines",
"description": "Whether the extension lines from node to the rulers should be shown (default: false).",
@@ -10549,6 +13976,228 @@
"description": "The grid layout color (default: transparent).",
"optional": true,
"$ref": "DOM.RGBA"
+ },
+ {
+ "name": "colorFormat",
+ "description": "The color format used to format color styles (default: hex).",
+ "optional": true,
+ "$ref": "ColorFormat"
+ },
+ {
+ "name": "gridHighlightConfig",
+ "description": "The grid layout highlight configuration (default: all transparent).",
+ "optional": true,
+ "$ref": "GridHighlightConfig"
+ },
+ {
+ "name": "flexContainerHighlightConfig",
+ "description": "The flex container highlight configuration (default: all transparent).",
+ "optional": true,
+ "$ref": "FlexContainerHighlightConfig"
+ },
+ {
+ "name": "flexItemHighlightConfig",
+ "description": "The flex item highlight configuration (default: all transparent).",
+ "optional": true,
+ "$ref": "FlexItemHighlightConfig"
+ },
+ {
+ "name": "contrastAlgorithm",
+ "description": "The contrast algorithm to use for the contrast ratio (default: aa).",
+ "optional": true,
+ "$ref": "ContrastAlgorithm"
+ },
+ {
+ "name": "containerQueryContainerHighlightConfig",
+ "description": "The container query container highlight configuration (default: all transparent).",
+ "optional": true,
+ "$ref": "ContainerQueryContainerHighlightConfig"
+ }
+ ]
+ },
+ {
+ "id": "ColorFormat",
+ "type": "string",
+ "enum": [
+ "rgb",
+ "hsl",
+ "hex"
+ ]
+ },
+ {
+ "id": "GridNodeHighlightConfig",
+ "description": "Configurations for Persistent Grid Highlight",
+ "type": "object",
+ "properties": [
+ {
+ "name": "gridHighlightConfig",
+ "description": "A descriptor for the highlight appearance.",
+ "$ref": "GridHighlightConfig"
+ },
+ {
+ "name": "nodeId",
+ "description": "Identifier of the node to highlight.",
+ "$ref": "DOM.NodeId"
+ }
+ ]
+ },
+ {
+ "id": "FlexNodeHighlightConfig",
+ "type": "object",
+ "properties": [
+ {
+ "name": "flexContainerHighlightConfig",
+ "description": "A descriptor for the highlight appearance of flex containers.",
+ "$ref": "FlexContainerHighlightConfig"
+ },
+ {
+ "name": "nodeId",
+ "description": "Identifier of the node to highlight.",
+ "$ref": "DOM.NodeId"
+ }
+ ]
+ },
+ {
+ "id": "ScrollSnapContainerHighlightConfig",
+ "type": "object",
+ "properties": [
+ {
+ "name": "snapportBorder",
+ "description": "The style of the snapport border (default: transparent)",
+ "optional": true,
+ "$ref": "LineStyle"
+ },
+ {
+ "name": "snapAreaBorder",
+ "description": "The style of the snap area border (default: transparent)",
+ "optional": true,
+ "$ref": "LineStyle"
+ },
+ {
+ "name": "scrollMarginColor",
+ "description": "The margin highlight fill color (default: transparent).",
+ "optional": true,
+ "$ref": "DOM.RGBA"
+ },
+ {
+ "name": "scrollPaddingColor",
+ "description": "The padding highlight fill color (default: transparent).",
+ "optional": true,
+ "$ref": "DOM.RGBA"
+ }
+ ]
+ },
+ {
+ "id": "ScrollSnapHighlightConfig",
+ "type": "object",
+ "properties": [
+ {
+ "name": "scrollSnapContainerHighlightConfig",
+ "description": "A descriptor for the highlight appearance of scroll snap containers.",
+ "$ref": "ScrollSnapContainerHighlightConfig"
+ },
+ {
+ "name": "nodeId",
+ "description": "Identifier of the node to highlight.",
+ "$ref": "DOM.NodeId"
+ }
+ ]
+ },
+ {
+ "id": "HingeConfig",
+ "description": "Configuration for dual screen hinge",
+ "type": "object",
+ "properties": [
+ {
+ "name": "rect",
+ "description": "A rectangle represent hinge",
+ "$ref": "DOM.Rect"
+ },
+ {
+ "name": "contentColor",
+ "description": "The content box highlight fill color (default: a dark color).",
+ "optional": true,
+ "$ref": "DOM.RGBA"
+ },
+ {
+ "name": "outlineColor",
+ "description": "The content box highlight outline color (default: transparent).",
+ "optional": true,
+ "$ref": "DOM.RGBA"
+ }
+ ]
+ },
+ {
+ "id": "ContainerQueryHighlightConfig",
+ "type": "object",
+ "properties": [
+ {
+ "name": "containerQueryContainerHighlightConfig",
+ "description": "A descriptor for the highlight appearance of container query containers.",
+ "$ref": "ContainerQueryContainerHighlightConfig"
+ },
+ {
+ "name": "nodeId",
+ "description": "Identifier of the container node to highlight.",
+ "$ref": "DOM.NodeId"
+ }
+ ]
+ },
+ {
+ "id": "ContainerQueryContainerHighlightConfig",
+ "type": "object",
+ "properties": [
+ {
+ "name": "containerBorder",
+ "description": "The style of the container border.",
+ "optional": true,
+ "$ref": "LineStyle"
+ },
+ {
+ "name": "descendantBorder",
+ "description": "The style of the descendants' borders.",
+ "optional": true,
+ "$ref": "LineStyle"
+ }
+ ]
+ },
+ {
+ "id": "IsolatedElementHighlightConfig",
+ "type": "object",
+ "properties": [
+ {
+ "name": "isolationModeHighlightConfig",
+ "description": "A descriptor for the highlight appearance of an element in isolation mode.",
+ "$ref": "IsolationModeHighlightConfig"
+ },
+ {
+ "name": "nodeId",
+ "description": "Identifier of the isolated element to highlight.",
+ "$ref": "DOM.NodeId"
+ }
+ ]
+ },
+ {
+ "id": "IsolationModeHighlightConfig",
+ "type": "object",
+ "properties": [
+ {
+ "name": "resizerColor",
+ "description": "The fill color of the resizers (default: transparent).",
+ "optional": true,
+ "$ref": "DOM.RGBA"
+ },
+ {
+ "name": "resizerHandleColor",
+ "description": "The fill color for resizer handles (default: transparent).",
+ "optional": true,
+ "$ref": "DOM.RGBA"
+ },
+ {
+ "name": "maskColor",
+ "description": "The fill color for the mask covering non-isolated elements (default: transparent).",
+ "optional": true,
+ "$ref": "DOM.RGBA"
}
]
},
@@ -10593,6 +14242,18 @@
"description": "Whether to include style info.",
"optional": true,
"type": "boolean"
+ },
+ {
+ "name": "colorFormat",
+ "description": "The color format to get config with (default: hex).",
+ "optional": true,
+ "$ref": "ColorFormat"
+ },
+ {
+ "name": "showAccessibilityInfo",
+ "description": "Whether to show accessibility info (default: true).",
+ "optional": true,
+ "type": "boolean"
}
],
"returns": [
@@ -10603,13 +14264,53 @@
}
]
},
+ {
+ "name": "getGridHighlightObjectsForTest",
+ "description": "For Persistent Grid testing.",
+ "parameters": [
+ {
+ "name": "nodeIds",
+ "description": "Ids of the node to get highlight object for.",
+ "type": "array",
+ "items": {
+ "$ref": "DOM.NodeId"
+ }
+ }
+ ],
+ "returns": [
+ {
+ "name": "highlights",
+ "description": "Grid Highlight data for the node ids provided.",
+ "type": "object"
+ }
+ ]
+ },
+ {
+ "name": "getSourceOrderHighlightObjectForTest",
+ "description": "For Source Order Viewer testing.",
+ "parameters": [
+ {
+ "name": "nodeId",
+ "description": "Id of the node to highlight.",
+ "$ref": "DOM.NodeId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "highlight",
+ "description": "Source order highlight data for the node id provided.",
+ "type": "object"
+ }
+ ]
+ },
{
"name": "hideHighlight",
"description": "Hides any highlight."
},
{
"name": "highlightFrame",
- "description": "Highlights owner element of the frame with given id.",
+ "description": "Highlights owner element of the frame with given id.\nDeprecated: Doesn't work reliablity and cannot be fixed due to process\nseparatation (the owner node might be in a different process). Determine\nthe owner node in the client and use highlightNode.",
+ "deprecated": true,
"parameters": [
{
"name": "frameId",
@@ -10716,13 +14417,42 @@
"name": "color",
"description": "The highlight fill color (default: transparent).",
"optional": true,
- "$ref": "DOM.RGBA"
+ "$ref": "DOM.RGBA"
+ },
+ {
+ "name": "outlineColor",
+ "description": "The highlight outline color (default: transparent).",
+ "optional": true,
+ "$ref": "DOM.RGBA"
+ }
+ ]
+ },
+ {
+ "name": "highlightSourceOrder",
+ "description": "Highlights the source order of the children of the DOM node with given id or with the given\nJavaScript object wrapper. Either nodeId or objectId must be specified.",
+ "parameters": [
+ {
+ "name": "sourceOrderConfig",
+ "description": "A descriptor for the appearance of the overlay drawing.",
+ "$ref": "SourceOrderConfig"
+ },
+ {
+ "name": "nodeId",
+ "description": "Identifier of the node to highlight.",
+ "optional": true,
+ "$ref": "DOM.NodeId"
+ },
+ {
+ "name": "backendNodeId",
+ "description": "Identifier of the backend node to highlight.",
+ "optional": true,
+ "$ref": "DOM.BackendNodeId"
},
{
- "name": "outlineColor",
- "description": "The highlight outline color (default: transparent).",
+ "name": "objectId",
+ "description": "JavaScript object id of the node to be highlighted.",
"optional": true,
- "$ref": "DOM.RGBA"
+ "$ref": "Runtime.RemoteObjectId"
}
]
},
@@ -10787,6 +14517,59 @@
}
]
},
+ {
+ "name": "setShowGridOverlays",
+ "description": "Highlight multiple elements with the CSS Grid overlay.",
+ "parameters": [
+ {
+ "name": "gridNodeHighlightConfigs",
+ "description": "An array of node identifiers and descriptors for the highlight appearance.",
+ "type": "array",
+ "items": {
+ "$ref": "GridNodeHighlightConfig"
+ }
+ }
+ ]
+ },
+ {
+ "name": "setShowFlexOverlays",
+ "parameters": [
+ {
+ "name": "flexNodeHighlightConfigs",
+ "description": "An array of node identifiers and descriptors for the highlight appearance.",
+ "type": "array",
+ "items": {
+ "$ref": "FlexNodeHighlightConfig"
+ }
+ }
+ ]
+ },
+ {
+ "name": "setShowScrollSnapOverlays",
+ "parameters": [
+ {
+ "name": "scrollSnapHighlightConfigs",
+ "description": "An array of node identifiers and descriptors for the highlight appearance.",
+ "type": "array",
+ "items": {
+ "$ref": "ScrollSnapHighlightConfig"
+ }
+ }
+ ]
+ },
+ {
+ "name": "setShowContainerQueryOverlays",
+ "parameters": [
+ {
+ "name": "containerQueryHighlightConfigs",
+ "description": "An array of node identifiers and descriptors for the highlight appearance.",
+ "type": "array",
+ "items": {
+ "$ref": "ContainerQueryHighlightConfig"
+ }
+ }
+ ]
+ },
{
"name": "setShowPaintRects",
"description": "Requests that backend shows paint rectangles",
@@ -10810,94 +14593,440 @@
]
},
{
- "name": "setShowScrollBottleneckRects",
- "description": "Requests that backend shows scroll bottleneck rects",
- "parameters": [
- {
- "name": "show",
- "description": "True for showing scroll bottleneck rects",
- "type": "boolean"
- }
+ "name": "setShowScrollBottleneckRects",
+ "description": "Requests that backend shows scroll bottleneck rects",
+ "parameters": [
+ {
+ "name": "show",
+ "description": "True for showing scroll bottleneck rects",
+ "type": "boolean"
+ }
+ ]
+ },
+ {
+ "name": "setShowHitTestBorders",
+ "description": "Deprecated, no longer has any effect.",
+ "deprecated": true,
+ "parameters": [
+ {
+ "name": "show",
+ "description": "True for showing hit-test borders",
+ "type": "boolean"
+ }
+ ]
+ },
+ {
+ "name": "setShowWebVitals",
+ "description": "Request that backend shows an overlay with web vital metrics.",
+ "parameters": [
+ {
+ "name": "show",
+ "type": "boolean"
+ }
+ ]
+ },
+ {
+ "name": "setShowViewportSizeOnResize",
+ "description": "Paints viewport size upon main frame resize.",
+ "parameters": [
+ {
+ "name": "show",
+ "description": "Whether to paint size or not.",
+ "type": "boolean"
+ }
+ ]
+ },
+ {
+ "name": "setShowHinge",
+ "description": "Add a dual screen device hinge",
+ "parameters": [
+ {
+ "name": "hingeConfig",
+ "description": "hinge data, null means hideHinge",
+ "optional": true,
+ "$ref": "HingeConfig"
+ }
+ ]
+ },
+ {
+ "name": "setShowIsolatedElements",
+ "description": "Show elements in isolation mode with overlays.",
+ "parameters": [
+ {
+ "name": "isolatedElementHighlightConfigs",
+ "description": "An array of node identifiers and descriptors for the highlight appearance.",
+ "type": "array",
+ "items": {
+ "$ref": "IsolatedElementHighlightConfig"
+ }
+ }
+ ]
+ }
+ ],
+ "events": [
+ {
+ "name": "inspectNodeRequested",
+ "description": "Fired when the node should be inspected. This happens after call to `setInspectMode` or when\nuser manually inspects an element.",
+ "parameters": [
+ {
+ "name": "backendNodeId",
+ "description": "Id of the node to inspect.",
+ "$ref": "DOM.BackendNodeId"
+ }
+ ]
+ },
+ {
+ "name": "nodeHighlightRequested",
+ "description": "Fired when the node should be highlighted. This happens after call to `setInspectMode`.",
+ "parameters": [
+ {
+ "name": "nodeId",
+ "$ref": "DOM.NodeId"
+ }
+ ]
+ },
+ {
+ "name": "screenshotRequested",
+ "description": "Fired when user asks to capture screenshot of some area on the page.",
+ "parameters": [
+ {
+ "name": "viewport",
+ "description": "Viewport to capture, in device independent pixels (dip).",
+ "$ref": "Page.Viewport"
+ }
+ ]
+ },
+ {
+ "name": "inspectModeCanceled",
+ "description": "Fired when user cancels the inspect mode."
+ }
+ ]
+ },
+ {
+ "domain": "Page",
+ "description": "Actions and events related to the inspected page belong to the page domain.",
+ "dependencies": [
+ "Debugger",
+ "DOM",
+ "IO",
+ "Network",
+ "Runtime"
+ ],
+ "types": [
+ {
+ "id": "FrameId",
+ "description": "Unique frame identifier.",
+ "type": "string"
+ },
+ {
+ "id": "AdFrameType",
+ "description": "Indicates whether a frame has been identified as an ad.",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "none",
+ "child",
+ "root"
+ ]
+ },
+ {
+ "id": "AdFrameExplanation",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "ParentIsAd",
+ "CreatedByAdScript",
+ "MatchedBlockingRule"
+ ]
+ },
+ {
+ "id": "AdFrameStatus",
+ "description": "Indicates whether a frame has been identified as an ad and why.",
+ "experimental": true,
+ "type": "object",
+ "properties": [
+ {
+ "name": "adFrameType",
+ "$ref": "AdFrameType"
+ },
+ {
+ "name": "explanations",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "$ref": "AdFrameExplanation"
+ }
+ }
+ ]
+ },
+ {
+ "id": "SecureContextType",
+ "description": "Indicates whether the frame is a secure context and why it is the case.",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "Secure",
+ "SecureLocalhost",
+ "InsecureScheme",
+ "InsecureAncestor"
+ ]
+ },
+ {
+ "id": "CrossOriginIsolatedContextType",
+ "description": "Indicates whether the frame is cross-origin isolated and why it is the case.",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "Isolated",
+ "NotIsolated",
+ "NotIsolatedFeatureDisabled"
+ ]
+ },
+ {
+ "id": "GatedAPIFeatures",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "SharedArrayBuffers",
+ "SharedArrayBuffersTransferAllowed",
+ "PerformanceMeasureMemory",
+ "PerformanceProfile"
+ ]
+ },
+ {
+ "id": "PermissionsPolicyFeature",
+ "description": "All Permissions Policy features. This enum should match the one defined\nin third_party/blink/renderer/core/permissions_policy/permissions_policy_features.json5.",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "accelerometer",
+ "ambient-light-sensor",
+ "attribution-reporting",
+ "autoplay",
+ "camera",
+ "ch-dpr",
+ "ch-device-memory",
+ "ch-downlink",
+ "ch-ect",
+ "ch-prefers-color-scheme",
+ "ch-rtt",
+ "ch-ua",
+ "ch-ua-arch",
+ "ch-ua-bitness",
+ "ch-ua-platform",
+ "ch-ua-model",
+ "ch-ua-mobile",
+ "ch-ua-full",
+ "ch-ua-full-version",
+ "ch-ua-full-version-list",
+ "ch-ua-platform-version",
+ "ch-ua-reduced",
+ "ch-ua-wow64",
+ "ch-viewport-height",
+ "ch-viewport-width",
+ "ch-width",
+ "ch-partitioned-cookies",
+ "clipboard-read",
+ "clipboard-write",
+ "cross-origin-isolated",
+ "direct-sockets",
+ "display-capture",
+ "document-domain",
+ "encrypted-media",
+ "execution-while-out-of-viewport",
+ "execution-while-not-rendered",
+ "focus-without-user-activation",
+ "fullscreen",
+ "frobulate",
+ "gamepad",
+ "geolocation",
+ "gyroscope",
+ "hid",
+ "idle-detection",
+ "join-ad-interest-group",
+ "keyboard-map",
+ "magnetometer",
+ "microphone",
+ "midi",
+ "otp-credentials",
+ "payment",
+ "picture-in-picture",
+ "publickey-credentials-get",
+ "run-ad-auction",
+ "screen-wake-lock",
+ "serial",
+ "shared-autofill",
+ "storage-access-api",
+ "sync-xhr",
+ "trust-token-redemption",
+ "usb",
+ "vertical-scroll",
+ "web-share",
+ "window-placement",
+ "xr-spatial-tracking"
+ ]
+ },
+ {
+ "id": "PermissionsPolicyBlockReason",
+ "description": "Reason for a permissions policy feature to be disabled.",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "Header",
+ "IframeAttribute",
+ "InFencedFrameTree"
+ ]
+ },
+ {
+ "id": "PermissionsPolicyBlockLocator",
+ "experimental": true,
+ "type": "object",
+ "properties": [
+ {
+ "name": "frameId",
+ "$ref": "FrameId"
+ },
+ {
+ "name": "blockReason",
+ "$ref": "PermissionsPolicyBlockReason"
+ }
+ ]
+ },
+ {
+ "id": "PermissionsPolicyFeatureState",
+ "experimental": true,
+ "type": "object",
+ "properties": [
+ {
+ "name": "feature",
+ "$ref": "PermissionsPolicyFeature"
+ },
+ {
+ "name": "allowed",
+ "type": "boolean"
+ },
+ {
+ "name": "locator",
+ "optional": true,
+ "$ref": "PermissionsPolicyBlockLocator"
+ }
+ ]
+ },
+ {
+ "id": "OriginTrialTokenStatus",
+ "description": "Origin Trial(https://www.chromium.org/blink/origin-trials) support.\nStatus for an Origin Trial token.",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "Success",
+ "NotSupported",
+ "Insecure",
+ "Expired",
+ "WrongOrigin",
+ "InvalidSignature",
+ "Malformed",
+ "WrongVersion",
+ "FeatureDisabled",
+ "TokenDisabled",
+ "FeatureDisabledForUser",
+ "UnknownTrial"
+ ]
+ },
+ {
+ "id": "OriginTrialStatus",
+ "description": "Status for an Origin Trial.",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "Enabled",
+ "ValidTokenNotProvided",
+ "OSNotSupported",
+ "TrialNotAllowed"
+ ]
+ },
+ {
+ "id": "OriginTrialUsageRestriction",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "None",
+ "Subset"
]
},
{
- "name": "setShowHitTestBorders",
- "description": "Requests that backend shows hit-test borders on layers",
- "parameters": [
+ "id": "OriginTrialToken",
+ "experimental": true,
+ "type": "object",
+ "properties": [
{
- "name": "show",
- "description": "True for showing hit-test borders",
+ "name": "origin",
+ "type": "string"
+ },
+ {
+ "name": "matchSubDomains",
"type": "boolean"
- }
- ]
- },
- {
- "name": "setShowViewportSizeOnResize",
- "description": "Paints viewport size upon main frame resize.",
- "parameters": [
+ },
{
- "name": "show",
- "description": "Whether to paint size or not.",
+ "name": "trialName",
+ "type": "string"
+ },
+ {
+ "name": "expiryTime",
+ "$ref": "Network.TimeSinceEpoch"
+ },
+ {
+ "name": "isThirdParty",
"type": "boolean"
- }
- ]
- }
- ],
- "events": [
- {
- "name": "inspectNodeRequested",
- "description": "Fired when the node should be inspected. This happens after call to `setInspectMode` or when\nuser manually inspects an element.",
- "parameters": [
+ },
{
- "name": "backendNodeId",
- "description": "Id of the node to inspect.",
- "$ref": "DOM.BackendNodeId"
+ "name": "usageRestriction",
+ "$ref": "OriginTrialUsageRestriction"
}
]
},
{
- "name": "nodeHighlightRequested",
- "description": "Fired when the node should be highlighted. This happens after call to `setInspectMode`.",
- "parameters": [
+ "id": "OriginTrialTokenWithStatus",
+ "experimental": true,
+ "type": "object",
+ "properties": [
{
- "name": "nodeId",
- "$ref": "DOM.NodeId"
+ "name": "rawTokenText",
+ "type": "string"
+ },
+ {
+ "name": "parsedToken",
+ "description": "`parsedToken` is present only when the token is extractable and\nparsable.",
+ "optional": true,
+ "$ref": "OriginTrialToken"
+ },
+ {
+ "name": "status",
+ "$ref": "OriginTrialTokenStatus"
}
]
},
{
- "name": "screenshotRequested",
- "description": "Fired when user asks to capture screenshot of some area on the page.",
- "parameters": [
+ "id": "OriginTrial",
+ "experimental": true,
+ "type": "object",
+ "properties": [
{
- "name": "viewport",
- "description": "Viewport to capture, in device independent pixels (dip).",
- "$ref": "Page.Viewport"
+ "name": "trialName",
+ "type": "string"
+ },
+ {
+ "name": "status",
+ "$ref": "OriginTrialStatus"
+ },
+ {
+ "name": "tokensWithStatus",
+ "type": "array",
+ "items": {
+ "$ref": "OriginTrialTokenWithStatus"
+ }
}
]
},
- {
- "name": "inspectModeCanceled",
- "description": "Fired when user cancels the inspect mode."
- }
- ]
- },
- {
- "domain": "Page",
- "description": "Actions and events related to the inspected page belong to the page domain.",
- "dependencies": [
- "Debugger",
- "DOM",
- "IO",
- "Network",
- "Runtime"
- ],
- "types": [
- {
- "id": "FrameId",
- "description": "Unique frame identifier.",
- "type": "string"
- },
{
"id": "Frame",
"description": "Information about the Frame on the page.",
@@ -10906,13 +15035,13 @@
{
"name": "id",
"description": "Frame unique identifier.",
- "type": "string"
+ "$ref": "FrameId"
},
{
"name": "parentId",
"description": "Parent frame identifier.",
"optional": true,
- "type": "string"
+ "$ref": "FrameId"
},
{
"name": "loaderId",
@@ -10937,6 +15066,12 @@
"optional": true,
"type": "string"
},
+ {
+ "name": "domainAndRegistry",
+ "description": "Frame document's registered domain, taking the public suffixes list into account.\nExtracted from the Frame's url.\nExample URLs: http://www.google.com/file.html -> \"google.com\"\n http://a.b.co.uk/file.html -> \"b.co.uk\"",
+ "experimental": true,
+ "type": "string"
+ },
{
"name": "securityOrigin",
"description": "Frame document's security origin.",
@@ -10953,6 +15088,34 @@
"experimental": true,
"optional": true,
"type": "string"
+ },
+ {
+ "name": "adFrameStatus",
+ "description": "Indicates whether this frame was tagged as an ad and why.",
+ "experimental": true,
+ "optional": true,
+ "$ref": "AdFrameStatus"
+ },
+ {
+ "name": "secureContextType",
+ "description": "Indicates whether the main document is a secure context and explains why that is the case.",
+ "experimental": true,
+ "$ref": "SecureContextType"
+ },
+ {
+ "name": "crossOriginIsolatedContextType",
+ "description": "Indicates whether this is a cross origin isolated context.",
+ "experimental": true,
+ "$ref": "CrossOriginIsolatedContextType"
+ },
+ {
+ "name": "gatedAPIFeatures",
+ "description": "Indicated which gated APIs / features are available.",
+ "experimental": true,
+ "type": "array",
+ "items": {
+ "$ref": "GatedAPIFeatures"
+ }
}
]
},
@@ -11193,6 +15356,19 @@
}
]
},
+ {
+ "id": "AppManifestParsedProperties",
+ "description": "Parsed app manifest properties.",
+ "experimental": true,
+ "type": "object",
+ "properties": [
+ {
+ "name": "scope",
+ "description": "Computed scope value",
+ "type": "string"
+ }
+ ]
+ },
{
"id": "LayoutViewport",
"description": "Layout viewport position and dimensions.",
@@ -11351,37 +15527,337 @@
]
},
{
- "id": "FontSizes",
- "description": "Default font sizes.",
+ "id": "ScriptFontFamilies",
+ "description": "Font families collection for a script.",
+ "experimental": true,
+ "type": "object",
+ "properties": [
+ {
+ "name": "script",
+ "description": "Name of the script which these font families are defined for.",
+ "type": "string"
+ },
+ {
+ "name": "fontFamilies",
+ "description": "Generic font families collection for the script.",
+ "$ref": "FontFamilies"
+ }
+ ]
+ },
+ {
+ "id": "FontSizes",
+ "description": "Default font sizes.",
+ "experimental": true,
+ "type": "object",
+ "properties": [
+ {
+ "name": "standard",
+ "description": "Default standard font size.",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "fixed",
+ "description": "Default fixed font size.",
+ "optional": true,
+ "type": "integer"
+ }
+ ]
+ },
+ {
+ "id": "ClientNavigationReason",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "formSubmissionGet",
+ "formSubmissionPost",
+ "httpHeaderRefresh",
+ "scriptInitiated",
+ "metaTagRefresh",
+ "pageBlockInterstitial",
+ "reload",
+ "anchorClick"
+ ]
+ },
+ {
+ "id": "ClientNavigationDisposition",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "currentTab",
+ "newTab",
+ "newWindow",
+ "download"
+ ]
+ },
+ {
+ "id": "InstallabilityErrorArgument",
+ "experimental": true,
+ "type": "object",
+ "properties": [
+ {
+ "name": "name",
+ "description": "Argument name (e.g. name:'minimum-icon-size-in-pixels').",
+ "type": "string"
+ },
+ {
+ "name": "value",
+ "description": "Argument value (e.g. value:'64').",
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "id": "InstallabilityError",
+ "description": "The installability error",
+ "experimental": true,
+ "type": "object",
+ "properties": [
+ {
+ "name": "errorId",
+ "description": "The error id (e.g. 'manifest-missing-suitable-icon').",
+ "type": "string"
+ },
+ {
+ "name": "errorArguments",
+ "description": "The list of error arguments (e.g. {name:'minimum-icon-size-in-pixels', value:'64'}).",
+ "type": "array",
+ "items": {
+ "$ref": "InstallabilityErrorArgument"
+ }
+ }
+ ]
+ },
+ {
+ "id": "ReferrerPolicy",
+ "description": "The referring-policy used for the navigation.",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "noReferrer",
+ "noReferrerWhenDowngrade",
+ "origin",
+ "originWhenCrossOrigin",
+ "sameOrigin",
+ "strictOrigin",
+ "strictOriginWhenCrossOrigin",
+ "unsafeUrl"
+ ]
+ },
+ {
+ "id": "CompilationCacheParams",
+ "description": "Per-script compilation cache parameters for `Page.produceCompilationCache`",
+ "experimental": true,
+ "type": "object",
+ "properties": [
+ {
+ "name": "url",
+ "description": "The URL of the script to produce a compilation cache entry for.",
+ "type": "string"
+ },
+ {
+ "name": "eager",
+ "description": "A hint to the backend whether eager compilation is recommended.\n(the actual compilation mode used is upon backend discretion).",
+ "optional": true,
+ "type": "boolean"
+ }
+ ]
+ },
+ {
+ "id": "NavigationType",
+ "description": "The type of a frameNavigated event.",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "Navigation",
+ "BackForwardCacheRestore"
+ ]
+ },
+ {
+ "id": "BackForwardCacheNotRestoredReason",
+ "description": "List of not restored reasons for back-forward cache.",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "NotPrimaryMainFrame",
+ "BackForwardCacheDisabled",
+ "RelatedActiveContentsExist",
+ "HTTPStatusNotOK",
+ "SchemeNotHTTPOrHTTPS",
+ "Loading",
+ "WasGrantedMediaAccess",
+ "DisableForRenderFrameHostCalled",
+ "DomainNotAllowed",
+ "HTTPMethodNotGET",
+ "SubframeIsNavigating",
+ "Timeout",
+ "CacheLimit",
+ "JavaScriptExecution",
+ "RendererProcessKilled",
+ "RendererProcessCrashed",
+ "GrantedMediaStreamAccess",
+ "SchedulerTrackedFeatureUsed",
+ "ConflictingBrowsingInstance",
+ "CacheFlushed",
+ "ServiceWorkerVersionActivation",
+ "SessionRestored",
+ "ServiceWorkerPostMessage",
+ "EnteredBackForwardCacheBeforeServiceWorkerHostAdded",
+ "RenderFrameHostReused_SameSite",
+ "RenderFrameHostReused_CrossSite",
+ "ServiceWorkerClaim",
+ "IgnoreEventAndEvict",
+ "HaveInnerContents",
+ "TimeoutPuttingInCache",
+ "BackForwardCacheDisabledByLowMemory",
+ "BackForwardCacheDisabledByCommandLine",
+ "NetworkRequestDatapipeDrainedAsBytesConsumer",
+ "NetworkRequestRedirected",
+ "NetworkRequestTimeout",
+ "NetworkExceedsBufferLimit",
+ "NavigationCancelledWhileRestoring",
+ "NotMostRecentNavigationEntry",
+ "BackForwardCacheDisabledForPrerender",
+ "UserAgentOverrideDiffers",
+ "ForegroundCacheLimit",
+ "BrowsingInstanceNotSwapped",
+ "BackForwardCacheDisabledForDelegate",
+ "OptInUnloadHeaderNotPresent",
+ "UnloadHandlerExistsInMainFrame",
+ "UnloadHandlerExistsInSubFrame",
+ "ServiceWorkerUnregistration",
+ "CacheControlNoStore",
+ "CacheControlNoStoreCookieModified",
+ "CacheControlNoStoreHTTPOnlyCookieModified",
+ "NoResponseHead",
+ "Unknown",
+ "ActivationNavigationsDisallowedForBug1234857",
+ "WebSocket",
+ "WebTransport",
+ "WebRTC",
+ "MainResourceHasCacheControlNoStore",
+ "MainResourceHasCacheControlNoCache",
+ "SubresourceHasCacheControlNoStore",
+ "SubresourceHasCacheControlNoCache",
+ "ContainsPlugins",
+ "DocumentLoaded",
+ "DedicatedWorkerOrWorklet",
+ "OutstandingNetworkRequestOthers",
+ "OutstandingIndexedDBTransaction",
+ "RequestedNotificationsPermission",
+ "RequestedMIDIPermission",
+ "RequestedAudioCapturePermission",
+ "RequestedVideoCapturePermission",
+ "RequestedBackForwardCacheBlockedSensors",
+ "RequestedBackgroundWorkPermission",
+ "BroadcastChannel",
+ "IndexedDBConnection",
+ "WebXR",
+ "SharedWorker",
+ "WebLocks",
+ "WebHID",
+ "WebShare",
+ "RequestedStorageAccessGrant",
+ "WebNfc",
+ "OutstandingNetworkRequestFetch",
+ "OutstandingNetworkRequestXHR",
+ "AppBanner",
+ "Printing",
+ "WebDatabase",
+ "PictureInPicture",
+ "Portal",
+ "SpeechRecognizer",
+ "IdleManager",
+ "PaymentManager",
+ "SpeechSynthesis",
+ "KeyboardLock",
+ "WebOTPService",
+ "OutstandingNetworkRequestDirectSocket",
+ "InjectedJavascript",
+ "InjectedStyleSheet",
+ "Dummy",
+ "ContentSecurityHandler",
+ "ContentWebAuthenticationAPI",
+ "ContentFileChooser",
+ "ContentSerial",
+ "ContentFileSystemAccess",
+ "ContentMediaDevicesDispatcherHost",
+ "ContentWebBluetooth",
+ "ContentWebUSB",
+ "ContentMediaSession",
+ "ContentMediaSessionService",
+ "ContentScreenReader",
+ "EmbedderPopupBlockerTabHelper",
+ "EmbedderSafeBrowsingTriggeredPopupBlocker",
+ "EmbedderSafeBrowsingThreatDetails",
+ "EmbedderAppBannerManager",
+ "EmbedderDomDistillerViewerSource",
+ "EmbedderDomDistillerSelfDeletingRequestDelegate",
+ "EmbedderOomInterventionTabHelper",
+ "EmbedderOfflinePage",
+ "EmbedderChromePasswordManagerClientBindCredentialManager",
+ "EmbedderPermissionRequestManager",
+ "EmbedderModalDialog",
+ "EmbedderExtensions",
+ "EmbedderExtensionMessaging",
+ "EmbedderExtensionMessagingForOpenPort",
+ "EmbedderExtensionSentMessageToCachedFrame"
+ ]
+ },
+ {
+ "id": "BackForwardCacheNotRestoredReasonType",
+ "description": "Types of not restored reasons for back-forward cache.",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "SupportPending",
+ "PageSupportNeeded",
+ "Circumstantial"
+ ]
+ },
+ {
+ "id": "BackForwardCacheNotRestoredExplanation",
"experimental": true,
"type": "object",
"properties": [
{
- "name": "standard",
- "description": "Default standard font size.",
- "optional": true,
- "type": "integer"
+ "name": "type",
+ "description": "Type of the reason",
+ "$ref": "BackForwardCacheNotRestoredReasonType"
},
{
- "name": "fixed",
- "description": "Default fixed font size.",
- "optional": true,
- "type": "integer"
+ "name": "reason",
+ "description": "Not restored reason",
+ "$ref": "BackForwardCacheNotRestoredReason"
}
]
},
{
- "id": "ClientNavigationReason",
+ "id": "BackForwardCacheNotRestoredExplanationTree",
"experimental": true,
- "type": "string",
- "enum": [
- "formSubmissionGet",
- "formSubmissionPost",
- "httpHeaderRefresh",
- "scriptInitiated",
- "metaTagRefresh",
- "pageBlockInterstitial",
- "reload"
+ "type": "object",
+ "properties": [
+ {
+ "name": "url",
+ "description": "URL of each frame",
+ "type": "string"
+ },
+ {
+ "name": "explanations",
+ "description": "Not restored reasons of each frame",
+ "type": "array",
+ "items": {
+ "$ref": "BackForwardCacheNotRestoredExplanation"
+ }
+ },
+ {
+ "name": "children",
+ "description": "Array of children frame",
+ "type": "array",
+ "items": {
+ "$ref": "BackForwardCacheNotRestoredExplanationTree"
+ }
+ }
]
}
],
@@ -11419,6 +15895,13 @@
"experimental": true,
"optional": true,
"type": "string"
+ },
+ {
+ "name": "includeCommandLineAPI",
+ "description": "Specifies whether command line API should be available to the script, defaults\nto false.",
+ "experimental": true,
+ "optional": true,
+ "type": "boolean"
}
],
"returns": [
@@ -11444,7 +15927,8 @@
"type": "string",
"enum": [
"jpeg",
- "png"
+ "png",
+ "webp"
]
},
{
@@ -11465,12 +15949,19 @@
"experimental": true,
"optional": true,
"type": "boolean"
+ },
+ {
+ "name": "captureBeyondViewport",
+ "description": "Capture the screenshot beyond the viewport. Defaults to false.",
+ "experimental": true,
+ "optional": true,
+ "type": "boolean"
}
],
"returns": [
{
"name": "data",
- "description": "Base64-encoded image data.",
+ "description": "Base64-encoded image data. (Encoded as a base64 string when passed over JSON)",
"type": "string"
}
]
@@ -11500,7 +15991,7 @@
},
{
"name": "clearDeviceMetricsOverride",
- "description": "Clears the overriden device metrics.",
+ "description": "Clears the overridden device metrics.",
"experimental": true,
"deprecated": true,
"redirect": "Emulation"
@@ -11514,7 +16005,7 @@
},
{
"name": "clearGeolocationOverride",
- "description": "Clears the overriden Geolocation Position and Error.",
+ "description": "Clears the overridden Geolocation Position and Error.",
"deprecated": true,
"redirect": "Emulation"
},
@@ -11595,6 +16086,13 @@
"description": "Manifest content.",
"optional": true,
"type": "string"
+ },
+ {
+ "name": "parsed",
+ "description": "Parsed manifest properties",
+ "experimental": true,
+ "optional": true,
+ "$ref": "AppManifestParsedProperties"
}
]
},
@@ -11603,14 +16101,44 @@
"experimental": true,
"returns": [
{
- "name": "errors",
+ "name": "installabilityErrors",
"type": "array",
"items": {
- "type": "string"
+ "$ref": "InstallabilityError"
}
}
]
},
+ {
+ "name": "getManifestIcons",
+ "experimental": true,
+ "returns": [
+ {
+ "name": "primaryIcon",
+ "optional": true,
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "name": "getAppId",
+ "description": "Returns the unique (PWA) app id.\nOnly returns values if the feature flag 'WebAppEnableManifestId' is enabled",
+ "experimental": true,
+ "returns": [
+ {
+ "name": "appId",
+ "description": "App id, either from manifest's id attribute or computed from start_url",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "recommendedId",
+ "description": "Recommendation for manifest's id attribute to match current id computed from start_url",
+ "optional": true,
+ "type": "string"
+ }
+ ]
+ },
{
"name": "getCookies",
"description": "Returns all browser cookies. Depending on the backend support, will return detailed cookie\ninformation in the `cookies` field.",
@@ -11645,17 +16173,35 @@
"returns": [
{
"name": "layoutViewport",
- "description": "Metrics relating to the layout viewport.",
+ "description": "Deprecated metrics relating to the layout viewport. Can be in DP or in CSS pixels depending on the `enable-use-zoom-for-dsf` flag. Use `cssLayoutViewport` instead.",
+ "deprecated": true,
"$ref": "LayoutViewport"
},
{
"name": "visualViewport",
- "description": "Metrics relating to the visual viewport.",
+ "description": "Deprecated metrics relating to the visual viewport. Can be in DP or in CSS pixels depending on the `enable-use-zoom-for-dsf` flag. Use `cssVisualViewport` instead.",
+ "deprecated": true,
"$ref": "VisualViewport"
},
{
"name": "contentSize",
- "description": "Size of scrollable area.",
+ "description": "Deprecated size of scrollable area. Can be in DP or in CSS pixels depending on the `enable-use-zoom-for-dsf` flag. Use `cssContentSize` instead.",
+ "deprecated": true,
+ "$ref": "DOM.Rect"
+ },
+ {
+ "name": "cssLayoutViewport",
+ "description": "Metrics relating to the layout viewport in CSS pixels.",
+ "$ref": "LayoutViewport"
+ },
+ {
+ "name": "cssVisualViewport",
+ "description": "Metrics relating to the visual viewport in CSS pixels.",
+ "$ref": "VisualViewport"
+ },
+ {
+ "name": "cssContentSize",
+ "description": "Size of scrollable area in CSS pixels.",
"$ref": "DOM.Rect"
}
]
@@ -11767,6 +16313,13 @@
"description": "Frame id to navigate, if not specified navigates the top frame.",
"optional": true,
"$ref": "FrameId"
+ },
+ {
+ "name": "referrerPolicy",
+ "description": "Referrer-policy used for the navigation.",
+ "experimental": true,
+ "optional": true,
+ "$ref": "ReferrerPolicy"
}
],
"returns": [
@@ -11909,7 +16462,7 @@
"returns": [
{
"name": "data",
- "description": "Base64-encoded pdf data. Empty if |returnAsStream| is specified.",
+ "description": "Base64-encoded pdf data. Empty if |returnAsStream| is specified. (Encoded as a base64 string when passed over JSON)",
"type": "string"
},
{
@@ -12041,6 +16594,46 @@
}
]
},
+ {
+ "name": "getPermissionsPolicyState",
+ "description": "Get Permissions Policy state on given frame.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "frameId",
+ "$ref": "FrameId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "states",
+ "type": "array",
+ "items": {
+ "$ref": "PermissionsPolicyFeatureState"
+ }
+ }
+ ]
+ },
+ {
+ "name": "getOriginTrials",
+ "description": "Get Origin Trials on given frame.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "frameId",
+ "$ref": "FrameId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "originTrials",
+ "type": "array",
+ "items": {
+ "$ref": "OriginTrial"
+ }
+ }
+ ]
+ },
{
"name": "setDeviceMetricsOverride",
"description": "Overrides the values of device screen dimensions (window.screen.width, window.screen.height,\nwindow.innerWidth, window.innerHeight, and \"device-width\"/\"device-height\"-related CSS media\nquery results).",
@@ -12151,6 +16744,15 @@
"name": "fontFamilies",
"description": "Specifies font families to set. If a font family is not specified, it won't be changed.",
"$ref": "FontFamilies"
+ },
+ {
+ "name": "forScripts",
+ "description": "Specifies font families to set for individual scripts.",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "$ref": "ScriptFontFamilies"
+ }
}
]
},
@@ -12186,6 +16788,7 @@
"name": "setDownloadBehavior",
"description": "Set the behavior when downloading a file.",
"experimental": true,
+ "deprecated": true,
"parameters": [
{
"name": "behavior",
@@ -12199,7 +16802,7 @@
},
{
"name": "downloadPath",
- "description": "The default path to save downloaded files to. This is requred if behavior is set to 'allow'",
+ "description": "The default path to save downloaded files to. This is required if behavior is set to 'allow'",
"optional": true,
"type": "string"
}
@@ -12344,13 +16947,16 @@
"experimental": true
},
{
- "name": "setProduceCompilationCache",
- "description": "Forces compilation cache to be generated for every subresource script.",
+ "name": "produceCompilationCache",
+          "description": "Requests backend to produce compilation cache for the specified scripts.\n`scripts` are appended to the list of scripts for which the cache\nwould be produced. The list may be reset during page navigation.\nWhen script with a matching URL is encountered, the cache is optionally\nproduced upon backend discretion, based on internal heuristics.\nSee also: `Page.compilationCacheProduced`.",
"experimental": true,
"parameters": [
{
- "name": "enabled",
- "type": "boolean"
+ "name": "scripts",
+ "type": "array",
+ "items": {
+ "$ref": "CompilationCacheParams"
+ }
}
]
},
@@ -12365,7 +16971,7 @@
},
{
"name": "data",
- "description": "Base64-encoded data",
+ "description": "Base64-encoded data (Encoded as a base64 string when passed over JSON)",
"type": "string"
}
]
@@ -12375,6 +16981,22 @@
"description": "Clears seeded compilation cache.",
"experimental": true
},
+ {
+ "name": "setSPCTransactionMode",
+ "description": "Sets the Secure Payment Confirmation transaction mode.\nhttps://w3c.github.io/secure-payment-confirmation/#sctn-automation-set-spc-transaction-mode",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "mode",
+ "type": "string",
+ "enum": [
+ "none",
+ "autoaccept",
+ "autoreject"
+ ]
+ }
+ ]
+ },
{
"name": "generateTestReport",
"description": "Generates a report for testing.",
@@ -12400,7 +17022,7 @@
},
{
"name": "setInterceptFileChooserDialog",
- "description": "Intercept file chooser requests and transfer control to protocol clients.\nWhen file chooser interception is enabled, native file chooser dialog is not shown.\nInstead, a protocol event `Page.fileChooserOpened` is emitted.\nFile chooser can be handled with `page.handleFileChooser` command.",
+ "description": "Intercept file chooser requests and transfer control to protocol clients.\nWhen file chooser interception is enabled, native file chooser dialog is not shown.\nInstead, a protocol event `Page.fileChooserOpened` is emitted.",
"experimental": true,
"parameters": [
{
@@ -12408,31 +17030,6 @@
"type": "boolean"
}
]
- },
- {
- "name": "handleFileChooser",
- "description": "Accepts or cancels an intercepted file chooser dialog.",
- "experimental": true,
- "parameters": [
- {
- "name": "action",
- "type": "string",
- "enum": [
- "accept",
- "cancel",
- "fallback"
- ]
- },
- {
- "name": "files",
- "description": "Array of absolute file paths to set, only respected with `accept` action.",
- "optional": true,
- "type": "array",
- "items": {
- "type": "string"
- }
- }
- ]
}
],
"events": [
@@ -12449,8 +17046,21 @@
"name": "fileChooserOpened",
"description": "Emitted only when `page.interceptFileChooser` is enabled.",
"parameters": [
+ {
+ "name": "frameId",
+ "description": "Id of the frame containing input node.",
+ "experimental": true,
+ "$ref": "FrameId"
+ },
+ {
+ "name": "backendNodeId",
+ "description": "Input node id.",
+ "experimental": true,
+ "$ref": "DOM.BackendNodeId"
+ },
{
"name": "mode",
+ "description": "Input mode.",
"type": "string",
"enum": [
"selectSingle",
@@ -12501,12 +17111,38 @@
"name": "frameId",
"description": "Id of the frame that has been detached.",
"$ref": "FrameId"
+ },
+ {
+ "name": "reason",
+ "experimental": true,
+ "type": "string",
+ "enum": [
+ "remove",
+ "swap"
+ ]
}
]
},
{
"name": "frameNavigated",
"description": "Fired once navigation of the frame has completed. Frame is now associated with the new loader.",
+ "parameters": [
+ {
+ "name": "frame",
+ "description": "Frame object.",
+ "$ref": "Frame"
+ },
+ {
+ "name": "type",
+ "experimental": true,
+ "$ref": "NavigationType"
+ }
+ ]
+ },
+ {
+ "name": "documentOpened",
+ "description": "Fired when opening document to write to.",
+ "experimental": true,
"parameters": [
{
"name": "frame",
@@ -12538,6 +17174,11 @@
"name": "url",
"description": "The destination URL for the requested navigation.",
"type": "string"
+ },
+ {
+ "name": "disposition",
+ "description": "The disposition for the navigation.",
+ "$ref": "ClientNavigationDisposition"
}
]
},
@@ -12559,16 +17200,7 @@
{
"name": "reason",
"description": "The reason for the navigation.",
- "type": "string",
- "enum": [
- "formSubmissionGet",
- "formSubmissionPost",
- "httpHeaderRefresh",
- "scriptInitiated",
- "metaTagRefresh",
- "pageBlockInterstitial",
- "reload"
- ]
+ "$ref": "ClientNavigationReason"
},
{
"name": "url",
@@ -12603,18 +17235,62 @@
},
{
"name": "downloadWillBegin",
- "description": "Fired when page is about to start a download.",
+ "description": "Fired when page is about to start a download.\nDeprecated. Use Browser.downloadWillBegin instead.",
"experimental": true,
+ "deprecated": true,
"parameters": [
{
"name": "frameId",
"description": "Id of the frame that caused download to begin.",
"$ref": "FrameId"
},
+ {
+ "name": "guid",
+ "description": "Global unique identifier of the download.",
+ "type": "string"
+ },
{
"name": "url",
"description": "URL of the resource being downloaded.",
"type": "string"
+ },
+ {
+ "name": "suggestedFilename",
+ "description": "Suggested file name of the resource (the actual name of the file saved on disk may differ).",
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "name": "downloadProgress",
+ "description": "Fired when download makes progress. Last call has |done| == true.\nDeprecated. Use Browser.downloadProgress instead.",
+ "experimental": true,
+ "deprecated": true,
+ "parameters": [
+ {
+ "name": "guid",
+ "description": "Global unique identifier of the download.",
+ "type": "string"
+ },
+ {
+ "name": "totalBytes",
+ "description": "Total expected bytes to download.",
+ "type": "number"
+ },
+ {
+ "name": "receivedBytes",
+ "description": "Total bytes received.",
+ "type": "number"
+ },
+ {
+ "name": "state",
+ "description": "Download status.",
+ "type": "string",
+ "enum": [
+ "inProgress",
+ "completed",
+ "canceled"
+ ]
}
]
},
@@ -12689,12 +17365,43 @@
"$ref": "Network.LoaderId"
},
{
- "name": "name",
- "type": "string"
+ "name": "name",
+ "type": "string"
+ },
+ {
+ "name": "timestamp",
+ "$ref": "Network.MonotonicTime"
+ }
+ ]
+ },
+ {
+ "name": "backForwardCacheNotUsed",
+ "description": "Fired for failed bfcache history navigations if BackForwardCache feature is enabled. Do\nnot assume any ordering with the Page.frameNavigated event. This event is fired only for\nmain-frame history navigation where the document changes (non-same-document navigations),\nwhen bfcache navigation fails.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "loaderId",
+                        "description": "The loader id for the associated navigation.",
+ "$ref": "Network.LoaderId"
+ },
+ {
+ "name": "frameId",
+ "description": "The frame id of the associated frame.",
+ "$ref": "FrameId"
+ },
+ {
+ "name": "notRestoredExplanations",
+ "description": "Array of reasons why the page could not be cached. This must not be empty.",
+ "type": "array",
+ "items": {
+ "$ref": "BackForwardCacheNotRestoredExplanation"
+ }
},
{
- "name": "timestamp",
- "$ref": "Network.MonotonicTime"
+ "name": "notRestoredExplanationsTree",
+ "description": "Tree structure of reasons why the page could not be cached for each frame.",
+ "optional": true,
+ "$ref": "BackForwardCacheNotRestoredExplanationTree"
}
]
},
@@ -12731,7 +17438,7 @@
"parameters": [
{
"name": "data",
- "description": "Base64-encoded compressed image.",
+ "description": "Base64-encoded compressed image. (Encoded as a base64 string when passed over JSON)",
"type": "string"
},
{
@@ -12798,7 +17505,7 @@
},
{
"name": "data",
- "description": "Base64-encoded data",
+ "description": "Base64-encoded data (Encoded as a base64 string when passed over JSON)",
"type": "string"
}
]
@@ -12833,12 +17540,25 @@
},
{
"name": "enable",
- "description": "Enable collecting and reporting metrics."
+ "description": "Enable collecting and reporting metrics.",
+ "parameters": [
+ {
+ "name": "timeDomain",
+ "description": "Time domain to use for collecting and reporting duration metrics.",
+ "optional": true,
+ "type": "string",
+ "enum": [
+ "timeTicks",
+ "threadTicks"
+ ]
+ }
+ ]
},
{
"name": "setTimeDomain",
"description": "Sets time domain to use for collecting and reporting duration metrics.\nNote that this must be called before enabling metrics collection. Calling\nthis method while metrics collection is enabled returns an error.",
"experimental": true,
+ "deprecated": true,
"parameters": [
{
"name": "timeDomain",
@@ -12888,6 +17608,170 @@
}
]
},
+ {
+ "domain": "PerformanceTimeline",
+ "description": "Reporting of performance timeline events, as specified in\nhttps://w3c.github.io/performance-timeline/#dom-performanceobserver.",
+ "experimental": true,
+ "dependencies": [
+ "DOM",
+ "Network"
+ ],
+ "types": [
+ {
+ "id": "LargestContentfulPaint",
+ "description": "See https://github.com/WICG/LargestContentfulPaint and largest_contentful_paint.idl",
+ "type": "object",
+ "properties": [
+ {
+ "name": "renderTime",
+ "$ref": "Network.TimeSinceEpoch"
+ },
+ {
+ "name": "loadTime",
+ "$ref": "Network.TimeSinceEpoch"
+ },
+ {
+ "name": "size",
+ "description": "The number of pixels being painted.",
+ "type": "number"
+ },
+ {
+ "name": "elementId",
+ "description": "The id attribute of the element, if available.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "url",
+ "description": "The URL of the image (may be trimmed).",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "nodeId",
+ "optional": true,
+ "$ref": "DOM.BackendNodeId"
+ }
+ ]
+ },
+ {
+ "id": "LayoutShiftAttribution",
+ "type": "object",
+ "properties": [
+ {
+ "name": "previousRect",
+ "$ref": "DOM.Rect"
+ },
+ {
+ "name": "currentRect",
+ "$ref": "DOM.Rect"
+ },
+ {
+ "name": "nodeId",
+ "optional": true,
+ "$ref": "DOM.BackendNodeId"
+ }
+ ]
+ },
+ {
+ "id": "LayoutShift",
+ "description": "See https://wicg.github.io/layout-instability/#sec-layout-shift and layout_shift.idl",
+ "type": "object",
+ "properties": [
+ {
+ "name": "value",
+ "description": "Score increment produced by this event.",
+ "type": "number"
+ },
+ {
+ "name": "hadRecentInput",
+ "type": "boolean"
+ },
+ {
+ "name": "lastInputTime",
+ "$ref": "Network.TimeSinceEpoch"
+ },
+ {
+ "name": "sources",
+ "type": "array",
+ "items": {
+ "$ref": "LayoutShiftAttribution"
+ }
+ }
+ ]
+ },
+ {
+ "id": "TimelineEvent",
+ "type": "object",
+ "properties": [
+ {
+ "name": "frameId",
+ "description": "Identifies the frame that this event is related to. Empty for non-frame targets.",
+ "$ref": "Page.FrameId"
+ },
+ {
+ "name": "type",
+                        "description": "The event type, as specified in https://w3c.github.io/performance-timeline/#dom-performanceentry-entrytype\nThis determines which of the optional \"details\" fields is present.",
+ "type": "string"
+ },
+ {
+ "name": "name",
+ "description": "Name may be empty depending on the type.",
+ "type": "string"
+ },
+ {
+ "name": "time",
+ "description": "Time in seconds since Epoch, monotonically increasing within document lifetime.",
+ "$ref": "Network.TimeSinceEpoch"
+ },
+ {
+ "name": "duration",
+ "description": "Event duration, if applicable.",
+ "optional": true,
+ "type": "number"
+ },
+ {
+ "name": "lcpDetails",
+ "optional": true,
+ "$ref": "LargestContentfulPaint"
+ },
+ {
+ "name": "layoutShiftDetails",
+ "optional": true,
+ "$ref": "LayoutShift"
+ }
+ ]
+ }
+ ],
+ "commands": [
+ {
+ "name": "enable",
+ "description": "Previously buffered events would be reported before method returns.\nSee also: timelineEventAdded",
+ "parameters": [
+ {
+ "name": "eventTypes",
+ "description": "The types of event to report, as specified in\nhttps://w3c.github.io/performance-timeline/#dom-performanceentry-entrytype\nThe specified filter overrides any previous filters, passing empty\nfilter disables recording.\nNote that not all types exposed to the web platform are currently supported.",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ ]
+ }
+ ],
+ "events": [
+ {
+ "name": "timelineEventAdded",
+ "description": "Sent when a performance timeline event is added. See reportPerformanceTimeline method.",
+ "parameters": [
+ {
+ "name": "event",
+ "$ref": "TimelineEvent"
+ }
+ ]
+ }
+ ]
+ },
{
"domain": "Security",
"description": "Security",
@@ -12898,25 +17782,190 @@
"type": "integer"
},
{
- "id": "MixedContentType",
- "description": "A description of mixed content (HTTP resources on HTTPS pages), as defined by\nhttps://www.w3.org/TR/mixed-content/#categories",
+ "id": "MixedContentType",
+ "description": "A description of mixed content (HTTP resources on HTTPS pages), as defined by\nhttps://www.w3.org/TR/mixed-content/#categories",
+ "type": "string",
+ "enum": [
+ "blockable",
+ "optionally-blockable",
+ "none"
+ ]
+ },
+ {
+ "id": "SecurityState",
+ "description": "The security level of a page or resource.",
+ "type": "string",
+ "enum": [
+ "unknown",
+ "neutral",
+ "insecure",
+ "secure",
+ "info",
+ "insecure-broken"
+ ]
+ },
+ {
+ "id": "CertificateSecurityState",
+ "description": "Details about the security state of the page certificate.",
+ "experimental": true,
+ "type": "object",
+ "properties": [
+ {
+ "name": "protocol",
+ "description": "Protocol name (e.g. \"TLS 1.2\" or \"QUIC\").",
+ "type": "string"
+ },
+ {
+ "name": "keyExchange",
+ "description": "Key Exchange used by the connection, or the empty string if not applicable.",
+ "type": "string"
+ },
+ {
+ "name": "keyExchangeGroup",
+ "description": "(EC)DH group used by the connection, if applicable.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "cipher",
+ "description": "Cipher name.",
+ "type": "string"
+ },
+ {
+ "name": "mac",
+ "description": "TLS MAC. Note that AEAD ciphers do not have separate MACs.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "certificate",
+ "description": "Page certificate.",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "subjectName",
+ "description": "Certificate subject name.",
+ "type": "string"
+ },
+ {
+ "name": "issuer",
+ "description": "Name of the issuing CA.",
+ "type": "string"
+ },
+ {
+ "name": "validFrom",
+ "description": "Certificate valid from date.",
+ "$ref": "Network.TimeSinceEpoch"
+ },
+ {
+ "name": "validTo",
+ "description": "Certificate valid to (expiration) date",
+ "$ref": "Network.TimeSinceEpoch"
+ },
+ {
+ "name": "certificateNetworkError",
+ "description": "The highest priority network error code, if the certificate has an error.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "certificateHasWeakSignature",
+                    "description": "True if the certificate uses a weak signature algorithm.",
+ "type": "boolean"
+ },
+ {
+ "name": "certificateHasSha1Signature",
+ "description": "True if the certificate has a SHA1 signature in the chain.",
+ "type": "boolean"
+ },
+ {
+ "name": "modernSSL",
+ "description": "True if modern SSL",
+ "type": "boolean"
+ },
+ {
+ "name": "obsoleteSslProtocol",
+ "description": "True if the connection is using an obsolete SSL protocol.",
+ "type": "boolean"
+ },
+ {
+ "name": "obsoleteSslKeyExchange",
+ "description": "True if the connection is using an obsolete SSL key exchange.",
+ "type": "boolean"
+ },
+ {
+ "name": "obsoleteSslCipher",
+ "description": "True if the connection is using an obsolete SSL cipher.",
+ "type": "boolean"
+ },
+ {
+ "name": "obsoleteSslSignature",
+ "description": "True if the connection is using an obsolete SSL signature.",
+ "type": "boolean"
+ }
+ ]
+ },
+ {
+ "id": "SafetyTipStatus",
+ "experimental": true,
"type": "string",
"enum": [
- "blockable",
- "optionally-blockable",
- "none"
+ "badReputation",
+ "lookalike"
]
},
{
- "id": "SecurityState",
- "description": "The security level of a page or resource.",
- "type": "string",
- "enum": [
- "unknown",
- "neutral",
- "insecure",
- "secure",
- "info"
+ "id": "SafetyTipInfo",
+ "experimental": true,
+ "type": "object",
+ "properties": [
+ {
+ "name": "safetyTipStatus",
+ "description": "Describes whether the page triggers any safety tips or reputation warnings. Default is unknown.",
+ "$ref": "SafetyTipStatus"
+ },
+ {
+ "name": "safeUrl",
+ "description": "The URL the safety tip suggested (\"Did you mean?\"). Only filled in for lookalike matches.",
+ "optional": true,
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "id": "VisibleSecurityState",
+ "description": "Security state information about the page.",
+ "experimental": true,
+ "type": "object",
+ "properties": [
+ {
+ "name": "securityState",
+ "description": "The security level of the page.",
+ "$ref": "SecurityState"
+ },
+ {
+ "name": "certificateSecurityState",
+ "description": "Security state details about the page certificate.",
+ "optional": true,
+ "$ref": "CertificateSecurityState"
+ },
+ {
+ "name": "safetyTipInfo",
+ "description": "The type of Safety Tip triggered on the page. Note that this field will be set even if the Safety Tip UI was not actually shown.",
+ "optional": true,
+ "$ref": "SafetyTipInfo"
+ },
+ {
+ "name": "securityStateIssueIds",
+ "description": "Array of security state issues ids.",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
]
},
{
@@ -13096,8 +18145,21 @@
]
},
{
- "name": "securityStateChanged",
+ "name": "visibleSecurityStateChanged",
"description": "The security state of the page changed.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "visibleSecurityState",
+ "description": "Security state information about the page.",
+ "$ref": "VisibleSecurityState"
+ }
+ ]
+ },
+ {
+ "name": "securityStateChanged",
+ "description": "The security state of the page changed. No longer being sent.",
+ "deprecated": true,
"parameters": [
{
"name": "securityState",
@@ -13112,7 +18174,8 @@
},
{
"name": "explanations",
- "description": "List of explanations for the security state. If the overall security state is `insecure` or\n`warning`, at least one corresponding explanation should be included.",
+ "description": "Previously a list of explanations for the security state. Now always\nempty.",
+ "deprecated": true,
"type": "array",
"items": {
"$ref": "SecurityStateExplanation"
@@ -13126,7 +18189,8 @@
},
{
"name": "summary",
- "description": "Overrides user-visible description of the state.",
+ "description": "Overrides user-visible description of the state. Always omitted.",
+ "deprecated": true,
"optional": true,
"type": "string"
}
@@ -13137,6 +18201,9 @@
{
"domain": "ServiceWorker",
"experimental": true,
+ "dependencies": [
+ "Target"
+ ],
"types": [
{
"id": "RegistrationID",
@@ -13309,6 +18376,23 @@
}
]
},
+ {
+ "name": "dispatchPeriodicSyncEvent",
+ "parameters": [
+ {
+ "name": "origin",
+ "type": "string"
+ },
+ {
+ "name": "registrationId",
+ "$ref": "RegistrationID"
+ },
+ {
+ "name": "tag",
+ "type": "string"
+ }
+ ]
+ },
{
"name": "enable"
},
@@ -13373,102 +18457,274 @@
"name": "updateRegistration",
"parameters": [
{
- "name": "scopeURL",
+ "name": "scopeURL",
+ "type": "string"
+ }
+ ]
+ }
+ ],
+ "events": [
+ {
+ "name": "workerErrorReported",
+ "parameters": [
+ {
+ "name": "errorMessage",
+ "$ref": "ServiceWorkerErrorMessage"
+ }
+ ]
+ },
+ {
+ "name": "workerRegistrationUpdated",
+ "parameters": [
+ {
+ "name": "registrations",
+ "type": "array",
+ "items": {
+ "$ref": "ServiceWorkerRegistration"
+ }
+ }
+ ]
+ },
+ {
+ "name": "workerVersionUpdated",
+ "parameters": [
+ {
+ "name": "versions",
+ "type": "array",
+ "items": {
+ "$ref": "ServiceWorkerVersion"
+ }
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "domain": "Storage",
+ "experimental": true,
+ "dependencies": [
+ "Browser",
+ "Network"
+ ],
+ "types": [
+ {
+ "id": "StorageType",
+ "description": "Enum of possible storage types.",
+ "type": "string",
+ "enum": [
+ "appcache",
+ "cookies",
+ "file_systems",
+ "indexeddb",
+ "local_storage",
+ "shader_cache",
+ "websql",
+ "service_workers",
+ "cache_storage",
+ "interest_groups",
+ "all",
+ "other"
+ ]
+ },
+ {
+ "id": "UsageForType",
+ "description": "Usage for a storage type.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "storageType",
+ "description": "Name of storage type.",
+ "$ref": "StorageType"
+ },
+ {
+ "name": "usage",
+ "description": "Storage usage (bytes).",
+ "type": "number"
+ }
+ ]
+ },
+ {
+ "id": "TrustTokens",
+ "description": "Pair of issuer origin and number of available (signed, but not used) Trust\nTokens from that issuer.",
+ "experimental": true,
+ "type": "object",
+ "properties": [
+ {
+ "name": "issuerOrigin",
+ "type": "string"
+ },
+ {
+ "name": "count",
+ "type": "number"
+ }
+ ]
+ },
+ {
+ "id": "InterestGroupAccessType",
+ "description": "Enum of interest group access types.",
+ "type": "string",
+ "enum": [
+ "join",
+ "leave",
+ "update",
+ "bid",
+ "win"
+ ]
+ },
+ {
+ "id": "InterestGroupAd",
+ "description": "Ad advertising element inside an interest group.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "renderUrl",
+ "type": "string"
+ },
+ {
+ "name": "metadata",
+ "optional": true,
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "id": "InterestGroupDetails",
+ "description": "The full details of an interest group.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "ownerOrigin",
+ "type": "string"
+ },
+ {
+ "name": "name",
+ "type": "string"
+ },
+ {
+ "name": "expirationTime",
+ "$ref": "Network.TimeSinceEpoch"
+ },
+ {
+ "name": "joiningOrigin",
+ "type": "string"
+ },
+ {
+ "name": "biddingUrl",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "biddingWasmHelperUrl",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "updateUrl",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "trustedBiddingSignalsUrl",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "trustedBiddingSignalsKeys",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "userBiddingSignals",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "ads",
+ "type": "array",
+ "items": {
+ "$ref": "InterestGroupAd"
+ }
+ },
+ {
+ "name": "adComponents",
+ "type": "array",
+ "items": {
+ "$ref": "InterestGroupAd"
+ }
+ }
+ ]
+ }
+ ],
+ "commands": [
+ {
+ "name": "clearDataForOrigin",
+ "description": "Clears storage for origin.",
+ "parameters": [
+ {
+ "name": "origin",
+ "description": "Security origin.",
+ "type": "string"
+ },
+ {
+ "name": "storageTypes",
+ "description": "Comma separated list of StorageType to clear.",
"type": "string"
}
]
- }
- ],
- "events": [
+ },
{
- "name": "workerErrorReported",
+ "name": "getCookies",
+ "description": "Returns all browser cookies.",
"parameters": [
{
- "name": "errorMessage",
- "$ref": "ServiceWorkerErrorMessage"
+ "name": "browserContextId",
+ "description": "Browser context to use when called on the browser endpoint.",
+ "optional": true,
+ "$ref": "Browser.BrowserContextID"
}
- ]
- },
- {
- "name": "workerRegistrationUpdated",
- "parameters": [
+ ],
+ "returns": [
{
- "name": "registrations",
+ "name": "cookies",
+ "description": "Array of cookie objects.",
"type": "array",
"items": {
- "$ref": "ServiceWorkerRegistration"
+ "$ref": "Network.Cookie"
}
}
]
},
{
- "name": "workerVersionUpdated",
+ "name": "setCookies",
+ "description": "Sets given cookies.",
"parameters": [
{
- "name": "versions",
+ "name": "cookies",
+ "description": "Cookies to be set.",
"type": "array",
"items": {
- "$ref": "ServiceWorkerVersion"
+ "$ref": "Network.CookieParam"
}
- }
- ]
- }
- ]
- },
- {
- "domain": "Storage",
- "experimental": true,
- "types": [
- {
- "id": "StorageType",
- "description": "Enum of possible storage types.",
- "type": "string",
- "enum": [
- "appcache",
- "cookies",
- "file_systems",
- "indexeddb",
- "local_storage",
- "shader_cache",
- "websql",
- "service_workers",
- "cache_storage",
- "all",
- "other"
- ]
- },
- {
- "id": "UsageForType",
- "description": "Usage for a storage type.",
- "type": "object",
- "properties": [
- {
- "name": "storageType",
- "description": "Name of storage type.",
- "$ref": "StorageType"
},
{
- "name": "usage",
- "description": "Storage usage (bytes).",
- "type": "number"
+ "name": "browserContextId",
+ "description": "Browser context to use when called on the browser endpoint.",
+ "optional": true,
+ "$ref": "Browser.BrowserContextID"
}
]
- }
- ],
- "commands": [
+ },
{
- "name": "clearDataForOrigin",
- "description": "Clears storage for origin.",
+ "name": "clearCookies",
+ "description": "Clears cookies.",
"parameters": [
{
- "name": "origin",
- "description": "Security origin.",
- "type": "string"
- },
- {
- "name": "storageTypes",
- "description": "Comma separated list of StorageType to clear.",
- "type": "string"
+ "name": "browserContextId",
+ "description": "Browser context to use when called on the browser endpoint.",
+ "optional": true,
+ "$ref": "Browser.BrowserContextID"
}
]
},
@@ -13493,6 +18749,11 @@
"description": "Storage quota (bytes).",
"type": "number"
},
+ {
+ "name": "overrideActive",
+ "description": "Whether or not the origin has an active storage quota override",
+ "type": "boolean"
+ },
{
"name": "usageBreakdown",
"description": "Storage usage per type (bytes).",
@@ -13503,6 +18764,24 @@
}
]
},
+ {
+ "name": "overrideQuotaForOrigin",
+ "description": "Override quota for the specified origin",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "origin",
+ "description": "Security origin.",
+ "type": "string"
+ },
+ {
+ "name": "quotaSize",
+ "description": "The quota size (in bytes) to override the original quota with.\nIf this is called multiple times, the overridden quota will be equal to\nthe quotaSize provided in the final call. If this is called without\nspecifying a quotaSize, the quota will be reset to the default value for\nthe specified origin. If this is called multiple times with different\norigins, the override will be maintained for each origin until it is\ndisabled (called without a quotaSize).",
+ "optional": true,
+ "type": "number"
+ }
+ ]
+ },
{
"name": "trackCacheStorageForOrigin",
"description": "Registers origin to be notified when an update occurs to its cache storage list.",
@@ -13546,6 +18825,70 @@
"type": "string"
}
]
+ },
+ {
+ "name": "getTrustTokens",
+ "description": "Returns the number of stored Trust Tokens per issuer for the\ncurrent browsing context.",
+ "experimental": true,
+ "returns": [
+ {
+ "name": "tokens",
+ "type": "array",
+ "items": {
+ "$ref": "TrustTokens"
+ }
+ }
+ ]
+ },
+ {
+ "name": "clearTrustTokens",
+ "description": "Removes all Trust Tokens issued by the provided issuerOrigin.\nLeaves other stored data, including the issuer's Redemption Records, intact.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "issuerOrigin",
+ "type": "string"
+ }
+ ],
+ "returns": [
+ {
+ "name": "didDeleteTokens",
+ "description": "True if any tokens were deleted, false otherwise.",
+ "type": "boolean"
+ }
+ ]
+ },
+ {
+ "name": "getInterestGroupDetails",
+ "description": "Gets details for a named interest group.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "ownerOrigin",
+ "type": "string"
+ },
+ {
+ "name": "name",
+ "type": "string"
+ }
+ ],
+ "returns": [
+ {
+ "name": "details",
+ "$ref": "InterestGroupDetails"
+ }
+ ]
+ },
+ {
+ "name": "setInterestGroupTracking",
+ "description": "Enables/Disables issuing of interestGroupAccessed events.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "enable",
+ "type": "boolean"
+ }
+ ]
}
],
"events": [
@@ -13607,6 +18950,28 @@
"type": "string"
}
]
+ },
+ {
+ "name": "interestGroupAccessed",
+ "description": "One of the interest groups was accessed by the associated page.",
+ "parameters": [
+ {
+ "name": "accessTime",
+ "$ref": "Network.TimeSinceEpoch"
+ },
+ {
+ "name": "type",
+ "$ref": "InterestGroupAccessType"
+ },
+ {
+ "name": "ownerOrigin",
+ "type": "string"
+ },
+ {
+ "name": "name",
+ "type": "string"
+ }
+ ]
}
]
},
@@ -13630,6 +18995,18 @@
"description": "PCI ID of the GPU device, if available; 0 otherwise.",
"type": "number"
},
+ {
+ "name": "subSysId",
+ "description": "Sub sys ID of the GPU, only available on Windows.",
+ "optional": true,
+ "type": "number"
+ },
+ {
+ "name": "revision",
+ "description": "Revision of the GPU, only available on Windows.",
+ "optional": true,
+ "type": "number"
+ },
{
"name": "vendorString",
"description": "String description of the GPU vendor, if the PCI ID is not available.",
@@ -13727,6 +19104,16 @@
"yuv444"
]
},
+ {
+ "id": "ImageType",
+ "description": "Image format of a given image.",
+ "type": "string",
+ "enum": [
+ "jpeg",
+ "webp",
+ "unknown"
+ ]
+ },
{
"id": "ImageDecodeAcceleratorCapability",
"description": "Describes a supported image decoding profile with its associated minimum and\nmaximum resolutions and subsampling.",
@@ -13735,7 +19122,7 @@
{
"name": "imageType",
"description": "Image coded, e.g. Jpeg.",
- "type": "string"
+ "$ref": "ImageType"
},
{
"name": "maxDimensions",
@@ -13895,11 +19282,6 @@
"description": "Unique identifier of attached debugging session.",
"type": "string"
},
- {
- "id": "BrowserContextID",
- "experimental": true,
- "type": "string"
- },
{
"id": "TargetInfo",
"type": "object",
@@ -13931,11 +19313,24 @@
"optional": true,
"$ref": "TargetID"
},
+ {
+ "name": "canAccessOpener",
+ "description": "Whether the target has access to the originating window.",
+ "experimental": true,
+ "type": "boolean"
+ },
+ {
+ "name": "openerFrameId",
+ "description": "Frame id of originating window (is only set if target has an opener).",
+ "experimental": true,
+ "optional": true,
+ "$ref": "Page.FrameId"
+ },
{
"name": "browserContextId",
"experimental": true,
"optional": true,
- "$ref": "BrowserContextID"
+ "$ref": "Browser.BrowserContextID"
}
]
},
@@ -13976,8 +19371,7 @@
},
{
"name": "flatten",
- "description": "Enables \"flat\" access to the session via specifying sessionId attribute in the commands.",
- "experimental": true,
+ "description": "Enables \"flat\" access to the session via specifying sessionId attribute in the commands.\nWe plan to make this the default, deprecate non-flattened mode,\nand eventually retire it. See crbug.com/991325.",
"optional": true,
"type": "boolean"
}
@@ -14014,6 +19408,8 @@
"returns": [
{
"name": "success",
+ "description": "Always set to true. If an error occurs, the response indicates protocol error.",
+ "deprecated": true,
"type": "boolean"
}
]
@@ -14039,11 +19435,40 @@
"name": "createBrowserContext",
"description": "Creates a new empty BrowserContext. Similar to an incognito profile but you can have more than\none.",
"experimental": true,
+ "parameters": [
+ {
+ "name": "disposeOnDetach",
+ "description": "If specified, disposes this context when debugging session disconnects.",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "proxyServer",
+ "description": "Proxy server, similar to the one passed to --proxy-server",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "proxyBypassList",
+ "description": "Proxy bypass list, similar to the one passed to --proxy-bypass-list",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "originsWithUniversalNetworkAccess",
+ "description": "An optional list of origins to grant unlimited cross-origin access to.\nParts of the URL other than those constituting origin are ignored.",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ ],
"returns": [
{
"name": "browserContextId",
"description": "The id of the context created.",
- "$ref": "BrowserContextID"
+ "$ref": "Browser.BrowserContextID"
}
]
},
@@ -14057,7 +19482,7 @@
"description": "An array of browser context ids.",
"type": "array",
"items": {
- "$ref": "BrowserContextID"
+ "$ref": "Browser.BrowserContextID"
}
}
]
@@ -14068,7 +19493,7 @@
"parameters": [
{
"name": "url",
- "description": "The initial URL the page will be navigated to.",
+ "description": "The initial URL the page will be navigated to. An empty string indicates about:blank.",
"type": "string"
},
{
@@ -14086,8 +19511,9 @@
{
"name": "browserContextId",
"description": "The browser context to create the page in.",
+ "experimental": true,
"optional": true,
- "$ref": "BrowserContextID"
+ "$ref": "Browser.BrowserContextID"
},
{
"name": "enableBeginFrameControl",
@@ -14143,7 +19569,7 @@
"parameters": [
{
"name": "browserContextId",
- "$ref": "BrowserContextID"
+ "$ref": "Browser.BrowserContextID"
}
]
},
@@ -14181,7 +19607,8 @@
},
{
"name": "sendMessageToTarget",
- "description": "Sends protocol message over session with given id.",
+ "description": "Sends protocol message over session with given id.\nConsider using flat mode instead; see commands attachToTarget, setAutoAttach,\nand crbug.com/991325.",
+ "deprecated": true,
"parameters": [
{
"name": "message",
@@ -14204,7 +19631,7 @@
},
{
"name": "setAutoAttach",
- "description": "Controls whether to automatically attach to new targets which are considered to be related to\nthis one. When turned on, attaches to all existing related targets as well. When turned off,\nautomatically detaches from all currently attached targets.",
+ "description": "Controls whether to automatically attach to new targets which are considered to be related to\nthis one. When turned on, attaches to all existing related targets as well. When turned off,\nautomatically detaches from all currently attached targets.\nThis also clears all targets added by `autoAttachRelated` from the list of targets to watch\nfor creation of related targets.",
"experimental": true,
"parameters": [
{
@@ -14219,13 +19646,28 @@
},
{
"name": "flatten",
- "description": "Enables \"flat\" access to the session via specifying sessionId attribute in the commands.",
- "experimental": true,
+ "description": "Enables \"flat\" access to the session via specifying sessionId attribute in the commands.\nWe plan to make this the default, deprecate non-flattened mode,\nand eventually retire it. See crbug.com/991325.",
"optional": true,
"type": "boolean"
}
]
},
+ {
+ "name": "autoAttachRelated",
+ "description": "Adds the specified target to the list of targets that will be monitored for any related target\ncreation (such as child frames, child workers and new versions of service worker) and reported\nthrough `attachedToTarget`. The specified target is also auto-attached.\nThis cancels the effect of any previous `setAutoAttach` and is also cancelled by subsequent\n`setAutoAttach`. Only available at the Browser target.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "targetId",
+ "$ref": "TargetID"
+ },
+ {
+ "name": "waitForDebuggerOnStart",
+ "description": "Whether to pause new targets when attaching to them. Use `Runtime.runIfWaitingForDebugger`\nto run paused targets.",
+ "type": "boolean"
+ }
+ ]
+ },
{
"name": "setDiscoverTargets",
"description": "Controls whether to discover available targets and notify via\n`targetCreated/targetInfoChanged/targetDestroyed` events.",
@@ -14505,12 +19947,32 @@
]
},
{
- "id": "StreamCompression",
- "description": "Compression type to use for traces returned via streams.",
+ "id": "StreamCompression",
+ "description": "Compression type to use for traces returned via streams.",
+ "type": "string",
+ "enum": [
+ "none",
+ "gzip"
+ ]
+ },
+ {
+ "id": "MemoryDumpLevelOfDetail",
+ "description": "Details exposed when memory request explicitly declared.\nKeep consistent with memory_dump_request_args.h and\nmemory_instrumentation.mojom",
+ "type": "string",
+ "enum": [
+ "background",
+ "light",
+ "detailed"
+ ]
+ },
+ {
+ "id": "TracingBackend",
+ "description": "Backend type to use for tracing. `chrome` uses the Chrome-integrated\ntracing service and is supported on all platforms. `system` is only\nsupported on Chrome OS and uses the Perfetto system tracing service.\n`auto` chooses `system` when the perfettoConfig provided to Tracing.start\nspecifies at least one non-Chrome data source; otherwise uses `chrome`.",
"type": "string",
"enum": [
- "none",
- "gzip"
+ "auto",
+ "chrome",
+ "system"
]
}
],
@@ -14547,6 +20009,20 @@
{
"name": "requestMemoryDump",
"description": "Request a global memory dump.",
+ "parameters": [
+ {
+ "name": "deterministic",
+ "description": "Enables more deterministic results by forcing garbage collection",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "levelOfDetail",
+ "description": "Specifies level of details in memory dump. Defaults to \"detailed\".",
+ "optional": true,
+ "$ref": "MemoryDumpLevelOfDetail"
+ }
+ ],
"returns": [
{
"name": "dumpGuid",
@@ -14610,6 +20086,18 @@
"name": "traceConfig",
"optional": true,
"$ref": "TraceConfig"
+ },
+ {
+ "name": "perfettoConfig",
+ "description": "Base64-encoded serialized perfetto.protos.TraceConfig protobuf message\nWhen specified, the parameters `categories`, `options`, `traceConfig`\nare ignored. (Encoded as a base64 string when passed over JSON)",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "tracingBackend",
+ "description": "Backend type (defaults to `auto`)",
+ "optional": true,
+ "$ref": "TracingBackend"
}
]
}
@@ -14685,7 +20173,6 @@
{
"domain": "Fetch",
"description": "A domain for letting clients substitute browser's network layer with client code.",
- "experimental": true,
"dependencies": [
"Network",
"IO",
@@ -14699,8 +20186,7 @@
},
{
"id": "RequestStage",
- "description": "Stages of the request to handle. Request will intercept before the request is\nsent. Response will intercept after the response is received (but before response\nbody is received.",
- "experimental": true,
+ "description": "Stages of the request to handle. Request will intercept before the request is\nsent. Response will intercept after the response is received (but before response\nbody is received).",
"type": "string",
"enum": [
"Request",
@@ -14709,12 +20195,11 @@
},
{
"id": "RequestPattern",
- "experimental": true,
"type": "object",
"properties": [
{
"name": "urlPattern",
- "description": "Wildcards ('*' -> zero or more, '?' -> exactly one) are allowed. Escape character is\nbackslash. Omitting is equivalent to \"*\".",
+ "description": "Wildcards (`'*'` -> zero or more, `'?'` -> exactly one) are allowed. Escape character is\nbackslash. Omitting is equivalent to `\"*\"`.",
"optional": true,
"type": "string"
},
@@ -14726,7 +20211,7 @@
},
{
"name": "requestStage",
- "description": "Stage at wich to begin intercepting requests. Default is Request.",
+ "description": "Stage at which to begin intercepting requests. Default is Request.",
"optional": true,
"$ref": "RequestStage"
}
@@ -14750,7 +20235,6 @@
{
"id": "AuthChallenge",
"description": "Authorization challenge for HTTP status code 401 or 407.",
- "experimental": true,
"type": "object",
"properties": [
{
@@ -14783,7 +20267,6 @@
{
"id": "AuthChallengeResponse",
"description": "Response to an AuthChallenge.",
- "experimental": true,
"type": "object",
"properties": [
{
@@ -14870,20 +20353,27 @@
{
"name": "responseHeaders",
"description": "Response headers.",
+ "optional": true,
"type": "array",
"items": {
"$ref": "HeaderEntry"
}
},
+ {
+ "name": "binaryResponseHeaders",
+ "description": "Alternative way of specifying response headers as a \\0-separated\nseries of name: value pairs. Prefer the above method unless you\nneed to represent some non-UTF8 values that can't be transmitted\nover the protocol as text. (Encoded as a base64 string when passed over JSON)",
+ "optional": true,
+ "type": "string"
+ },
{
"name": "body",
- "description": "A response body.",
+ "description": "A response body. If absent, original response body will be used if\nthe request is intercepted at the response stage and empty body\nwill be used if the request is intercepted at the request stage. (Encoded as a base64 string when passed over JSON)",
"optional": true,
"type": "string"
},
{
"name": "responsePhrase",
- "description": "A textual representation of responseCode.\nIf absent, a standard phrase mathcing responseCode is used.",
+ "description": "A textual representation of responseCode.\nIf absent, a standard phrase matching responseCode is used.",
"optional": true,
"type": "string"
}
@@ -14912,18 +20402,25 @@
},
{
"name": "postData",
- "description": "If set, overrides the post data in the request.",
+ "description": "If set, overrides the post data in the request. (Encoded as a base64 string when passed over JSON)",
"optional": true,
"type": "string"
},
{
"name": "headers",
- "description": "If set, overrides the request headrts.",
+ "description": "If set, overrides the request headers.",
"optional": true,
"type": "array",
"items": {
"$ref": "HeaderEntry"
}
+ },
+ {
+ "name": "interceptResponse",
+ "description": "If set, overrides response interception behavior for this request.",
+ "experimental": true,
+ "optional": true,
+ "type": "boolean"
}
]
},
@@ -14943,6 +20440,45 @@
}
]
},
+ {
+ "name": "continueResponse",
+ "description": "Continues loading of the paused response, optionally modifying the\nresponse headers. If either responseCode or headers are modified, all of them\nmust be present.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "requestId",
+ "description": "An id the client received in requestPaused event.",
+ "$ref": "RequestId"
+ },
+ {
+ "name": "responseCode",
+ "description": "An HTTP response code. If absent, original response code will be used.",
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "responsePhrase",
+ "description": "A textual representation of responseCode.\nIf absent, a standard phrase matching responseCode is used.",
+ "optional": true,
+ "type": "string"
+ },
+ {
+ "name": "responseHeaders",
+ "description": "Response headers. If absent, original response headers will be used.",
+ "optional": true,
+ "type": "array",
+ "items": {
+ "$ref": "HeaderEntry"
+ }
+ },
+ {
+ "name": "binaryResponseHeaders",
+ "description": "Alternative way of specifying response headers as a \\0-separated\nseries of name: value pairs. Prefer the above method unless you\nneed to represent some non-UTF8 values that can't be transmitted\nover the protocol as text. (Encoded as a base64 string when passed over JSON)",
+ "optional": true,
+ "type": "string"
+ }
+ ]
+ },
{
"name": "getResponseBody",
"description": "Causes the body of the response to be received from the server and\nreturned as a single string. May only be issued for a request that\nis paused in the Response stage and is mutually exclusive with\ntakeResponseBodyForInterceptionAsStream. Calling other methods that\naffect the request or disabling fetch domain before body is received\nresults in an undefined behavior.",
@@ -15020,6 +20556,12 @@
"optional": true,
"type": "integer"
},
+ {
+ "name": "responseStatusText",
+ "description": "Response status text if intercepted at response stage.",
+ "optional": true,
+ "type": "string"
+ },
{
"name": "responseHeaders",
"description": "Response headers if intercepted at the response stage.",
@@ -15076,8 +20618,8 @@
"experimental": true,
"types": [
{
- "id": "ContextId",
- "description": "Context's UUID in string",
+ "id": "GraphObjectId",
+ "description": "An unique ID for a graph object (AudioContext, AudioNode, AudioParam) in Web Audio API",
"type": "string"
},
{
@@ -15099,6 +20641,44 @@
"closed"
]
},
+ {
+ "id": "NodeType",
+ "description": "Enum of AudioNode types",
+ "type": "string"
+ },
+ {
+ "id": "ChannelCountMode",
+ "description": "Enum of AudioNode::ChannelCountMode from the spec",
+ "type": "string",
+ "enum": [
+ "clamped-max",
+ "explicit",
+ "max"
+ ]
+ },
+ {
+ "id": "ChannelInterpretation",
+ "description": "Enum of AudioNode::ChannelInterpretation from the spec",
+ "type": "string",
+ "enum": [
+ "discrete",
+ "speakers"
+ ]
+ },
+ {
+ "id": "ParamType",
+ "description": "Enum of AudioParam types",
+ "type": "string"
+ },
+ {
+ "id": "AutomationRate",
+ "description": "Enum of AudioParam::AutomationRate from the spec",
+ "type": "string",
+ "enum": [
+ "a-rate",
+ "k-rate"
+ ]
+ },
{
"id": "ContextRealtimeData",
"description": "Fields in AudioContext that change in real-time.",
@@ -15111,7 +20691,7 @@
},
{
"name": "renderCapacity",
- "description": "The time spent on rendering graph divided by render qunatum duration,\nand multiplied by 100. 100 means the audio renderer reached the full\ncapacity and glitch may occur.",
+ "description": "The time spent on rendering graph divided by render quantum duration,\nand multiplied by 100. 100 means the audio renderer reached the full\ncapacity and glitch may occur.",
"type": "number"
},
{
@@ -15133,7 +20713,7 @@
"properties": [
{
"name": "contextId",
- "$ref": "ContextId"
+ "$ref": "GraphObjectId"
},
{
"name": "contextType",
@@ -15164,6 +20744,99 @@
"type": "number"
}
]
+ },
+ {
+ "id": "AudioListener",
+ "description": "Protocol object for AudioListener",
+ "type": "object",
+ "properties": [
+ {
+ "name": "listenerId",
+ "$ref": "GraphObjectId"
+ },
+ {
+ "name": "contextId",
+ "$ref": "GraphObjectId"
+ }
+ ]
+ },
+ {
+ "id": "AudioNode",
+ "description": "Protocol object for AudioNode",
+ "type": "object",
+ "properties": [
+ {
+ "name": "nodeId",
+ "$ref": "GraphObjectId"
+ },
+ {
+ "name": "contextId",
+ "$ref": "GraphObjectId"
+ },
+ {
+ "name": "nodeType",
+ "$ref": "NodeType"
+ },
+ {
+ "name": "numberOfInputs",
+ "type": "number"
+ },
+ {
+ "name": "numberOfOutputs",
+ "type": "number"
+ },
+ {
+ "name": "channelCount",
+ "type": "number"
+ },
+ {
+ "name": "channelCountMode",
+ "$ref": "ChannelCountMode"
+ },
+ {
+ "name": "channelInterpretation",
+ "$ref": "ChannelInterpretation"
+ }
+ ]
+ },
+ {
+ "id": "AudioParam",
+ "description": "Protocol object for AudioParam",
+ "type": "object",
+ "properties": [
+ {
+ "name": "paramId",
+ "$ref": "GraphObjectId"
+ },
+ {
+ "name": "nodeId",
+ "$ref": "GraphObjectId"
+ },
+ {
+ "name": "contextId",
+ "$ref": "GraphObjectId"
+ },
+ {
+ "name": "paramType",
+ "$ref": "ParamType"
+ },
+ {
+ "name": "rate",
+ "$ref": "AutomationRate"
+ },
+ {
+ "name": "defaultValue",
+ "type": "number"
+ },
+ {
+ "name": "minValue",
+ "type": "number"
+ },
+ {
+ "name": "maxValue",
+ "type": "number"
+ }
+ ]
}
],
"commands": [
@@ -15181,7 +20854,7 @@
"parameters": [
{
"name": "contextId",
- "$ref": "ContextId"
+ "$ref": "GraphObjectId"
}
],
"returns": [
@@ -15190,36 +20863,214 @@
"$ref": "ContextRealtimeData"
}
]
- }
- ],
- "events": [
+ }
+ ],
+ "events": [
+ {
+ "name": "contextCreated",
+ "description": "Notifies that a new BaseAudioContext has been created.",
+ "parameters": [
+ {
+ "name": "context",
+ "$ref": "BaseAudioContext"
+ }
+ ]
+ },
+ {
+ "name": "contextWillBeDestroyed",
+ "description": "Notifies that an existing BaseAudioContext will be destroyed.",
+ "parameters": [
+ {
+ "name": "contextId",
+ "$ref": "GraphObjectId"
+ }
+ ]
+ },
+ {
+ "name": "contextChanged",
+ "description": "Notifies that existing BaseAudioContext has changed some properties (id stays the same)..",
+ "parameters": [
+ {
+ "name": "context",
+ "$ref": "BaseAudioContext"
+ }
+ ]
+ },
+ {
+ "name": "audioListenerCreated",
+ "description": "Notifies that the construction of an AudioListener has finished.",
+ "parameters": [
+ {
+ "name": "listener",
+ "$ref": "AudioListener"
+ }
+ ]
+ },
+ {
+ "name": "audioListenerWillBeDestroyed",
+ "description": "Notifies that a new AudioListener has been created.",
+ "parameters": [
+ {
+ "name": "contextId",
+ "$ref": "GraphObjectId"
+ },
+ {
+ "name": "listenerId",
+ "$ref": "GraphObjectId"
+ }
+ ]
+ },
+ {
+ "name": "audioNodeCreated",
+ "description": "Notifies that a new AudioNode has been created.",
+ "parameters": [
+ {
+ "name": "node",
+ "$ref": "AudioNode"
+ }
+ ]
+ },
+ {
+ "name": "audioNodeWillBeDestroyed",
+ "description": "Notifies that an existing AudioNode has been destroyed.",
+ "parameters": [
+ {
+ "name": "contextId",
+ "$ref": "GraphObjectId"
+ },
+ {
+ "name": "nodeId",
+ "$ref": "GraphObjectId"
+ }
+ ]
+ },
+ {
+ "name": "audioParamCreated",
+ "description": "Notifies that a new AudioParam has been created.",
+ "parameters": [
+ {
+ "name": "param",
+ "$ref": "AudioParam"
+ }
+ ]
+ },
+ {
+ "name": "audioParamWillBeDestroyed",
+ "description": "Notifies that an existing AudioParam has been destroyed.",
+ "parameters": [
+ {
+ "name": "contextId",
+ "$ref": "GraphObjectId"
+ },
+ {
+ "name": "nodeId",
+ "$ref": "GraphObjectId"
+ },
+ {
+ "name": "paramId",
+ "$ref": "GraphObjectId"
+ }
+ ]
+ },
+ {
+ "name": "nodesConnected",
+ "description": "Notifies that two AudioNodes are connected.",
+ "parameters": [
+ {
+ "name": "contextId",
+ "$ref": "GraphObjectId"
+ },
+ {
+ "name": "sourceId",
+ "$ref": "GraphObjectId"
+ },
+ {
+ "name": "destinationId",
+ "$ref": "GraphObjectId"
+ },
+ {
+ "name": "sourceOutputIndex",
+ "optional": true,
+ "type": "number"
+ },
+ {
+ "name": "destinationInputIndex",
+ "optional": true,
+ "type": "number"
+ }
+ ]
+ },
{
- "name": "contextCreated",
- "description": "Notifies that a new BaseAudioContext has been created.",
+ "name": "nodesDisconnected",
+ "description": "Notifies that AudioNodes are disconnected. The destination can be null, and it means all the outgoing connections from the source are disconnected.",
"parameters": [
{
- "name": "context",
- "$ref": "BaseAudioContext"
+ "name": "contextId",
+ "$ref": "GraphObjectId"
+ },
+ {
+ "name": "sourceId",
+ "$ref": "GraphObjectId"
+ },
+ {
+ "name": "destinationId",
+ "$ref": "GraphObjectId"
+ },
+ {
+ "name": "sourceOutputIndex",
+ "optional": true,
+ "type": "number"
+ },
+ {
+ "name": "destinationInputIndex",
+ "optional": true,
+ "type": "number"
}
]
},
{
- "name": "contextDestroyed",
- "description": "Notifies that existing BaseAudioContext has been destroyed.",
+ "name": "nodeParamConnected",
+ "description": "Notifies that an AudioNode is connected to an AudioParam.",
"parameters": [
{
"name": "contextId",
- "$ref": "ContextId"
+ "$ref": "GraphObjectId"
+ },
+ {
+ "name": "sourceId",
+ "$ref": "GraphObjectId"
+ },
+ {
+ "name": "destinationId",
+ "$ref": "GraphObjectId"
+ },
+ {
+ "name": "sourceOutputIndex",
+ "optional": true,
+ "type": "number"
}
]
},
{
- "name": "contextChanged",
- "description": "Notifies that existing BaseAudioContext has changed some properties (id stays the same)..",
+ "name": "nodeParamDisconnected",
+ "description": "Notifies that an AudioNode is disconnected to an AudioParam.",
"parameters": [
{
- "name": "context",
- "$ref": "BaseAudioContext"
+ "name": "contextId",
+ "$ref": "GraphObjectId"
+ },
+ {
+ "name": "sourceId",
+ "$ref": "GraphObjectId"
+ },
+ {
+ "name": "destinationId",
+ "$ref": "GraphObjectId"
+ },
+ {
+ "name": "sourceOutputIndex",
+ "optional": true,
+ "type": "number"
}
]
}
@@ -15242,6 +21093,14 @@
"ctap2"
]
},
+ {
+ "id": "Ctap2Version",
+ "type": "string",
+ "enum": [
+ "ctap2_0",
+ "ctap2_1"
+ ]
+ },
{
"id": "AuthenticatorTransport",
"type": "string",
@@ -15261,16 +21120,44 @@
"name": "protocol",
"$ref": "AuthenticatorProtocol"
},
+ {
+ "name": "ctap2Version",
+ "description": "Defaults to ctap2_0. Ignored if |protocol| == u2f.",
+ "optional": true,
+ "$ref": "Ctap2Version"
+ },
{
"name": "transport",
"$ref": "AuthenticatorTransport"
},
{
"name": "hasResidentKey",
+ "description": "Defaults to false.",
+ "optional": true,
"type": "boolean"
},
{
"name": "hasUserVerification",
+ "description": "Defaults to false.",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "hasLargeBlob",
+ "description": "If set to true, the authenticator will support the largeBlob extension.\nhttps://w3c.github.io/webauthn#largeBlob\nDefaults to false.",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "hasCredBlob",
+ "description": "If set to true, the authenticator will support the credBlob extension.\nhttps://fidoalliance.org/specs/fido-v2.1-rd-20201208/fido-client-to-authenticator-protocol-v2.1-rd-20201208.html#sctn-credBlob-extension\nDefaults to false.",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "hasMinPinLength",
+ "description": "If set to true, the authenticator will support the minPinLength extension.\nhttps://fidoalliance.org/specs/fido-v2.1-ps-20210615/fido-client-to-authenticator-protocol-v2.1-ps-20210615.html#sctn-minpinlength-extension\nDefaults to false.",
+ "optional": true,
"type": "boolean"
},
{
@@ -15278,6 +21165,12 @@
"description": "If set to true, tests of user presence will succeed immediately.\nOtherwise, they will not be resolved. Defaults to true.",
"optional": true,
"type": "boolean"
+ },
+ {
+ "name": "isUserVerified",
+ "description": "Sets whether User Verification succeeds or fails for an authenticator.\nDefaults to false.",
+ "optional": true,
+ "type": "boolean"
}
]
},
@@ -15290,19 +21183,36 @@
"type": "string"
},
{
- "name": "rpIdHash",
- "description": "SHA-256 hash of the Relying Party ID the credential is scoped to. Must\nbe 32 bytes long.\nSee https://w3c.github.io/webauthn/#rpidhash",
+ "name": "isResidentCredential",
+ "type": "boolean"
+ },
+ {
+ "name": "rpId",
+ "description": "Relying Party ID the credential is scoped to. Must be set when adding a\ncredential.",
+ "optional": true,
"type": "string"
},
{
"name": "privateKey",
- "description": "The private key in PKCS#8 format.",
+ "description": "The ECDSA P-256 private key in PKCS#8 format. (Encoded as a base64 string when passed over JSON)",
+ "type": "string"
+ },
+ {
+ "name": "userHandle",
+ "description": "An opaque byte sequence with a maximum size of 64 bytes mapping the\ncredential to a specific user. (Encoded as a base64 string when passed over JSON)",
+ "optional": true,
"type": "string"
},
{
"name": "signCount",
"description": "Signature counter. This is incremented by one for each successful\nassertion.\nSee https://w3c.github.io/webauthn/#signature-counter",
"type": "integer"
+ },
+ {
+ "name": "largeBlob",
+ "description": "The large blob associated with the credential.\nSee https://w3c.github.io/webauthn/#sctn-large-blob-extension (Encoded as a base64 string when passed over JSON)",
+ "optional": true,
+ "type": "string"
}
]
}
@@ -15356,6 +21266,26 @@
}
]
},
+ {
+ "name": "getCredential",
+ "description": "Returns a single credential stored in the given virtual authenticator that\nmatches the credential ID.",
+ "parameters": [
+ {
+ "name": "authenticatorId",
+ "$ref": "AuthenticatorId"
+ },
+ {
+ "name": "credentialId",
+ "type": "string"
+ }
+ ],
+ "returns": [
+ {
+ "name": "credential",
+ "$ref": "Credential"
+ }
+ ]
+ },
{
"name": "getCredentials",
"description": "Returns all the credentials stored in the given virtual authenticator.",
@@ -15375,6 +21305,20 @@
}
]
},
+ {
+ "name": "removeCredential",
+ "description": "Removes a credential from the authenticator.",
+ "parameters": [
+ {
+ "name": "authenticatorId",
+ "$ref": "AuthenticatorId"
+ },
+ {
+ "name": "credentialId",
+ "type": "string"
+ }
+ ]
+ },
{
"name": "clearCredentials",
"description": "Clears all the credentials from the specified device.",
@@ -15398,6 +21342,201 @@
"type": "boolean"
}
]
+ },
+ {
+ "name": "setAutomaticPresenceSimulation",
+ "description": "Sets whether tests of user presence will succeed immediately (if true) or fail to resolve (if false) for an authenticator.\nThe default is true.",
+ "parameters": [
+ {
+ "name": "authenticatorId",
+ "$ref": "AuthenticatorId"
+ },
+ {
+ "name": "enabled",
+ "type": "boolean"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "domain": "Media",
+ "description": "This domain allows detailed inspection of media elements",
+ "experimental": true,
+ "types": [
+ {
+ "id": "PlayerId",
+ "description": "Players will get an ID that is unique within the agent context.",
+ "type": "string"
+ },
+ {
+ "id": "Timestamp",
+ "type": "number"
+ },
+ {
+ "id": "PlayerMessage",
+ "description": "Have one type per entry in MediaLogRecord::Type\nCorresponds to kMessage",
+ "type": "object",
+ "properties": [
+ {
+ "name": "level",
+ "description": "Keep in sync with MediaLogMessageLevel\nWe are currently keeping the message level 'error' separate from the\nPlayerError type because right now they represent different things,\nthis one being a DVLOG(ERROR) style log message that gets printed\nbased on what log level is selected in the UI, and the other is a\nrepresentation of a media::PipelineStatus object. Soon however we're\ngoing to be moving away from using PipelineStatus for errors and\nintroducing a new error type which should hopefully let us integrate\nthe error log level into the PlayerError type.",
+ "type": "string",
+ "enum": [
+ "error",
+ "warning",
+ "info",
+ "debug"
+ ]
+ },
+ {
+ "name": "message",
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "id": "PlayerProperty",
+ "description": "Corresponds to kMediaPropertyChange",
+ "type": "object",
+ "properties": [
+ {
+ "name": "name",
+ "type": "string"
+ },
+ {
+ "name": "value",
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "id": "PlayerEvent",
+ "description": "Corresponds to kMediaEventTriggered",
+ "type": "object",
+ "properties": [
+ {
+ "name": "timestamp",
+ "$ref": "Timestamp"
+ },
+ {
+ "name": "value",
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "id": "PlayerError",
+ "description": "Corresponds to kMediaError",
+ "type": "object",
+ "properties": [
+ {
+ "name": "type",
+ "type": "string",
+ "enum": [
+ "pipeline_error",
+ "media_error"
+ ]
+ },
+ {
+ "name": "errorCode",
+ "description": "When this switches to using media::Status instead of PipelineStatus\nwe can remove \"errorCode\" and replace it with the fields from\na Status instance. This also seems like a duplicate of the error\nlevel enum - there is a todo bug to have that level removed and\nuse this instead. (crbug.com/1068454)",
+ "type": "string"
+ }
+ ]
+ }
+ ],
+ "events": [
+ {
+ "name": "playerPropertiesChanged",
+ "description": "This can be called multiple times, and can be used to set / override /\nremove player properties. A null propValue indicates removal.",
+ "parameters": [
+ {
+ "name": "playerId",
+ "$ref": "PlayerId"
+ },
+ {
+ "name": "properties",
+ "type": "array",
+ "items": {
+ "$ref": "PlayerProperty"
+ }
+ }
+ ]
+ },
+ {
+ "name": "playerEventsAdded",
+ "description": "Send events as a list, allowing them to be batched on the browser for less\ncongestion. If batched, events must ALWAYS be in chronological order.",
+ "parameters": [
+ {
+ "name": "playerId",
+ "$ref": "PlayerId"
+ },
+ {
+ "name": "events",
+ "type": "array",
+ "items": {
+ "$ref": "PlayerEvent"
+ }
+ }
+ ]
+ },
+ {
+ "name": "playerMessagesLogged",
+ "description": "Send a list of any messages that need to be delivered.",
+ "parameters": [
+ {
+ "name": "playerId",
+ "$ref": "PlayerId"
+ },
+ {
+ "name": "messages",
+ "type": "array",
+ "items": {
+ "$ref": "PlayerMessage"
+ }
+ }
+ ]
+ },
+ {
+ "name": "playerErrorsRaised",
+ "description": "Send a list of any errors that need to be delivered.",
+ "parameters": [
+ {
+ "name": "playerId",
+ "$ref": "PlayerId"
+ },
+ {
+ "name": "errors",
+ "type": "array",
+ "items": {
+ "$ref": "PlayerError"
+ }
+ }
+ ]
+ },
+ {
+ "name": "playersCreated",
+ "description": "Called whenever a player is created, or when a new agent joins and receives\na list of active players. If an agent is restored, it will receive the full\nlist of player ids and all events again.",
+ "parameters": [
+ {
+ "name": "players",
+ "type": "array",
+ "items": {
+ "$ref": "PlayerId"
+ }
+ }
+ ]
+ }
+ ],
+ "commands": [
+ {
+ "name": "enable",
+ "description": "Enables the Media domain"
+ },
+ {
+ "name": "disable",
+ "description": "Disables the Media domain."
}
]
}
diff --git a/generator/generate.py b/generator/generate.py
index 6f2d9d3..be5ad41 100644
--- a/generator/generate.py
+++ b/generator/generate.py
@@ -123,6 +123,12 @@ def ref_to_python(ref: str) -> str:
return f"{ref}"
+def ref_to_python_domain(ref: str, domain: str) -> str:
+ if ref.startswith(domain + '.'):
+ return ref_to_python(ref[len(domain)+1:])
+ return ref_to_python(ref)
+
+
class CdpPrimitiveType(Enum):
''' All of the CDP types that map directly to a Python type. '''
boolean = 'bool'
@@ -173,6 +179,7 @@ class CdpProperty:
optional: bool
experimental: bool
deprecated: bool
+ domain: str
@property
def py_name(self) -> str:
@@ -184,14 +191,14 @@ def py_annotation(self) -> str:
''' This property's Python type annotation. '''
if self.items:
if self.items.ref:
- py_ref = ref_to_python(self.items.ref)
+ py_ref = ref_to_python_domain(self.items.ref, self.domain)
ann = "typing.List[{}]".format(py_ref)
else:
ann = 'typing.List[{}]'.format(
CdpPrimitiveType.get_annotation(self.items.type))
else:
if self.ref:
- py_ref = ref_to_python(self.ref)
+ py_ref = ref_to_python_domain(self.ref, self.domain)
ann = py_ref
else:
ann = CdpPrimitiveType.get_annotation(
@@ -201,7 +208,7 @@ def py_annotation(self) -> str:
return ann
@classmethod
- def from_json(cls, property) -> 'CdpProperty':
+ def from_json(cls, property, domain: str) -> 'CdpProperty':
''' Instantiate a CDP property from a JSON object. '''
return cls(
property['name'],
@@ -213,6 +220,7 @@ def from_json(cls, property) -> 'CdpProperty':
property.get('optional', False),
property.get('experimental', False),
property.get('deprecated', False),
+ domain
)
def generate_decl(self) -> str:
@@ -253,14 +261,14 @@ def generate_from_json(self, dict_) -> str:
``dict_``. '''
if self.items:
if self.items.ref:
- py_ref = ref_to_python(self.items.ref)
+ py_ref = ref_to_python_domain(self.items.ref, self.domain)
expr = f"[{py_ref}.from_json(i) for i in {dict_}['{self.name}']]"
else:
cons = CdpPrimitiveType.get_constructor(self.items.type, 'i')
expr = f"[{cons} for i in {dict_}['{self.name}']]"
else:
if self.ref:
- py_ref = ref_to_python(self.ref)
+ py_ref = ref_to_python_domain(self.ref, self.domain)
expr = f"{py_ref}.from_json({dict_}['{self.name}'])"
else:
expr = CdpPrimitiveType.get_constructor(self.type,
@@ -281,7 +289,7 @@ class CdpType:
properties: typing.List[CdpProperty]
@classmethod
- def from_json(cls, type_) -> 'CdpType':
+ def from_json(cls, type_, domain: str) -> 'CdpType':
''' Instantiate a CDP type from a JSON object. '''
return cls(
type_['id'],
@@ -289,7 +297,7 @@ def from_json(cls, type_) -> 'CdpType':
type_['type'],
CdpItems.from_json(type_['items']) if 'items' in type_ else None,
type_.get('enum'),
- [CdpProperty.from_json(p) for p in type_.get('properties', list())],
+ [CdpProperty.from_json(p, domain) for p in type_.get('properties', list())],
)
def generate_code(self) -> str:
@@ -568,9 +576,9 @@ def from_json(cls, command, domain) -> 'CdpCommand':
command.get('description'),
command.get('experimental', False),
command.get('deprecated', False),
- [typing.cast(CdpParameter, CdpParameter.from_json(p)) for p in parameters],
- [typing.cast(CdpReturn, CdpReturn.from_json(r)) for r in returns],
- domain,
+ [typing.cast(CdpParameter, CdpParameter.from_json(p, domain)) for p in parameters],
+ [typing.cast(CdpReturn, CdpReturn.from_json(r, domain)) for r in returns],
+ domain
)
def generate_code(self) -> str:
@@ -594,11 +602,12 @@ def generate_code(self) -> str:
code += f'def {self.py_name}('
ret = f') -> {ret_type}:\n'
if self.parameters:
+ sorted_params = sorted(self.parameters, key=lambda param: 1 if param.optional else 0)
code += '\n'
code += indent(
- ',\n'.join(p.generate_code() for p in self.parameters), 8)
+ ',\n'.join(p.generate_code() for p in sorted_params), 8)
code += '\n'
- code += indent(ret, 4)
+ code += indent(ret, 4)
else:
code += ret
@@ -690,7 +699,7 @@ def from_json(cls, json: dict, domain: str):
json.get('description'),
json.get('deprecated', False),
json.get('experimental', False),
- [typing.cast(CdpParameter, CdpParameter.from_json(p))
+ [typing.cast(CdpParameter, CdpParameter.from_json(p, domain))
for p in json.get('parameters', list())],
domain
)
@@ -773,7 +782,7 @@ def from_json(cls, domain: dict):
domain.get('description'),
domain.get('experimental', False),
domain.get('dependencies', list()),
- [CdpType.from_json(type) for type in types],
+ [CdpType.from_json(type, domain_name) for type in types],
[CdpCommand.from_json(command, domain_name)
for command in commands],
[CdpEvent.from_json(event, domain_name) for event in events]
diff --git a/generator/js_protocol.json b/generator/js_protocol.json
index 004daa2..727d69e 100644
--- a/generator/js_protocol.json
+++ b/generator/js_protocol.json
@@ -157,6 +157,26 @@
}
]
},
+ {
+ "id": "LocationRange",
+ "description": "Location range within one script.",
+ "experimental": true,
+ "type": "object",
+ "properties": [
+ {
+ "name": "scriptId",
+ "$ref": "Runtime.ScriptId"
+ },
+ {
+ "name": "start",
+ "$ref": "ScriptPosition"
+ },
+ {
+ "name": "end",
+ "$ref": "ScriptPosition"
+ }
+ ]
+ },
{
"id": "CallFrame",
"description": "JavaScript call frame. Array of call frames form the call stack.",
@@ -227,7 +247,8 @@
"block",
"script",
"eval",
- "module"
+ "module",
+ "wasm-expression-stack"
]
},
{
@@ -302,6 +323,39 @@
]
}
]
+ },
+ {
+ "id": "ScriptLanguage",
+ "description": "Enum of possible script languages.",
+ "type": "string",
+ "enum": [
+ "JavaScript",
+ "WebAssembly"
+ ]
+ },
+ {
+ "id": "DebugSymbols",
+ "description": "Debug symbols available for a wasm script.",
+ "type": "object",
+ "properties": [
+ {
+ "name": "type",
+ "description": "Type of the debug symbols.",
+ "type": "string",
+ "enum": [
+ "None",
+ "SourceMap",
+ "EmbeddedDWARF",
+ "ExternalDWARF"
+ ]
+ },
+ {
+ "name": "externalURL",
+ "description": "URL of the external symbol source.",
+ "optional": true,
+ "type": "string"
+ }
+ ]
}
],
"commands": [
@@ -335,7 +389,7 @@
"parameters": [
{
"name": "maxScriptsCacheSize",
- "description": "The maximum size in bytes of collected scripts (not referenced by other heap objects)\nthe debugger can hold. Puts no limit if paramter is omitted.",
+ "description": "The maximum size in bytes of collected scripts (not referenced by other heap objects)\nthe debugger can hold. Puts no limit if parameter is omitted.",
"experimental": true,
"optional": true,
"type": "number"
@@ -469,7 +523,32 @@
"returns": [
{
"name": "scriptSource",
- "description": "Script source.",
+ "description": "Script source (empty in case of Wasm bytecode).",
+ "type": "string"
+ },
+ {
+ "name": "bytecode",
+ "description": "Wasm bytecode. (Encoded as a base64 string when passed over JSON)",
+ "optional": true,
+ "type": "string"
+ }
+ ]
+ },
+ {
+ "name": "getWasmBytecode",
+ "description": "This command is deprecated. Use getScriptSource instead.",
+ "deprecated": true,
+ "parameters": [
+ {
+ "name": "scriptId",
+ "description": "Id of the Wasm script to get source for.",
+ "$ref": "Runtime.ScriptId"
+ }
+ ],
+ "returns": [
+ {
+ "name": "bytecode",
+ "description": "Script source. (Encoded as a base64 string when passed over JSON)",
"type": "string"
}
]
@@ -498,6 +577,7 @@
{
"name": "pauseOnAsyncCall",
"experimental": true,
+ "deprecated": true,
"parameters": [
{
"name": "parentStackTraceId",
@@ -519,6 +599,7 @@
{
"name": "restartFrame",
"description": "Restarts particular call frame from the beginning.",
+ "deprecated": true,
"parameters": [
{
"name": "callFrameId",
@@ -552,7 +633,15 @@
},
{
"name": "resume",
- "description": "Resumes JavaScript execution."
+ "description": "Resumes JavaScript execution.",
+ "parameters": [
+ {
+ "name": "terminateOnResume",
+ "description": "Set to true to terminate execution upon resuming execution. In contrast\nto Runtime.terminateExecution, this will allows to execute further\nJavaScript (i.e. via evaluation) until execution of the paused code\nis actually resumed, at which point termination is triggered.\nIf execution is currently not paused, this parameter has no effect.",
+ "optional": true,
+ "type": "boolean"
+ }
+ ]
},
{
"name": "searchInContent",
@@ -909,10 +998,20 @@
"parameters": [
{
"name": "breakOnAsyncCall",
- "description": "Debugger will issue additional Debugger.paused notification if any async task is scheduled\nbefore next pause.",
+ "description": "Debugger will pause on the execution of the first async task which was scheduled\nbefore next pause.",
"experimental": true,
"optional": true,
"type": "boolean"
+ },
+ {
+ "name": "skipList",
+ "description": "The skipList specifies location ranges that should be skipped on step into.",
+ "experimental": true,
+ "optional": true,
+ "type": "array",
+ "items": {
+ "$ref": "LocationRange"
+ }
}
]
},
@@ -922,7 +1021,19 @@
},
{
"name": "stepOver",
- "description": "Steps over the statement."
+ "description": "Steps over the statement.",
+ "parameters": [
+ {
+ "name": "skipList",
+ "description": "The skipList specifies location ranges that should be skipped on step over.",
+ "experimental": true,
+ "optional": true,
+ "type": "array",
+ "items": {
+ "$ref": "LocationRange"
+ }
+ }
+ ]
}
],
"events": [
@@ -961,6 +1072,7 @@
"enum": [
"ambiguous",
"assert",
+ "CSPViolation",
"debugCommand",
"DOM",
"EventListener",
@@ -1002,8 +1114,9 @@
},
{
"name": "asyncCallStackTraceId",
- "description": "Just scheduled async call will have this stack trace as parent stack during async execution.\nThis field is available only after `Debugger.stepInto` call with `breakOnAsynCall` flag.",
+ "description": "Never present, will be removed.",
"experimental": true,
+ "deprecated": true,
"optional": true,
"$ref": "Runtime.StackTraceId"
}
@@ -1093,6 +1206,27 @@
"experimental": true,
"optional": true,
"$ref": "Runtime.StackTrace"
+ },
+ {
+ "name": "codeOffset",
+ "description": "If the scriptLanguage is WebAssembly, the code section offset in the module.",
+ "experimental": true,
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "scriptLanguage",
+ "description": "The language of the script.",
+ "experimental": true,
+ "optional": true,
+ "$ref": "Debugger.ScriptLanguage"
+ },
+ {
+ "name": "embedderName",
+ "description": "The name the embedder supplied for this script.",
+ "experimental": true,
+ "optional": true,
+ "type": "string"
}
]
},
@@ -1183,6 +1317,34 @@
"experimental": true,
"optional": true,
"$ref": "Runtime.StackTrace"
+ },
+ {
+ "name": "codeOffset",
+ "description": "If the scriptLanguage is WebAssembly, the code section offset in the module.",
+ "experimental": true,
+ "optional": true,
+ "type": "integer"
+ },
+ {
+ "name": "scriptLanguage",
+ "description": "The language of the script.",
+ "experimental": true,
+ "optional": true,
+ "$ref": "Debugger.ScriptLanguage"
+ },
+ {
+ "name": "debugSymbols",
+ "description": "If the scriptLanguage is WebASsembly, the source of debug symbols for the module.",
+ "experimental": true,
+ "optional": true,
+ "$ref": "Debugger.DebugSymbols"
+ },
+ {
+ "name": "embedderName",
+ "description": "The name the embedder supplied for this script.",
+ "experimental": true,
+ "optional": true,
+ "type": "string"
}
]
}
@@ -1380,6 +1542,17 @@
"description": "If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken\nwhen the tracking is stopped.",
"optional": true,
"type": "boolean"
+ },
+ {
+ "name": "treatGlobalObjectsAsRoots",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "captureNumericValue",
+ "description": "If true, numerical values are included in the snapshot",
+ "optional": true,
+ "type": "boolean"
}
]
},
@@ -1391,6 +1564,18 @@
"description": "If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken.",
"optional": true,
"type": "boolean"
+ },
+ {
+ "name": "treatGlobalObjectsAsRoots",
+ "description": "If true, a raw snapshot without artificial roots will be generated",
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "captureNumericValue",
+ "description": "If true, numerical values are included in the snapshot",
+ "optional": true,
+ "type": "boolean"
}
]
}
@@ -1753,6 +1938,19 @@
"description": "Collect block-based coverage.",
"optional": true,
"type": "boolean"
+ },
+ {
+ "name": "allowTriggeredUpdates",
+ "description": "Allow the backend to send updates on its own initiative",
+ "optional": true,
+ "type": "boolean"
+ }
+ ],
+ "returns": [
+ {
+ "name": "timestamp",
+ "description": "Monotonically increasing time (in seconds) when the coverage update was taken in the backend.",
+ "type": "number"
}
]
},
@@ -1791,6 +1989,11 @@
"items": {
"$ref": "ScriptCoverage"
}
+ },
+ {
+ "name": "timestamp",
+ "description": "Monotonically increasing time (in seconds) when the coverage update was taken in the backend.",
+ "type": "number"
}
]
},
@@ -1855,6 +2058,31 @@
"type": "string"
}
]
+ },
+ {
+ "name": "preciseCoverageDeltaUpdate",
+ "description": "Reports coverage delta since the last poll (either from an event like this, or from\n`takePreciseCoverage` for the current isolate. May only be sent if precise code\ncoverage has been started. This event can be trigged by the embedder to, for example,\ntrigger collection of coverage data immediately at a certain point in time.",
+ "experimental": true,
+ "parameters": [
+ {
+ "name": "timestamp",
+ "description": "Monotonically increasing time (in seconds) when the coverage update was taken in the backend.",
+ "type": "number"
+ },
+ {
+ "name": "occasion",
+ "description": "Identifier for distinguishing coverage events.",
+ "type": "string"
+ },
+ {
+ "name": "result",
+ "description": "Coverage data for the current isolate.",
+ "type": "array",
+ "items": {
+ "$ref": "ScriptCoverage"
+ }
+ }
+ ]
}
]
},
@@ -1899,7 +2127,7 @@
},
{
"name": "subtype",
- "description": "Object subtype hint. Specified for `object` type values only.",
+ "description": "Object subtype hint. Specified for `object` type values only.\nNOTE: If you change anything here, make sure to also update\n`subtype` in `ObjectPreview` and `PropertyPreview` below.",
"optional": true,
"type": "string",
"enum": [
@@ -1919,7 +2147,9 @@
"promise",
"typedarray",
"arraybuffer",
- "dataview"
+ "dataview",
+ "webassemblymemory",
+ "wasmvalue"
]
},
{
@@ -2023,7 +2253,14 @@
"weakset",
"iterator",
"generator",
- "error"
+ "error",
+ "proxy",
+ "promise",
+ "typedarray",
+ "arraybuffer",
+ "dataview",
+ "webassemblymemory",
+ "wasmvalue"
]
},
{
@@ -2111,7 +2348,14 @@
"weakset",
"iterator",
"generator",
- "error"
+ "error",
+ "proxy",
+ "promise",
+ "typedarray",
+ "arraybuffer",
+ "dataview",
+ "webassemblymemory",
+ "wasmvalue"
]
}
]
@@ -2230,6 +2474,19 @@
{
"name": "value",
"description": "The value associated with the private property.",
+ "optional": true,
+ "$ref": "RemoteObject"
+ },
+ {
+ "name": "get",
+ "description": "A function which serves as a getter for the private property,\nor `undefined` if there is no getter (accessor descriptors only).",
+ "optional": true,
+ "$ref": "RemoteObject"
+ },
+ {
+ "name": "set",
+ "description": "A function which serves as a setter for the private property,\nor `undefined` if there is no setter (accessor descriptors only).",
+ "optional": true,
"$ref": "RemoteObject"
}
]
@@ -2284,6 +2541,12 @@
"description": "Human readable name describing given context.",
"type": "string"
},
+ {
+ "name": "uniqueId",
+ "description": "A system-unique execution context identifier. Unlike the id, this is unique across\nmultiple processes, so can be reliably used to identify specific context while backend\nperforms a cross-process navigation.",
+ "experimental": true,
+ "type": "string"
+ },
{
"name": "auxData",
"description": "Embedder-specific auxiliary data.",
@@ -2346,6 +2609,13 @@
"description": "Identifier of the context where exception happened.",
"optional": true,
"$ref": "ExecutionContextId"
+ },
+ {
+ "name": "exceptionMetaData",
+ "description": "Dictionary with entries of meta data that the client associated\nwith this exception, such as information about associated network\nrequests, etc.",
+ "experimental": true,
+ "optional": true,
+ "type": "object"
}
]
},
@@ -2552,6 +2822,13 @@
"description": "Symbolic group name that can be used to release multiple objects. If objectGroup is not\nspecified and objectId is, objectGroup will be inherited from object.",
"optional": true,
"type": "string"
+ },
+ {
+ "name": "throwOnSideEffect",
+ "description": "Whether to throw an exception if side effect cannot be ruled out during evaluation.",
+ "experimental": true,
+ "optional": true,
+ "type": "boolean"
}
],
"returns": [
@@ -2650,7 +2927,7 @@
},
{
"name": "contextId",
- "description": "Specifies in which execution context to perform evaluation. If the parameter is omitted the\nevaluation will be performed in the context of the inspected page.",
+ "description": "Specifies in which execution context to perform evaluation. If the parameter is omitted the\nevaluation will be performed in the context of the inspected page.\nThis is mutually exclusive with `uniqueContextId`, which offers an\nalternative way to identify the execution context that is more reliable\nin a multi-process environment.",
"optional": true,
"$ref": "ExecutionContextId"
},
@@ -2681,7 +2958,7 @@
},
{
"name": "throwOnSideEffect",
- "description": "Whether to throw an exception if side effect cannot be ruled out during evaluation.",
+ "description": "Whether to throw an exception if side effect cannot be ruled out during evaluation.\nThis implies `disableBreaks` below.",
"experimental": true,
"optional": true,
"type": "boolean"
@@ -2692,6 +2969,34 @@
"experimental": true,
"optional": true,
"$ref": "TimeDelta"
+ },
+ {
+ "name": "disableBreaks",
+ "description": "Disable breakpoints during execution.",
+ "experimental": true,
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "replMode",
+ "description": "Setting this flag to true enables `let` re-declaration and top-level `await`.\nNote that `let` variables can only be re-declared if they originate from\n`replMode` themselves.",
+ "experimental": true,
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "allowUnsafeEvalBlockedByCSP",
+ "description": "The Content Security Policy (CSP) for the target might block 'unsafe-eval'\nwhich includes eval(), Function(), setTimeout() and setInterval()\nwhen called with non-callable arguments. This flag bypasses CSP for this\nevaluation and allows unsafe-eval. Defaults to true.",
+ "experimental": true,
+ "optional": true,
+ "type": "boolean"
+ },
+ {
+ "name": "uniqueContextId",
+ "description": "An alternative way to specify the execution context to evaluate in.\nCompared to contextId that may be reused across processes, this is guaranteed to be\nsystem-unique, so it can be used to prevent accidental evaluation of the expression\nin context different than intended (e.g. as a result of navigation across process\nboundaries).\nThis is mutually exclusive with `contextId`.",
+ "experimental": true,
+ "optional": true,
+ "type": "string"
}
],
"returns": [
@@ -2765,6 +3070,13 @@
"experimental": true,
"optional": true,
"type": "boolean"
+ },
+ {
+ "name": "nonIndexedPropertiesOnly",
+ "description": "If true, returns non-indexed properties only.",
+ "experimental": true,
+ "optional": true,
+ "type": "boolean"
}
],
"returns": [
@@ -2978,7 +3290,7 @@
},
{
"name": "addBinding",
- "description": "If executionContextId is empty, adds binding with the given name on the\nglobal objects of all inspected contexts, including those created later,\nbindings survive reloads.\nIf executionContextId is specified, adds binding only on global object of\ngiven execution context.\nBinding function takes exactly one argument, this argument should be string,\nin case of any other input, function throws an exception.\nEach binding function call produces Runtime.bindingCalled notification.",
+ "description": "If executionContextId is empty, adds binding with the given name on the\nglobal objects of all inspected contexts, including those created later,\nbindings survive reloads.\nBinding function takes exactly one argument, this argument should be string,\nin case of any other input, function throws an exception.\nEach binding function call produces Runtime.bindingCalled notification.",
"experimental": true,
"parameters": [
{
@@ -2987,8 +3299,17 @@
},
{
"name": "executionContextId",
+ "description": "If specified, the binding would only be exposed to the specified\nexecution context. If omitted and `executionContextName` is not set,\nthe binding is exposed to all execution contexts of the target.\nThis parameter is mutually exclusive with `executionContextName`.\nDeprecated in favor of `executionContextName` due to an unclear use case\nand bugs in implementation (crbug.com/1169639). `executionContextId` will be\nremoved in the future.",
+ "deprecated": true,
"optional": true,
"$ref": "ExecutionContextId"
+ },
+ {
+ "name": "executionContextName",
+ "description": "If specified, the binding is exposed to the executionContext with\nmatching name, even for contexts created after the binding is added.\nSee also `ExecutionContext.name` and `worldName` parameter to\n`Page.addScriptToEvaluateOnNewDocument`.\nThis parameter is mutually exclusive with `executionContextId`.",
+ "experimental": true,
+ "optional": true,
+ "type": "string"
}
]
},
@@ -3155,6 +3476,13 @@
{
"name": "hints",
"type": "object"
+ },
+ {
+ "name": "executionContextId",
+ "description": "Identifier of the context where the call was made.",
+ "experimental": true,
+ "optional": true,
+ "$ref": "ExecutionContextId"
}
]
}
diff --git a/generator/test_generate.py b/generator/test_generate.py
index e8b2344..f2b2654 100644
--- a/generator/test_generate.py
+++ b/generator/test_generate.py
@@ -68,7 +68,7 @@ def from_json(cls, json: str) -> AXNodeId:
def __repr__(self):
return 'AXNodeId({})'.format(super().__repr__())""")
- type = CdpType.from_json(json_type)
+ type = CdpType.from_json(json_type, '')
actual = type.generate_code()
assert expected == actual
@@ -97,7 +97,7 @@ def from_json(cls, json: typing.List[StringIndex]) -> ArrayOfStrings:
def __repr__(self):
return 'ArrayOfStrings({})'.format(super().__repr__())""")
- type = CdpType.from_json(json_type)
+ type = CdpType.from_json(json_type, '')
actual = type.generate_code()
assert expected == actual
@@ -135,7 +135,7 @@ def to_json(self) -> str:
def from_json(cls, json: str) -> AXValueSourceType:
return cls(json)""")
- type = CdpType.from_json(json_type)
+ type = CdpType.from_json(json_type, '')
actual = type.generate_code()
assert expected == actual
@@ -215,7 +215,7 @@ def from_json(cls, json: T_JSON_DICT) -> AXValue:
sources=[AXValueSource.from_json(i) for i in json['sources']] if 'sources' in json else None,
)""")
- type = CdpType.from_json(json_type)
+ type = CdpType.from_json(json_type, '')
actual = type.generate_code()
assert expected == actual
From d68802f30e53445522ae0bd7d38bee02c5ee659d Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Tue, 1 Mar 2022 19:46:07 -0300
Subject: [PATCH 26/81] use relative import for util module
---
cdp/accessibility.py | 4 ++--
cdp/animation.py | 4 ++--
cdp/audits.py | 4 ++--
cdp/background_service.py | 4 ++--
cdp/browser.py | 4 ++--
cdp/cache_storage.py | 4 ++--
cdp/cast.py | 4 ++--
cdp/console.py | 4 ++--
cdp/css.py | 4 ++--
cdp/database.py | 4 ++--
cdp/debugger.py | 4 ++--
cdp/device_orientation.py | 4 ++--
cdp/dom.py | 4 ++--
cdp/dom_debugger.py | 4 ++--
cdp/dom_snapshot.py | 4 ++--
cdp/dom_storage.py | 4 ++--
cdp/emulation.py | 4 ++--
cdp/event_breakpoints.py | 4 ++--
cdp/fetch.py | 4 ++--
cdp/headless_experimental.py | 4 ++--
cdp/heap_profiler.py | 4 ++--
cdp/indexed_db.py | 4 ++--
cdp/input_.py | 4 ++--
cdp/inspector.py | 4 ++--
cdp/io.py | 4 ++--
cdp/layer_tree.py | 4 ++--
cdp/log.py | 4 ++--
cdp/media.py | 4 ++--
cdp/memory.py | 4 ++--
cdp/network.py | 4 ++--
cdp/overlay.py | 4 ++--
cdp/page.py | 4 ++--
cdp/performance.py | 4 ++--
cdp/performance_timeline.py | 4 ++--
cdp/profiler.py | 4 ++--
cdp/runtime.py | 4 ++--
cdp/schema.py | 4 ++--
cdp/security.py | 4 ++--
cdp/service_worker.py | 4 ++--
cdp/storage.py | 4 ++--
cdp/system_info.py | 4 ++--
cdp/target.py | 4 ++--
cdp/tethering.py | 4 ++--
cdp/tracing.py | 4 ++--
cdp/web_audio.py | 4 ++--
cdp/web_authn.py | 4 ++--
generator/generate.py | 4 ++--
47 files changed, 94 insertions(+), 94 deletions(-)
diff --git a/cdp/accessibility.py b/cdp/accessibility.py
index 010cf03..43cfa7f 100644
--- a/cdp/accessibility.py
+++ b/cdp/accessibility.py
@@ -6,10 +6,10 @@
# CDP domain: Accessibility (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import dom
from . import page
diff --git a/cdp/animation.py b/cdp/animation.py
index e47b0e4..d4c625d 100644
--- a/cdp/animation.py
+++ b/cdp/animation.py
@@ -6,10 +6,10 @@
# CDP domain: Animation (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import dom
from . import runtime
diff --git a/cdp/audits.py b/cdp/audits.py
index 40e3487..0443193 100644
--- a/cdp/audits.py
+++ b/cdp/audits.py
@@ -6,10 +6,10 @@
# CDP domain: Audits (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import dom
from . import network
diff --git a/cdp/background_service.py b/cdp/background_service.py
index 49e0696..f73331f 100644
--- a/cdp/background_service.py
+++ b/cdp/background_service.py
@@ -6,10 +6,10 @@
# CDP domain: BackgroundService (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import network
from . import service_worker
diff --git a/cdp/browser.py b/cdp/browser.py
index b9e7805..1646a1d 100644
--- a/cdp/browser.py
+++ b/cdp/browser.py
@@ -6,10 +6,10 @@
# CDP domain: Browser
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import page
from . import target
diff --git a/cdp/cache_storage.py b/cdp/cache_storage.py
index f0d40a6..0110ac7 100644
--- a/cdp/cache_storage.py
+++ b/cdp/cache_storage.py
@@ -6,10 +6,10 @@
# CDP domain: CacheStorage (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
class CacheId(str):
diff --git a/cdp/cast.py b/cdp/cast.py
index 4ee4045..a334b30 100644
--- a/cdp/cast.py
+++ b/cdp/cast.py
@@ -6,10 +6,10 @@
# CDP domain: Cast (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
@dataclass
diff --git a/cdp/console.py b/cdp/console.py
index bf7e9c1..ba67f39 100644
--- a/cdp/console.py
+++ b/cdp/console.py
@@ -6,10 +6,10 @@
# CDP domain: Console
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
@dataclass
diff --git a/cdp/css.py b/cdp/css.py
index 668e8ab..3c4181d 100644
--- a/cdp/css.py
+++ b/cdp/css.py
@@ -6,10 +6,10 @@
# CDP domain: CSS (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import dom
from . import page
diff --git a/cdp/database.py b/cdp/database.py
index 71dac2c..b8052e4 100644
--- a/cdp/database.py
+++ b/cdp/database.py
@@ -6,10 +6,10 @@
# CDP domain: Database (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
class DatabaseId(str):
diff --git a/cdp/debugger.py b/cdp/debugger.py
index adc0cb4..ad67882 100644
--- a/cdp/debugger.py
+++ b/cdp/debugger.py
@@ -6,10 +6,10 @@
# CDP domain: Debugger
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import runtime
from deprecated.sphinx import deprecated # type: ignore
diff --git a/cdp/device_orientation.py b/cdp/device_orientation.py
index 1296997..7e67363 100644
--- a/cdp/device_orientation.py
+++ b/cdp/device_orientation.py
@@ -6,10 +6,10 @@
# CDP domain: DeviceOrientation (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
def clear_device_orientation_override() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
diff --git a/cdp/dom.py b/cdp/dom.py
index 1faf4d1..dd5a8ee 100644
--- a/cdp/dom.py
+++ b/cdp/dom.py
@@ -6,10 +6,10 @@
# CDP domain: DOM
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import page
from . import runtime
diff --git a/cdp/dom_debugger.py b/cdp/dom_debugger.py
index d1589ac..19fd2bc 100644
--- a/cdp/dom_debugger.py
+++ b/cdp/dom_debugger.py
@@ -6,10 +6,10 @@
# CDP domain: DOMDebugger
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import dom
from . import runtime
diff --git a/cdp/dom_snapshot.py b/cdp/dom_snapshot.py
index 4a93c19..c9e2513 100644
--- a/cdp/dom_snapshot.py
+++ b/cdp/dom_snapshot.py
@@ -6,10 +6,10 @@
# CDP domain: DOMSnapshot (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import dom
from . import dom_debugger
diff --git a/cdp/dom_storage.py b/cdp/dom_storage.py
index 700f9cd..ca23937 100644
--- a/cdp/dom_storage.py
+++ b/cdp/dom_storage.py
@@ -6,10 +6,10 @@
# CDP domain: DOMStorage (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
@dataclass
diff --git a/cdp/emulation.py b/cdp/emulation.py
index 40e10e8..0fd4269 100644
--- a/cdp/emulation.py
+++ b/cdp/emulation.py
@@ -6,10 +6,10 @@
# CDP domain: Emulation
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import dom
from . import network
diff --git a/cdp/event_breakpoints.py b/cdp/event_breakpoints.py
index 5eb47fa..719ebd1 100644
--- a/cdp/event_breakpoints.py
+++ b/cdp/event_breakpoints.py
@@ -6,10 +6,10 @@
# CDP domain: EventBreakpoints (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
def set_instrumentation_breakpoint(
diff --git a/cdp/fetch.py b/cdp/fetch.py
index 1708f6a..9c19963 100644
--- a/cdp/fetch.py
+++ b/cdp/fetch.py
@@ -6,10 +6,10 @@
# CDP domain: Fetch
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import io
from . import network
diff --git a/cdp/headless_experimental.py b/cdp/headless_experimental.py
index a4d6c4b..37be695 100644
--- a/cdp/headless_experimental.py
+++ b/cdp/headless_experimental.py
@@ -6,10 +6,10 @@
# CDP domain: HeadlessExperimental (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from deprecated.sphinx import deprecated # type: ignore
diff --git a/cdp/heap_profiler.py b/cdp/heap_profiler.py
index 1ea8e45..cf6717d 100644
--- a/cdp/heap_profiler.py
+++ b/cdp/heap_profiler.py
@@ -6,10 +6,10 @@
# CDP domain: HeapProfiler (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import runtime
diff --git a/cdp/indexed_db.py b/cdp/indexed_db.py
index 56f1328..06fe96d 100644
--- a/cdp/indexed_db.py
+++ b/cdp/indexed_db.py
@@ -6,10 +6,10 @@
# CDP domain: IndexedDB (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import runtime
diff --git a/cdp/input_.py b/cdp/input_.py
index 8c67b46..4705f37 100644
--- a/cdp/input_.py
+++ b/cdp/input_.py
@@ -6,10 +6,10 @@
# CDP domain: Input
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
@dataclass
diff --git a/cdp/inspector.py b/cdp/inspector.py
index b2e33ea..77f318f 100644
--- a/cdp/inspector.py
+++ b/cdp/inspector.py
@@ -6,10 +6,10 @@
# CDP domain: Inspector (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
def disable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
diff --git a/cdp/io.py b/cdp/io.py
index 7cd96b1..3590778 100644
--- a/cdp/io.py
+++ b/cdp/io.py
@@ -6,10 +6,10 @@
# CDP domain: IO
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import runtime
diff --git a/cdp/layer_tree.py b/cdp/layer_tree.py
index da934b4..c94d67c 100644
--- a/cdp/layer_tree.py
+++ b/cdp/layer_tree.py
@@ -6,10 +6,10 @@
# CDP domain: LayerTree (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import dom
diff --git a/cdp/log.py b/cdp/log.py
index 4932020..44db254 100644
--- a/cdp/log.py
+++ b/cdp/log.py
@@ -6,10 +6,10 @@
# CDP domain: Log
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import network
from . import runtime
diff --git a/cdp/media.py b/cdp/media.py
index 3699f4c..fd15922 100644
--- a/cdp/media.py
+++ b/cdp/media.py
@@ -6,10 +6,10 @@
# CDP domain: Media (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
class PlayerId(str):
diff --git a/cdp/memory.py b/cdp/memory.py
index 6647a7f..2336ce1 100644
--- a/cdp/memory.py
+++ b/cdp/memory.py
@@ -6,10 +6,10 @@
# CDP domain: Memory (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
class PressureLevel(enum.Enum):
diff --git a/cdp/network.py b/cdp/network.py
index 2826d71..0d10271 100644
--- a/cdp/network.py
+++ b/cdp/network.py
@@ -6,10 +6,10 @@
# CDP domain: Network
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import debugger
from . import emulation
diff --git a/cdp/overlay.py b/cdp/overlay.py
index 31b788e..4d72822 100644
--- a/cdp/overlay.py
+++ b/cdp/overlay.py
@@ -6,10 +6,10 @@
# CDP domain: Overlay (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import dom
from . import page
diff --git a/cdp/page.py b/cdp/page.py
index e4ac78f..44e93e8 100644
--- a/cdp/page.py
+++ b/cdp/page.py
@@ -6,10 +6,10 @@
# CDP domain: Page
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import debugger
from . import dom
diff --git a/cdp/performance.py b/cdp/performance.py
index 1d5f2b8..b2193d0 100644
--- a/cdp/performance.py
+++ b/cdp/performance.py
@@ -6,10 +6,10 @@
# CDP domain: Performance
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from deprecated.sphinx import deprecated # type: ignore
diff --git a/cdp/performance_timeline.py b/cdp/performance_timeline.py
index ac95409..408849d 100644
--- a/cdp/performance_timeline.py
+++ b/cdp/performance_timeline.py
@@ -6,10 +6,10 @@
# CDP domain: PerformanceTimeline (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import dom
from . import network
diff --git a/cdp/profiler.py b/cdp/profiler.py
index d08a84f..0dc100f 100644
--- a/cdp/profiler.py
+++ b/cdp/profiler.py
@@ -6,10 +6,10 @@
# CDP domain: Profiler
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import debugger
from . import runtime
diff --git a/cdp/runtime.py b/cdp/runtime.py
index 1f833f8..043a762 100644
--- a/cdp/runtime.py
+++ b/cdp/runtime.py
@@ -6,10 +6,10 @@
# CDP domain: Runtime
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
class ScriptId(str):
diff --git a/cdp/schema.py b/cdp/schema.py
index 0eea84d..2898475 100644
--- a/cdp/schema.py
+++ b/cdp/schema.py
@@ -6,10 +6,10 @@
# CDP domain: Schema
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
@dataclass
diff --git a/cdp/security.py b/cdp/security.py
index cbd4583..c8d4c98 100644
--- a/cdp/security.py
+++ b/cdp/security.py
@@ -6,10 +6,10 @@
# CDP domain: Security
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import network
from deprecated.sphinx import deprecated # type: ignore
diff --git a/cdp/service_worker.py b/cdp/service_worker.py
index 4bbee18..3a4db60 100644
--- a/cdp/service_worker.py
+++ b/cdp/service_worker.py
@@ -6,10 +6,10 @@
# CDP domain: ServiceWorker (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import target
diff --git a/cdp/storage.py b/cdp/storage.py
index 9c0a2ae..75f1b6b 100644
--- a/cdp/storage.py
+++ b/cdp/storage.py
@@ -6,10 +6,10 @@
# CDP domain: Storage (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import browser
from . import network
diff --git a/cdp/system_info.py b/cdp/system_info.py
index a85621f..4785d98 100644
--- a/cdp/system_info.py
+++ b/cdp/system_info.py
@@ -6,10 +6,10 @@
# CDP domain: SystemInfo (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
@dataclass
diff --git a/cdp/target.py b/cdp/target.py
index 5e3d4a2..cb0b707 100644
--- a/cdp/target.py
+++ b/cdp/target.py
@@ -6,10 +6,10 @@
# CDP domain: Target
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import browser
from . import page
diff --git a/cdp/tethering.py b/cdp/tethering.py
index 5ca8510..d8ac991 100644
--- a/cdp/tethering.py
+++ b/cdp/tethering.py
@@ -6,10 +6,10 @@
# CDP domain: Tethering (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
def bind(
diff --git a/cdp/tracing.py b/cdp/tracing.py
index e301790..84eeb4b 100644
--- a/cdp/tracing.py
+++ b/cdp/tracing.py
@@ -6,10 +6,10 @@
# CDP domain: Tracing (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
from . import io
diff --git a/cdp/web_audio.py b/cdp/web_audio.py
index fb89592..71dc4ce 100644
--- a/cdp/web_audio.py
+++ b/cdp/web_audio.py
@@ -6,10 +6,10 @@
# CDP domain: WebAudio (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
class GraphObjectId(str):
diff --git a/cdp/web_authn.py b/cdp/web_authn.py
index 727d36a..4cec9c2 100644
--- a/cdp/web_authn.py
+++ b/cdp/web_authn.py
@@ -6,10 +6,10 @@
# CDP domain: WebAuthn (experimental)
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
class AuthenticatorId(str):
diff --git a/generator/generate.py b/generator/generate.py
index be5ad41..0b37109 100644
--- a/generator/generate.py
+++ b/generator/generate.py
@@ -32,10 +32,10 @@
# CDP domain: {{}}{{}}
from __future__ import annotations
-from cdp.util import event_class, T_JSON_DICT
-from dataclasses import dataclass
import enum
import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
'''.format(SHARED_HEADER)
From d15c886b2a08981d66deb2878cfa7616f7c78850 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Tue, 1 Mar 2022 20:14:40 -0300
Subject: [PATCH 27/81] use relative import in init
---
cdp/__init__.py | 49 +------------------------------------------
generator/generate.py | 4 +---
2 files changed, 2 insertions(+), 51 deletions(-)
diff --git a/cdp/__init__.py b/cdp/__init__.py
index 5ae05c7..bfead84 100644
--- a/cdp/__init__.py
+++ b/cdp/__init__.py
@@ -3,51 +3,4 @@
# This file is generated from the CDP specification. If you need to make
# changes, edit the generator and regenerate all of the modules.
-import cdp.util
-
-import cdp.accessibility
-import cdp.animation
-import cdp.audits
-import cdp.background_service
-import cdp.browser
-import cdp.css
-import cdp.cache_storage
-import cdp.cast
-import cdp.console
-import cdp.dom
-import cdp.dom_debugger
-import cdp.dom_snapshot
-import cdp.dom_storage
-import cdp.database
-import cdp.debugger
-import cdp.device_orientation
-import cdp.emulation
-import cdp.event_breakpoints
-import cdp.fetch
-import cdp.headless_experimental
-import cdp.heap_profiler
-import cdp.io
-import cdp.indexed_db
-import cdp.input_
-import cdp.inspector
-import cdp.layer_tree
-import cdp.log
-import cdp.media
-import cdp.memory
-import cdp.network
-import cdp.overlay
-import cdp.page
-import cdp.performance
-import cdp.performance_timeline
-import cdp.profiler
-import cdp.runtime
-import cdp.schema
-import cdp.security
-import cdp.service_worker
-import cdp.storage
-import cdp.system_info
-import cdp.target
-import cdp.tethering
-import cdp.tracing
-import cdp.web_audio
-import cdp.web_authn
+from . import (accessibility, animation, audits, background_service, browser, css, cache_storage, cast, console, dom, dom_debugger, dom_snapshot, dom_storage, database, debugger, device_orientation, emulation, event_breakpoints, fetch, headless_experimental, heap_profiler, io, indexed_db, input_, inspector, layer_tree, log, media, memory, network, overlay, page, performance, performance_timeline, profiler, runtime, schema, security, service_worker, storage, system_info, target, tethering, tracing, web_audio, web_authn)
\ No newline at end of file
diff --git a/generator/generate.py b/generator/generate.py
index 0b37109..28beeeb 100644
--- a/generator/generate.py
+++ b/generator/generate.py
@@ -938,9 +938,7 @@ def generate_init(init_path, domains):
'''
with init_path.open('w') as init_file:
init_file.write(INIT_HEADER)
- init_file.write('import cdp.util\n\n')
- for domain in domains:
- init_file.write('import cdp.{}\n'.format(domain.module))
+ init_file.write('from . import ({})'.format(', '.join(domain.module for domain in domains)))
def generate_docs(docs_path, domains):
From 38c2771436b3b6c5948f29c7e287b82b874f56a4 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Tue, 1 Mar 2022 20:39:18 -0300
Subject: [PATCH 28/81] add fork notice to readme
---
README.md | 13 ++++---------
1 file changed, 4 insertions(+), 9 deletions(-)
diff --git a/README.md b/README.md
index ebdbe49..4fcf6d9 100644
--- a/README.md
+++ b/README.md
@@ -1,11 +1,7 @@
# PyCDP
+Up to date fork of [python-chrome-devtools-protocol][1]. Currently supports CDP [r970581][2] (Chrome 97).
-[](https://pypi.org/project/chrome-devtools-protocol/)
-
-
-[](https://travis-ci.com/HyperionGray/python-chrome-devtools-protocol)
-[](https://py-cdp.readthedocs.io)
-
+## Description
Python Chrome DevTools Protocol (shortened to PyCDP) is a library that provides
Python wrappers for the types, commands, and events specified in the [Chrome
DevTools Protocol](https://github.com/ChromeDevTools/devtools-protocol/).
@@ -27,6 +23,5 @@ opening a socket or negotiating a WebSocket protocol. Instead, that
responsibility is left to higher-level libraries, for example
[trio-chrome-devtools-protocol](https://github.com/hyperiongray/trio-chrome-devtools-protocol).
-For more information, see the [complete documentation](https://py-cdp.readthedocs.io).
-
-
+[1]: https://github.com/HyperionGray/python-chrome-devtools-protocol
+[2]: https://github.com/ChromeDevTools/devtools-protocol/tree/1b1e643d77dacc9568b5acc1efdeaec19c048a27
\ No newline at end of file
From 541efeb7ce54bf49f5a7fd26db8eda3bca3dfd62 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Tue, 1 Mar 2022 20:40:34 -0300
Subject: [PATCH 29/81] add fork notice to readme
---
README.md | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/README.md b/README.md
index 4fcf6d9..3362534 100644
--- a/README.md
+++ b/README.md
@@ -23,5 +23,11 @@ opening a socket or negotiating a WebSocket protocol. Instead, that
responsibility is left to higher-level libraries, for example
[trio-chrome-devtools-protocol](https://github.com/hyperiongray/trio-chrome-devtools-protocol).
+
+
+Copyright © 2018 Hyperion Gray
+Copyright © 2022 Heraldo Lucena
+
+
[1]: https://github.com/HyperionGray/python-chrome-devtools-protocol
[2]: https://github.com/ChromeDevTools/devtools-protocol/tree/1b1e643d77dacc9568b5acc1efdeaec19c048a27
\ No newline at end of file
From 018e2061ee0bca133e89cf2d8b0843347e47896d Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Tue, 1 Mar 2022 22:32:11 -0300
Subject: [PATCH 30/81] add standalone CDP types generation
---
.gitignore | 19 ++--
Makefile | 6 +-
README.md | 22 ++++
cdp/util.py | 1 -
{generator => cdpgen}/README.md | 0
cdpgen/__init__.py | 0
{generator => cdpgen}/browser_protocol.json | 0
{generator => cdpgen}/generate.py | 114 ++++++++++++++++----
{generator => cdpgen}/js_protocol.json | 0
{generator => cdpgen}/test_generate.py | 2 +-
pyproject.toml | 3 +
11 files changed, 131 insertions(+), 36 deletions(-)
rename {generator => cdpgen}/README.md (100%)
create mode 100644 cdpgen/__init__.py
rename {generator => cdpgen}/browser_protocol.json (100%)
rename {generator => cdpgen}/generate.py (92%)
rename {generator => cdpgen}/js_protocol.json (100%)
rename {generator => cdpgen}/test_generate.py (99%)
diff --git a/.gitignore b/.gitignore
index f8ddaa9..3eae1d5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,10 +1,11 @@
-*.egg-info
-.ipynb_checkpoints
-.mypy_cache
-.pytest_cache
__pycache__
-.vscode
-build
-dist
-docs/_build
-venv
+/build
+/dist
+/docs/_build
+/.venv
+/*.egg-info
+/.ipynb_checkpoints
+/.mypy_cache
+/.pytest_cache
+/.vscode
+/.tool-versions
diff --git a/Makefile b/Makefile
index 8aac250..dde37e5 100644
--- a/Makefile
+++ b/Makefile
@@ -9,19 +9,19 @@ docs:
$(MAKE) -C docs html
generate:
- python generator/generate.py
+ python cdpgen/generate.py
mypy-cdp:
mypy cdp/
mypy-generate:
- mypy generator/
+ mypy cdpgen/
test-cdp:
pytest test/
test-generate:
- pytest generator/
+ pytest cdpgen/
test-import:
python -c 'import cdp; print(cdp.accessibility)'
diff --git a/README.md b/README.md
index 3362534..ea2d757 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,28 @@
# PyCDP
Up to date fork of [python-chrome-devtools-protocol][1]. Currently supports CDP [r970581][2] (Chrome 97).
+## Usage
+You can install this package as a dependency to use the builtin CDP types with `import cdp`, but if you want to try a different CDP version you can build new wrappers with `cdpgen` command:
+```
+usage: cdpgen
+
+Generate Python types for the Chrome Devtools Protocol API. JSON files for the CDP spec can be found at https://github.com/ChromeDevTools/devtools-protocol
+
+optional arguments:
+ -h, --help show this help message and exit
+ --browser-protocol BROWSER_PROTOCOL
+ JSON file for the browser protocol
+ --js-protocol JS_PROTOCOL
+ JSON file for the javascript protocol
+ --output OUTPUT output path for the generated Python modules
+
+```
+Example:
+```sh
+cdpgen --browser-protocol browser_protocol.json --js-protocol js_protocol.json --output /tmp/cdp
+```
+You can then include the `/tmp/cdp` package in your project and import it like the builtin CDP types.
+
## Description
Python Chrome DevTools Protocol (shortened to PyCDP) is a library that provides
Python wrappers for the types, commands, and events specified in the [Chrome
diff --git a/cdp/util.py b/cdp/util.py
index 1403b48..82c9de3 100644
--- a/cdp/util.py
+++ b/cdp/util.py
@@ -1,4 +1,3 @@
-import cdp
import typing
diff --git a/generator/README.md b/cdpgen/README.md
similarity index 100%
rename from generator/README.md
rename to cdpgen/README.md
diff --git a/cdpgen/__init__.py b/cdpgen/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/generator/browser_protocol.json b/cdpgen/browser_protocol.json
similarity index 100%
rename from generator/browser_protocol.json
rename to cdpgen/browser_protocol.json
diff --git a/generator/generate.py b/cdpgen/generate.py
similarity index 92%
rename from generator/generate.py
rename to cdpgen/generate.py
index 28beeeb..f602a52 100644
--- a/generator/generate.py
+++ b/cdpgen/generate.py
@@ -1,33 +1,41 @@
-import builtins
-from dataclasses import dataclass
-from enum import Enum
-import itertools
+import re
+import os
import json
+import typing
+import builtins
import logging
import operator
-import os
+import itertools
+import inflection # type: ignore
+from enum import Enum
from pathlib import Path
-import re
+from dataclasses import dataclass
+from argparse import ArgumentParser, ArgumentTypeError
from textwrap import dedent, indent as tw_indent
-import typing
-
-import inflection # type: ignore
log_level = getattr(logging, os.environ.get('LOG_LEVEL', 'info').upper())
logging.basicConfig(level=log_level)
-logger = logging.getLogger('generate')
+logger = logging.getLogger('cdpgen')
-SHARED_HEADER = '''# DO NOT EDIT THIS FILE!
+
+GENERATED_PACKAGE_NOTICE = """## Generated by PyCDP
+The modules of this package were generated by [pycdp][1], do not modify their contents because the
+changes will be overwritten in next generations.
+
+[1]: https://github.com/HMaker/python-chrome-devtools-protocol
+"""
+
+SHARED_HEADER = """# DO NOT EDIT THIS FILE!
#
# This file is generated from the CDP specification. If you need to make
-# changes, edit the generator and regenerate all of the modules.'''
+# changes, edit the generator and regenerate all of the modules."""
-INIT_HEADER = '''{}
+INIT_HEADER = """{}
-'''.format(SHARED_HEADER)
+""".format(SHARED_HEADER)
-MODULE_HEADER = '''{}
+MODULE_HEADER = """{}
#
# CDP domain: {{}}{{}}
@@ -37,19 +45,18 @@
from dataclasses import dataclass
from .util import event_class, T_JSON_DICT
-'''.format(SHARED_HEADER)
+""".format(SHARED_HEADER)
current_version = ''
+BACKTICK_RE = re.compile(r'`([^`]+)`(\w+)?')
+
def indent(s: str, n: int):
''' A shortcut for ``textwrap.indent`` that always uses spaces. '''
return tw_indent(s, n * ' ')
-BACKTICK_RE = re.compile(r'`([^`]+)`(\w+)?')
-
-
def escape_backticks(docstr: str) -> str:
'''
Escape backticks in a docstring by doubling them up.
@@ -958,8 +965,8 @@ def generate_docs(docs_path, domains):
f.write(domain.generate_sphinx())
-def main():
- ''' Main entry point. '''
+def selfgen():
+ '''Generate CDP types and docs for ourselves'''
here = Path(__file__).parent.resolve()
json_paths = [
here / 'browser_protocol.json',
@@ -1013,5 +1020,68 @@ def main():
py_typed_path.touch()
+def cdpgen():
+ """Generate CDP types for third-party usage."""
+ def file_type(path: str):
+ if not Path(path).is_file():
+ raise ArgumentTypeError('is not a file')
+ return path
+ parser = ArgumentParser(
+ usage='%(prog)s ',
+ description=(
+ 'Generate Python types for the Chrome Devtools Protocol (CDP) specification.\n'
+ 'JSON files for the CDP spec can be found at https://github.com/ChromeDevTools/devtools-protocol'
+ )
+ )
+ parser.add_argument(
+ '--browser-protocol',
+ type=file_type,
+ required=True,
+ help='JSON file for the browser protocol'
+ )
+ parser.add_argument(
+ '--js-protocol',
+ type=file_type,
+ required=True,
+ help='JSON file for the javascript protocol'
+ )
+ parser.add_argument(
+ '--output',
+ required=True,
+ help='output path for the generated Python modules'
+ )
+ args = parser.parse_args()
+ browser_proto = Path(args.browser_protocol)
+ js_proto = Path(args.js_protocol)
+ output = Path(args.output)
+ if not output.exists():
+ output.mkdir()
+ # parse the spec files
+ domains = list()
+ for json_path in (browser_proto, js_proto):
+ logger.info('Parsing JSON file %s', json_path)
+ domains.extend(parse(json_path, output))
+ domains.sort(key=operator.attrgetter('domain'))
+ # fix errors from spec files
+ for domain in domains:
+ if domain.domain == 'DOM':
+ for cmd in domain.commands:
+ if cmd.name == 'resolveNode':
+ # Patch 1
+ cmd.parameters[1].ref = 'BackendNodeId'
+ elif domain.domain == 'Page':
+ for event in domain.events:
+ if event.name == 'screencastVisibilityChanged':
+ # Patch 2
+ event.description = event.description.replace('`', '')
+ # generate python code
+ for domain in domains:
+ logger.info('Generating module: %s → %s/%s.py', domain.domain, output, domain.module)
+ (output / f'{domain.module}.py').write_text(domain.generate_code())
+ generate_init(output / '__init__.py', domains)
+ (output / 'README.md').write_text(GENERATED_PACKAGE_NOTICE)
+ (output / 'py.typed').touch()
+
+
if __name__ == '__main__':
- main()
+ selfgen()
diff --git a/generator/js_protocol.json b/cdpgen/js_protocol.json
similarity index 100%
rename from generator/js_protocol.json
rename to cdpgen/js_protocol.json
diff --git a/generator/test_generate.py b/cdpgen/test_generate.py
similarity index 99%
rename from generator/test_generate.py
rename to cdpgen/test_generate.py
index f2b2654..46b4ec0 100644
--- a/generator/test_generate.py
+++ b/cdpgen/test_generate.py
@@ -12,7 +12,7 @@
from textwrap import dedent
-from generate import CdpCommand, CdpDomain, CdpEvent, CdpType, docstring
+from .generate import CdpCommand, CdpDomain, CdpEvent, CdpType, docstring
def test_docstring():
diff --git a/pyproject.toml b/pyproject.toml
index b60438e..a4e808c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -25,6 +25,9 @@ sphinx = "^3.0.1"
sphinx-autodoc-typehints = "^1.10.3"
sphinx-rtd-theme = "^0.4.3"
+[tool.poetry.scripts]
+cdpgen = "cdpgen.generate:cdpgen"
+
[build-system]
requires = ["poetry>=0.12"]
build-backend = "poetry.masonry.api"
From ace1eb1a138b48131549e85064a18c39e391c866 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Tue, 1 Mar 2022 22:39:37 -0300
Subject: [PATCH 31/81] update readme introduction
---
README.md | 5 ++---
1 file changed, 2 insertions(+), 3 deletions(-)
diff --git a/README.md b/README.md
index ea2d757..0429c86 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,5 @@
# PyCDP
-Up to date fork of [python-chrome-devtools-protocol][1]. Currently supports CDP [r970581][2] (Chrome 97).
+Python type wrappers generator for the Chrome DevTools Protocol (CDP). Currently supports CDP [r970581][2] (Chrome 97).
## Usage
You can install this package as a dependency to use the builtin CDP types with `import cdp`, but if you want to try a different CDP version you can build new wrappers with `cdpgen` command:
@@ -44,8 +44,7 @@ flexibility, this library does not actually handle any network I/O, such as
opening a socket or negotiating a WebSocket protocol. Instead, that
responsibility is left to higher-level libraries, for example
[trio-chrome-devtools-protocol](https://github.com/hyperiongray/trio-chrome-devtools-protocol).
-
-
+
Copyright © 2018 Hyperion Gray
Copyright © 2022 Heraldo Lucena
From 8ca20596e4d1c135b4858c95c3d340c3b4decc25 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Tue, 1 Mar 2022 22:52:48 -0300
Subject: [PATCH 32/81] update cli help
---
cdpgen/generate.py | 6 ++----
1 file changed, 2 insertions(+), 4 deletions(-)
diff --git a/cdpgen/generate.py b/cdpgen/generate.py
index f602a52..930f2cf 100644
--- a/cdpgen/generate.py
+++ b/cdpgen/generate.py
@@ -1028,10 +1028,8 @@ def file_type(path: str):
return path
parser = ArgumentParser(
usage='%(prog)s ',
- description=(
- 'Generate Python types for the Chrome Devtools Protocol (CDP) specification.\n'
- 'JSON files for the CDP spec can be found at https://github.com/ChromeDevTools/devtools-protocol'
- )
+ description='Generate Python types for the Chrome Devtools Protocol (CDP) specification.',
+ epilog='JSON files for the CDP spec can be found at https://github.com/ChromeDevTools/devtools-protocol'
)
parser.add_argument(
'--browser-protocol',
From 36c5204175c80aca170333ecf3c0245725214a63 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Sat, 5 Mar 2022 18:37:19 -0300
Subject: [PATCH 33/81] update description
---
LICENSE | 1 +
README.md | 58 +++++++++++++++++++++++++++----------------------------
2 files changed, 30 insertions(+), 29 deletions(-)
diff --git a/LICENSE b/LICENSE
index 5759baa..f8a5884 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,7 @@
The MIT License (MIT)
Copyright (c) 2018 Hyperion Gray
+Copyright (c) 2022 Heraldo Lucena
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/README.md b/README.md
index 0429c86..7b606bc 100644
--- a/README.md
+++ b/README.md
@@ -1,12 +1,31 @@
-# PyCDP
-Python type wrappers generator for the Chrome DevTools Protocol (CDP). Currently supports CDP [r970581][2] (Chrome 97).
+# Python CDP
+Python CDP Generator (shortened to PyCDP) is a library that provides
+Python wrappers for the types, commands, and events specified in the [Chrome
+DevTools Protocol][1]. Currently supports CDP [r970581][2] (Chrome 97).
+
+The Chrome DevTools Protocol provides for remote control of a web browser by
+sending JSON messages over a WebSocket. That JSON format is described by a
+machine-readable specification. This specification is used to automatically
+generate the classes and methods found in this library.
+
+You could write a CDP client by connecting a WebSocket and then sending JSON
+objects, but this would be tedious and error-prone: the Python interpreter would
+not catch any typos in your JSON objects, and you wouldn't get autocomplete for
+any parts of the JSON data structure. By providing a set of native Python
+wrappers, this project makes it easier and faster to write CDP client code.
+
+**This library does not perform any I/O!** In order to maximize
+flexibility, this library does not actually handle any network I/O, such as
+opening a socket or negotiating a WebSocket protocol. Instead, that
+responsibility is left to higher-level libraries, for example
+[trio-chrome-devtools-protocol][4].
## Usage
You can install this package as a dependency to use the builtin CDP types with `import cdp`, but if you want to try a different CDP version you can build new wrappers with `cdpgen` command:
```
usage: cdpgen
-Generate Python types for the Chrome Devtools Protocol API. JSON files for the CDP spec can be found at https://github.com/ChromeDevTools/devtools-protocol
+Generate Python types for the Chrome Devtools Protocol (CDP) specification.
optional arguments:
-h, --help show this help message and exit
@@ -16,6 +35,7 @@ optional arguments:
JSON file for the javascript protocol
--output OUTPUT output path for the generated Python modules
+JSON files for the CDP spec can be found at https://github.com/ChromeDevTools/devtools-protocol
```
Example:
```sh
@@ -23,32 +43,12 @@ cdpgen --browser-protocol browser_protocol.json --js-protocol js_protocol.json -
```
You can then include the `/tmp/cdp` package in your project and import it like the builtin CDP types.
-## Description
-Python Chrome DevTools Protocol (shortened to PyCDP) is a library that provides
-Python wrappers for the types, commands, and events specified in the [Chrome
-DevTools Protocol](https://github.com/ChromeDevTools/devtools-protocol/).
-
-The Chrome DevTools Protocol provides for remote control of a web browser by
-sending JSON messages over a WebSocket. That JSON format is described by a
-machine-readable specification. This specification is used to automatically
-generate the classes and methods found in this library.
-
-You could write a CDP client by connecting a WebSocket and then sending JSON
-objects, but this would be tedious and error-prone: the Python interpreter would
-not catch any typos in your JSON objects, and you wouldn't get autocomplete for
-any parts of the JSON data structure. By providing a set of native Python
-wrappers, this project makes it easier and faster to write CDP client code.
-
-**This library does not perform any I/O!** In order to maximize
-flexibility, this library does not actually handle any network I/O, such as
-opening a socket or negotiating a WebSocket protocol. Instead, that
-responsibility is left to higher-level libraries, for example
-[trio-chrome-devtools-protocol](https://github.com/hyperiongray/trio-chrome-devtools-protocol).
-
+
-Copyright © 2018 Hyperion Gray
-Copyright © 2022 Heraldo Lucena
+PyCDP is licensed under the [MIT License](./LICENSE).
-[1]: https://github.com/HyperionGray/python-chrome-devtools-protocol
-[2]: https://github.com/ChromeDevTools/devtools-protocol/tree/1b1e643d77dacc9568b5acc1efdeaec19c048a27
\ No newline at end of file
+[1]: https://github.com/ChromeDevTools/devtools-protocol/
+[2]: https://github.com/HyperionGray/python-chrome-devtools-protocol
+[3]: https://github.com/ChromeDevTools/devtools-protocol/tree/1b1e643d77dacc9568b5acc1efdeaec19c048a27
+[4]: https://github.com/hyperiongray/trio-chrome-devtools-protocol
From f5b6516ba15e31a9b0b4c8be05721fd035abcdd5 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Sat, 5 Mar 2022 18:39:54 -0300
Subject: [PATCH 34/81] update description
---
README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 7b606bc..758bfdd 100644
--- a/README.md
+++ b/README.md
@@ -45,7 +45,7 @@ You can then include the `/tmp/cdp` package in your project and import it like t
-PyCDP is licensed under the [MIT License](./LICENSE).
+PyCDP is licensed under the [MIT License](LICENSE).
[1]: https://github.com/ChromeDevTools/devtools-protocol/
From c2a1e65166f4ee697418877d847c3860e386b13a Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Sat, 5 Mar 2022 22:09:01 -0300
Subject: [PATCH 35/81] fix nullbytes in docstrings
---
README.md | 2 +-
cdp/fetch.py | 4 ++--
cdpgen/generate.py | 32 +++++++++++++++-----------------
3 files changed, 18 insertions(+), 20 deletions(-)
diff --git a/README.md b/README.md
index 758bfdd..95bff42 100644
--- a/README.md
+++ b/README.md
@@ -45,7 +45,7 @@ You can then include the `/tmp/cdp` package in your project and import it like t
-PyCDP is licensed under the [MIT License](LICENSE).
+PyCDP is licensed under the MIT License.
[1]: https://github.com/ChromeDevTools/devtools-protocol/
diff --git a/cdp/fetch.py b/cdp/fetch.py
index 9c19963..0ba5296 100644
--- a/cdp/fetch.py
+++ b/cdp/fetch.py
@@ -235,7 +235,7 @@ def fulfill_request(
body: typing.Optional[str] = None,
response_phrase: typing.Optional[str] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
- '''
+ r'''
Provides response to the request.
:param request_id: An id the client received in requestPaused event.
@@ -327,7 +327,7 @@ def continue_response(
response_headers: typing.Optional[typing.List[HeaderEntry]] = None,
binary_response_headers: typing.Optional[str] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
- '''
+ r'''
Continues loading of the paused response, optionally modifying the
response headers. If either responseCode or headers are modified, all of them
must be present.
diff --git a/cdpgen/generate.py b/cdpgen/generate.py
index 930f2cf..bef85bb 100644
--- a/cdpgen/generate.py
+++ b/cdpgen/generate.py
@@ -95,9 +95,12 @@ def docstring(description: typing.Optional[str]) -> str:
''' Generate a docstring from a description. '''
if not description:
return ''
-
+ # if original description uses escape sequences it should be generated as a raw docstring
description = escape_backticks(description)
- return dedent("'''\n{}\n'''").format(description)
+ if '\\' in description:
+ return dedent("r'''\n{}\n'''").format(description)
+ else:
+ return dedent("'''\n{}\n'''").format(description)
def is_builtin(name: str) -> bool:
@@ -215,18 +218,18 @@ def py_annotation(self) -> str:
return ann
@classmethod
- def from_json(cls, property, domain: str) -> 'CdpProperty':
+ def from_json(cls, prop, domain: str) -> 'CdpProperty':
''' Instantiate a CDP property from a JSON object. '''
return cls(
- property['name'],
- property.get('description'),
- property.get('type'),
- property.get('$ref'),
- property.get('enum'),
- CdpItems.from_json(property['items']) if 'items' in property else None,
- property.get('optional', False),
- property.get('experimental', False),
- property.get('deprecated', False),
+ prop['name'],
+ prop.get('description'),
+ prop.get('type'),
+ prop.get('$ref'),
+ prop.get('enum'),
+ CdpItems.from_json(prop['items']) if 'items' in prop else None,
+ prop.get('optional', False),
+ prop.get('experimental', False),
+ prop.get('deprecated', False),
domain
)
@@ -975,11 +978,6 @@ def selfgen():
output_path = here.parent / 'cdp'
output_path.mkdir(exist_ok=True)
- # Remove generated code
- for subpath in output_path.iterdir():
- if subpath.is_file() and subpath.name not in ('py.typed', 'util.py'):
- subpath.unlink()
-
# Parse domains
domains = list()
for json_path in json_paths:
From e05a5c50bdb868b0cfcd05c7eef8f11d87dd8800 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Tue, 29 Mar 2022 12:55:59 -0300
Subject: [PATCH 36/81] fix pyproject.toml and complete readme
---
README.md | 30 ++++++++++++++++++++++++++++++
pyproject.toml | 18 +++++++++++++-----
2 files changed, 43 insertions(+), 5 deletions(-)
diff --git a/README.md b/README.md
index 95bff42..1a9fccd 100644
--- a/README.md
+++ b/README.md
@@ -20,6 +20,14 @@ opening a socket or negotiating a WebSocket protocol. Instead, that
responsibility is left to higher-level libraries, for example
[trio-chrome-devtools-protocol][4].
+## Installation
+You can install this library as a dependency on your project with:
+```
+pip install git+https://github.com/HMaker/python-cdp.git@1.0.0
+```
+Change the git tag `@1.0.0` if you need another version. To install for development, clone this
+repository, install [Poetry][5] package manager and run `poetry install` to install dependencies.
+
## Usage
You can install this package as a dependency to use the builtin CDP types with `import cdp`, but if you want to try a different CDP version you can build new wrappers with `cdpgen` command:
```
@@ -43,6 +51,27 @@ cdpgen --browser-protocol browser_protocol.json --js-protocol js_protocol.json -
```
You can then include the `/tmp/cdp` package in your project and import it like the builtin CDP types.
+## Implementation of a CDP client
+The `cdp` package follows same structure of CDP domains, each domain is a Python class and each command
+a method of that class.
+
+Each method is a generator function with a single yield which is a Python dict, on the CDP wire format,
+containing the message that should be sent to the browser:
+```python
+import cdp
+
+# Get all CDP targets
+command = cdp.target.get_targets() # this is a generator
+raw_cdp_request = next(command) # receive the yield
+raw_cdp_response = send_cdp_request(raw_cdp_request) # you implement send_cdp_request; raw_cdp_request is the JSON object that should be sent to the browser
+try:
+    command.send(raw_cdp_response) # send the response to the generator, where raw_cdp_response is the JSON object received from the browser; it will raise StopIteration
+    raise RuntimeError("the generator didn't exit!") # this shouldn't happen
+except StopIteration as result:
+ response = result.value # the parsed response to Target.get_targets() command
+print(response)
+```
+
PyCDP is licensed under the MIT License.
@@ -52,3 +81,4 @@ PyCDP is licensed under the MIT License.
[2]: https://github.com/HyperionGray/python-chrome-devtools-protocol
[3]: https://github.com/ChromeDevTools/devtools-protocol/tree/1b1e643d77dacc9568b5acc1efdeaec19c048a27
[4]: https://github.com/hyperiongray/trio-chrome-devtools-protocol
+[5]: https://python-poetry.org/docs/
diff --git a/pyproject.toml b/pyproject.toml
index a4e808c..fbe2551 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,21 +1,29 @@
[tool.poetry]
name = "chrome-devtools-protocol"
-packages = [{include = "cdp"}]
-version = "0.5.0"
+version = "1.0.0"
description = "Python type wrappers for Chrome DevTools Protocol (CDP)"
-authors = ["Mark E. Haase ", "Brian Mackintosh "]
+packages = [
+ {include = "cdp"},
+ {include = "cdpgen"}
+]
license = "MIT"
readme = "README.md"
-homepage = "https://github.com/hyperiongray/python-chrome-devtools-protocol"
+homepage = "https://github.com/HMaker/python-cdp"
classifiers = [
+ "Programming Language :: Python :: 3 :: Only",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Internet"
]
+authors = [
+ "Mark E. Haase ",
+ "Brian Mackintosh ",
+ "Heraldo Lucena "
+]
[tool.poetry.dependencies]
python = "^3.7"
-deprecated = "^1.2.9"
+deprecated = "1.2.9"
[tool.poetry.dev-dependencies]
inflection = "^0.4.0"
From 8be9977b03b0ab2423fce176fab0e2361d7cb234 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Tue, 29 Mar 2022 13:00:21 -0300
Subject: [PATCH 37/81] update dependencies lock
---
poetry.lock | 678 ++++++++++++++++++++++++++++------------------------
1 file changed, 371 insertions(+), 307 deletions(-)
diff --git a/poetry.lock b/poetry.lock
index 2b95fae..1b4493b 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,77 +1,78 @@
[[package]]
-category = "dev"
-description = "A configurable sidebar-enabled Sphinx theme"
name = "alabaster"
+version = "0.7.12"
+description = "A configurable sidebar-enabled Sphinx theme"
+category = "dev"
optional = false
python-versions = "*"
-version = "0.7.12"
[[package]]
-category = "dev"
-description = "Atomic file writes."
-marker = "sys_platform == \"win32\""
name = "atomicwrites"
+version = "1.4.0"
+description = "Atomic file writes."
+category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-version = "1.3.0"
[[package]]
-category = "dev"
-description = "Classes Without Boilerplate"
name = "attrs"
+version = "21.4.0"
+description = "Classes Without Boilerplate"
+category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-version = "19.3.0"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[package.extras]
-azure-pipelines = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "pytest-azurepipelines"]
-dev = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "pre-commit"]
-docs = ["sphinx", "zope.interface"]
-tests = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"]
+dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
+docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
+tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
+tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"]
[[package]]
-category = "dev"
-description = "Internationalization utilities"
name = "babel"
+version = "2.9.1"
+description = "Internationalization utilities"
+category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-version = "2.8.0"
[package.dependencies]
pytz = ">=2015.7"
[[package]]
-category = "dev"
-description = "Python package for providing Mozilla's CA Bundle."
name = "certifi"
+version = "2021.10.8"
+description = "Python package for providing Mozilla's CA Bundle."
+category = "dev"
optional = false
python-versions = "*"
-version = "2020.4.5.1"
[[package]]
+name = "charset-normalizer"
+version = "2.0.12"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "dev"
-description = "Universal encoding detector for Python 2 and 3"
-name = "chardet"
optional = false
-python-versions = "*"
-version = "3.0.4"
+python-versions = ">=3.5.0"
+
+[package.extras]
+unicode_backport = ["unicodedata2"]
[[package]]
-category = "dev"
-description = "Cross-platform colored terminal text."
-marker = "sys_platform == \"win32\""
name = "colorama"
+version = "0.4.4"
+description = "Cross-platform colored terminal text."
+category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-version = "0.4.3"
[[package]]
-category = "main"
-description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
name = "deprecated"
+version = "1.2.9"
+description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
+category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-version = "1.2.9"
[package.dependencies]
wrapt = ">=1.10,<2"
@@ -80,90 +81,91 @@ wrapt = ">=1.10,<2"
dev = ["tox", "bumpversion (<1)", "sphinx (<2)", "PyTest (<5)", "PyTest-Cov (<2.6)", "pytest", "pytest-cov"]
[[package]]
-category = "dev"
-description = "Docutils -- Python Documentation Utilities"
name = "docutils"
+version = "0.16"
+description = "Docutils -- Python Documentation Utilities"
+category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-version = "0.16"
[[package]]
-category = "dev"
-description = "Internationalized Domain Names in Applications (IDNA)"
name = "idna"
+version = "3.3"
+description = "Internationalized Domain Names in Applications (IDNA)"
+category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-version = "2.9"
+python-versions = ">=3.5"
[[package]]
-category = "dev"
-description = "Getting image size from png/jpeg/jpeg2000/gif file"
name = "imagesize"
+version = "1.3.0"
+description = "Getting image size from png/jpeg/jpeg2000/gif file"
+category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-version = "1.2.0"
[[package]]
-category = "dev"
-description = "Read metadata from Python packages"
-marker = "python_version < \"3.8\""
name = "importlib-metadata"
+version = "4.11.3"
+description = "Read metadata from Python packages"
+category = "dev"
optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
-version = "1.6.0"
+python-versions = ">=3.7"
[package.dependencies]
+typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
zipp = ">=0.5"
[package.extras]
-docs = ["sphinx", "rst.linker"]
-testing = ["packaging", "importlib-resources"]
+docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"]
+perf = ["ipython"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"]
[[package]]
-category = "dev"
-description = "A port of Ruby on Rails inflector to Python"
name = "inflection"
+version = "0.4.0"
+description = "A port of Ruby on Rails inflector to Python"
+category = "dev"
optional = false
python-versions = ">=3.5"
-version = "0.4.0"
[[package]]
-category = "dev"
-description = "A very fast and expressive template engine."
name = "jinja2"
+version = "3.1.1"
+description = "A very fast and expressive template engine."
+category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-version = "2.11.2"
+python-versions = ">=3.7"
[package.dependencies]
-MarkupSafe = ">=0.23"
+MarkupSafe = ">=2.0"
[package.extras]
-i18n = ["Babel (>=0.8)"]
+i18n = ["Babel (>=2.7)"]
[[package]]
-category = "dev"
-description = "Safely add untrusted strings to HTML/XML markup."
name = "markupsafe"
+version = "2.1.1"
+description = "Safely add untrusted strings to HTML/XML markup."
+category = "dev"
optional = false
-python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*"
-version = "1.1.1"
+python-versions = ">=3.7"
[[package]]
-category = "dev"
-description = "More routines for operating on iterables, beyond itertools"
name = "more-itertools"
+version = "8.12.0"
+description = "More routines for operating on iterables, beyond itertools"
+category = "dev"
optional = false
python-versions = ">=3.5"
-version = "8.2.0"
[[package]]
-category = "dev"
-description = "Optional static typing for Python"
name = "mypy"
+version = "0.770"
+description = "Optional static typing for Python"
+category = "dev"
optional = false
python-versions = ">=3.5"
-version = "0.770"
[package.dependencies]
mypy-extensions = ">=0.4.3,<0.5.0"
@@ -174,152 +176,140 @@ typing-extensions = ">=3.7.4"
dmypy = ["psutil (>=4.0)"]
[[package]]
-category = "dev"
-description = "Experimental type system extensions for programs checked with the mypy typechecker."
name = "mypy-extensions"
+version = "0.4.3"
+description = "Experimental type system extensions for programs checked with the mypy typechecker."
+category = "dev"
optional = false
python-versions = "*"
-version = "0.4.3"
[[package]]
-category = "dev"
-description = "Core utilities for Python packages"
name = "packaging"
+version = "21.3"
+description = "Core utilities for Python packages"
+category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-version = "20.3"
+python-versions = ">=3.6"
[package.dependencies]
-pyparsing = ">=2.0.2"
-six = "*"
+pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
[[package]]
-category = "dev"
-description = "plugin and hook calling mechanisms for python"
name = "pluggy"
+version = "0.13.1"
+description = "plugin and hook calling mechanisms for python"
+category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-version = "0.13.1"
[package.dependencies]
-[package.dependencies.importlib-metadata]
-python = "<3.8"
-version = ">=0.12"
+importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
[package.extras]
dev = ["pre-commit", "tox"]
[[package]]
-category = "dev"
-description = "library with cross-python path, ini-parsing, io, code, log facilities"
name = "py"
+version = "1.11.0"
+description = "library with cross-python path, ini-parsing, io, code, log facilities"
+category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-version = "1.8.1"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
-category = "dev"
-description = "Pygments is a syntax highlighting package written in Python."
name = "pygments"
+version = "2.11.2"
+description = "Pygments is a syntax highlighting package written in Python."
+category = "dev"
optional = false
python-versions = ">=3.5"
-version = "2.6.1"
[[package]]
-category = "dev"
-description = "Python parsing module"
name = "pyparsing"
+version = "3.0.7"
+description = "Python parsing module"
+category = "dev"
optional = false
-python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
-version = "2.4.7"
+python-versions = ">=3.6"
+
+[package.extras]
+diagrams = ["jinja2", "railroad-diagrams"]
[[package]]
-category = "dev"
-description = "pytest: simple powerful testing with Python"
name = "pytest"
+version = "5.4.3"
+description = "pytest: simple powerful testing with Python"
+category = "dev"
optional = false
python-versions = ">=3.5"
-version = "5.4.1"
[package.dependencies]
-atomicwrites = ">=1.0"
+atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
attrs = ">=17.4.0"
-colorama = "*"
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
more-itertools = ">=4.0.0"
packaging = "*"
pluggy = ">=0.12,<1.0"
py = ">=1.5.0"
wcwidth = "*"
-[package.dependencies.importlib-metadata]
-python = "<3.8"
-version = ">=0.12"
-
[package.extras]
-checkqa-mypy = ["mypy (v0.761)"]
+checkqa-mypy = ["mypy (==v0.761)"]
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
[[package]]
-category = "dev"
-description = "World timezone definitions, modern and historical"
name = "pytz"
+version = "2022.1"
+description = "World timezone definitions, modern and historical"
+category = "dev"
optional = false
python-versions = "*"
-version = "2019.3"
[[package]]
-category = "dev"
-description = "Python HTTP for Humans."
name = "requests"
+version = "2.27.1"
+description = "Python HTTP for Humans."
+category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-version = "2.23.0"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
[package.dependencies]
certifi = ">=2017.4.17"
-chardet = ">=3.0.2,<4"
-idna = ">=2.5,<3"
-urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26"
+charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""}
+idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""}
+urllib3 = ">=1.21.1,<1.27"
[package.extras]
-security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"]
-socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"]
+socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
+use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"]
[[package]]
-category = "dev"
-description = "Python 2 and 3 compatibility utilities"
-name = "six"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
-version = "1.14.0"
-
-[[package]]
-category = "dev"
-description = "This package provides 26 stemmers for 25 languages generated from Snowball algorithms."
name = "snowballstemmer"
+version = "2.2.0"
+description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms."
+category = "dev"
optional = false
python-versions = "*"
-version = "2.0.0"
[[package]]
-category = "dev"
-description = "Python documentation generator"
name = "sphinx"
+version = "3.5.4"
+description = "Python documentation generator"
+category = "dev"
optional = false
python-versions = ">=3.5"
-version = "3.0.1"
[package.dependencies]
-Jinja2 = ">=2.3"
-Pygments = ">=2.0"
alabaster = ">=0.7,<0.8"
babel = ">=1.3"
-colorama = ">=0.3.5"
-docutils = ">=0.12"
+colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""}
+docutils = ">=0.12,<0.17"
imagesize = "*"
+Jinja2 = ">=2.3"
packaging = "*"
+Pygments = ">=2.0"
requests = ">=2.5.0"
-setuptools = "*"
snowballstemmer = ">=1.1"
sphinxcontrib-applehelp = "*"
sphinxcontrib-devhelp = "*"
@@ -330,167 +320,167 @@ sphinxcontrib-serializinghtml = "*"
[package.extras]
docs = ["sphinxcontrib-websupport"]
-lint = ["flake8 (>=3.5.0)", "flake8-import-order", "mypy (>=0.770)", "docutils-stubs"]
-test = ["pytest", "pytest-cov", "html5lib", "typed-ast", "cython"]
+lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.800)", "docutils-stubs"]
+test = ["pytest", "pytest-cov", "html5lib", "cython", "typed-ast"]
[[package]]
-category = "dev"
-description = "Type hints (PEP 484) support for the Sphinx autodoc extension"
name = "sphinx-autodoc-typehints"
+version = "1.12.0"
+description = "Type hints (PEP 484) support for the Sphinx autodoc extension"
+category = "dev"
optional = false
-python-versions = ">=3.5.2"
-version = "1.10.3"
+python-versions = ">=3.6"
[package.dependencies]
-Sphinx = ">=2.1"
+Sphinx = ">=3.0"
[package.extras]
-test = ["pytest (>=3.1.0)", "typing-extensions (>=3.5)", "sphobjinv (>=2.0)", "dataclasses"]
+test = ["pytest (>=3.1.0)", "typing-extensions (>=3.5)", "sphobjinv (>=2.0)", "Sphinx (>=3.2.0)", "dataclasses"]
type_comments = ["typed-ast (>=1.4.0)"]
[[package]]
-category = "dev"
-description = "Read the Docs theme for Sphinx"
name = "sphinx-rtd-theme"
+version = "0.4.3"
+description = "Read the Docs theme for Sphinx"
+category = "dev"
optional = false
python-versions = "*"
-version = "0.4.3"
[package.dependencies]
sphinx = "*"
[[package]]
-category = "dev"
-description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books"
name = "sphinxcontrib-applehelp"
+version = "1.0.2"
+description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books"
+category = "dev"
optional = false
python-versions = ">=3.5"
-version = "1.0.2"
[package.extras]
lint = ["flake8", "mypy", "docutils-stubs"]
test = ["pytest"]
[[package]]
-category = "dev"
-description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document."
name = "sphinxcontrib-devhelp"
+version = "1.0.2"
+description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document."
+category = "dev"
optional = false
python-versions = ">=3.5"
-version = "1.0.2"
[package.extras]
lint = ["flake8", "mypy", "docutils-stubs"]
test = ["pytest"]
[[package]]
-category = "dev"
-description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files"
name = "sphinxcontrib-htmlhelp"
+version = "2.0.0"
+description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files"
+category = "dev"
optional = false
-python-versions = ">=3.5"
-version = "1.0.3"
+python-versions = ">=3.6"
[package.extras]
lint = ["flake8", "mypy", "docutils-stubs"]
test = ["pytest", "html5lib"]
[[package]]
-category = "dev"
-description = "A sphinx extension which renders display math in HTML via JavaScript"
name = "sphinxcontrib-jsmath"
+version = "1.0.1"
+description = "A sphinx extension which renders display math in HTML via JavaScript"
+category = "dev"
optional = false
python-versions = ">=3.5"
-version = "1.0.1"
[package.extras]
test = ["pytest", "flake8", "mypy"]
[[package]]
-category = "dev"
-description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document."
name = "sphinxcontrib-qthelp"
+version = "1.0.3"
+description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document."
+category = "dev"
optional = false
python-versions = ">=3.5"
-version = "1.0.3"
[package.extras]
lint = ["flake8", "mypy", "docutils-stubs"]
test = ["pytest"]
[[package]]
-category = "dev"
-description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)."
name = "sphinxcontrib-serializinghtml"
+version = "1.1.5"
+description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)."
+category = "dev"
optional = false
python-versions = ">=3.5"
-version = "1.1.4"
[package.extras]
lint = ["flake8", "mypy", "docutils-stubs"]
test = ["pytest"]
[[package]]
-category = "dev"
-description = "a fork of Python 2 and 3 ast modules with type comment support"
name = "typed-ast"
+version = "1.4.3"
+description = "a fork of Python 2 and 3 ast modules with type comment support"
+category = "dev"
optional = false
python-versions = "*"
-version = "1.4.1"
[[package]]
-category = "dev"
-description = "Backported and Experimental Type Hints for Python 3.5+"
name = "typing-extensions"
+version = "4.1.1"
+description = "Backported and Experimental Type Hints for Python 3.6+"
+category = "dev"
optional = false
-python-versions = "*"
-version = "3.7.4.2"
+python-versions = ">=3.6"
[[package]]
-category = "dev"
-description = "HTTP library with thread-safe connection pooling, file post, and more."
name = "urllib3"
+version = "1.26.9"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
-version = "1.25.8"
[package.extras]
-brotli = ["brotlipy (>=0.6.0)"]
+brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"]
secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
-socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"]
+socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
-category = "dev"
-description = "Measures number of Terminal column cells of wide-character codes"
name = "wcwidth"
+version = "0.2.5"
+description = "Measures the displayed width of unicode strings in a terminal"
+category = "dev"
optional = false
python-versions = "*"
-version = "0.1.9"
[[package]]
-category = "main"
-description = "Module for decorators, wrappers and monkey patching."
name = "wrapt"
+version = "1.14.0"
+description = "Module for decorators, wrappers and monkey patching."
+category = "main"
optional = false
-python-versions = "*"
-version = "1.12.1"
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
[[package]]
-category = "dev"
-description = "Backport of pathlib-compatible object wrapper for zip files"
-marker = "python_version < \"3.8\""
name = "zipp"
+version = "3.7.0"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+category = "dev"
optional = false
-python-versions = ">=3.6"
-version = "3.1.0"
+python-versions = ">=3.7"
[package.extras]
-docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"]
-testing = ["jaraco.itertools", "func-timeout"]
+docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"]
[metadata]
-content-hash = "fbfb43b8fe09bf1ef0e1bffc589aee6bb49cacfb0996a0a3cfc397e801b04b56"
+lock-version = "1.1"
python-versions = "^3.7"
+content-hash = "b65e840fe4a21615e47100d36020e049ad5c2fd70e708c1e651583d5f4613c52"
[metadata.files]
alabaster = [
@@ -498,28 +488,28 @@ alabaster = [
{file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"},
]
atomicwrites = [
- {file = "atomicwrites-1.3.0-py2.py3-none-any.whl", hash = "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4"},
- {file = "atomicwrites-1.3.0.tar.gz", hash = "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"},
+ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
+ {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
]
attrs = [
- {file = "attrs-19.3.0-py2.py3-none-any.whl", hash = "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c"},
- {file = "attrs-19.3.0.tar.gz", hash = "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"},
+ {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
+ {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
]
babel = [
- {file = "Babel-2.8.0-py2.py3-none-any.whl", hash = "sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4"},
- {file = "Babel-2.8.0.tar.gz", hash = "sha256:1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38"},
+ {file = "Babel-2.9.1-py2.py3-none-any.whl", hash = "sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9"},
+ {file = "Babel-2.9.1.tar.gz", hash = "sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0"},
]
certifi = [
- {file = "certifi-2020.4.5.1-py2.py3-none-any.whl", hash = "sha256:1d987a998c75633c40847cc966fcf5904906c920a7f17ef374f5aa4282abd304"},
- {file = "certifi-2020.4.5.1.tar.gz", hash = "sha256:51fcb31174be6e6664c5f69e3e1691a2d72a1a12e90f872cbdb1567eb47b6519"},
+ {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"},
+ {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"},
]
-chardet = [
- {file = "chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"},
- {file = "chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"},
+charset-normalizer = [
+ {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"},
+ {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"},
]
colorama = [
- {file = "colorama-0.4.3-py2.py3-none-any.whl", hash = "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff"},
- {file = "colorama-0.4.3.tar.gz", hash = "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"},
+ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
+ {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
]
deprecated = [
{file = "Deprecated-1.2.9-py2.py3-none-any.whl", hash = "sha256:55b41a15bda04c6a2c0d27dd4c2b7b81ffa6348c9cad8f077ac1978c59927ab9"},
@@ -530,63 +520,70 @@ docutils = [
{file = "docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"},
]
idna = [
- {file = "idna-2.9-py2.py3-none-any.whl", hash = "sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa"},
- {file = "idna-2.9.tar.gz", hash = "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb"},
+ {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
+ {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
]
imagesize = [
- {file = "imagesize-1.2.0-py2.py3-none-any.whl", hash = "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1"},
- {file = "imagesize-1.2.0.tar.gz", hash = "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"},
+ {file = "imagesize-1.3.0-py2.py3-none-any.whl", hash = "sha256:1db2f82529e53c3e929e8926a1fa9235aa82d0bd0c580359c67ec31b2fddaa8c"},
+ {file = "imagesize-1.3.0.tar.gz", hash = "sha256:cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d"},
]
importlib-metadata = [
- {file = "importlib_metadata-1.6.0-py2.py3-none-any.whl", hash = "sha256:2a688cbaa90e0cc587f1df48bdc97a6eadccdcd9c35fb3f976a09e3b5016d90f"},
- {file = "importlib_metadata-1.6.0.tar.gz", hash = "sha256:34513a8a0c4962bc66d35b359558fd8a5e10cd472d37aec5f66858addef32c1e"},
+ {file = "importlib_metadata-4.11.3-py3-none-any.whl", hash = "sha256:1208431ca90a8cca1a6b8af391bb53c1a2db74e5d1cef6ddced95d4b2062edc6"},
+ {file = "importlib_metadata-4.11.3.tar.gz", hash = "sha256:ea4c597ebf37142f827b8f39299579e31685c31d3a438b59f469406afd0f2539"},
]
inflection = [
{file = "inflection-0.4.0-py2.py3-none-any.whl", hash = "sha256:9a15d3598f01220e93f2207c432cfede50daff53137ce660fb8be838ef1ca6cc"},
{file = "inflection-0.4.0.tar.gz", hash = "sha256:32a5c3341d9583ec319548b9015b7fbdf8c429cbcb575d326c33ae3a0e90d52c"},
]
jinja2 = [
- {file = "Jinja2-2.11.2-py2.py3-none-any.whl", hash = "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035"},
- {file = "Jinja2-2.11.2.tar.gz", hash = "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0"},
+ {file = "Jinja2-3.1.1-py3-none-any.whl", hash = "sha256:539835f51a74a69f41b848a9645dbdc35b4f20a3b601e2d9a7e22947b15ff119"},
+ {file = "Jinja2-3.1.1.tar.gz", hash = "sha256:640bed4bb501cbd17194b3cace1dc2126f5b619cf068a726b98192a0fde74ae9"},
]
markupsafe = [
- {file = "MarkupSafe-1.1.1-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161"},
- {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"},
- {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183"},
- {file = "MarkupSafe-1.1.1-cp27-cp27m-win32.whl", hash = "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b"},
- {file = "MarkupSafe-1.1.1-cp27-cp27m-win_amd64.whl", hash = "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e"},
- {file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f"},
- {file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1"},
- {file = "MarkupSafe-1.1.1-cp34-cp34m-macosx_10_6_intel.whl", hash = "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5"},
- {file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1"},
- {file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735"},
- {file = "MarkupSafe-1.1.1-cp34-cp34m-win32.whl", hash = "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21"},
- {file = "MarkupSafe-1.1.1-cp34-cp34m-win_amd64.whl", hash = "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235"},
- {file = "MarkupSafe-1.1.1-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b"},
- {file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f"},
- {file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905"},
- {file = "MarkupSafe-1.1.1-cp35-cp35m-win32.whl", hash = "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1"},
- {file = "MarkupSafe-1.1.1-cp35-cp35m-win_amd64.whl", hash = "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d"},
- {file = "MarkupSafe-1.1.1-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff"},
- {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473"},
- {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e"},
- {file = "MarkupSafe-1.1.1-cp36-cp36m-win32.whl", hash = "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66"},
- {file = "MarkupSafe-1.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5"},
- {file = "MarkupSafe-1.1.1-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d"},
- {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e"},
- {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6"},
- {file = "MarkupSafe-1.1.1-cp37-cp37m-win32.whl", hash = "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2"},
- {file = "MarkupSafe-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c"},
- {file = "MarkupSafe-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15"},
- {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2"},
- {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42"},
- {file = "MarkupSafe-1.1.1-cp38-cp38-win32.whl", hash = "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b"},
- {file = "MarkupSafe-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"},
- {file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"},
+ {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"},
+ {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"},
+ {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"},
+ {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"},
+ {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"},
+ {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"},
+ {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"},
+ {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"},
+ {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"},
+ {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"},
+ {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"},
+ {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"},
+ {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"},
+ {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"},
+ {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"},
+ {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"},
+ {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"},
+ {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"},
+ {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"},
+ {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"},
+ {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"},
+ {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"},
+ {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"},
+ {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"},
+ {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"},
+ {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"},
+ {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"},
+ {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"},
+ {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"},
+ {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"},
+ {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"},
+ {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"},
+ {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"},
+ {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"},
+ {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"},
+ {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"},
+ {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"},
+ {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"},
+ {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"},
+ {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"},
]
more-itertools = [
- {file = "more-itertools-8.2.0.tar.gz", hash = "sha256:b1ddb932186d8a6ac451e1d95844b382f55e12686d51ca0c68b6f61f2ab7a507"},
- {file = "more_itertools-8.2.0-py3-none-any.whl", hash = "sha256:5dd8bcf33e5f9513ffa06d5ad33d78f31e1931ac9a18f33d37e77a180d393a7c"},
+ {file = "more-itertools-8.12.0.tar.gz", hash = "sha256:7dc6ad46f05f545f900dd59e8dfb4e84a4827b97b3cfecb175ea0c7d247f6064"},
+ {file = "more_itertools-8.12.0-py3-none-any.whl", hash = "sha256:43e6dd9942dffd72661a2c4ef383ad7da1e6a3e968a927ad7a6083ab410a688b"},
]
mypy = [
{file = "mypy-0.770-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:a34b577cdf6313bf24755f7a0e3f3c326d5c1f4fe7422d1d06498eb25ad0c600"},
@@ -609,52 +606,48 @@ mypy-extensions = [
{file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
]
packaging = [
- {file = "packaging-20.3-py2.py3-none-any.whl", hash = "sha256:82f77b9bee21c1bafbf35a84905d604d5d1223801d639cf3ed140bd651c08752"},
- {file = "packaging-20.3.tar.gz", hash = "sha256:3c292b474fda1671ec57d46d739d072bfd495a4f51ad01a055121d81e952b7a3"},
+ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
+ {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
]
pluggy = [
{file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"},
{file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"},
]
py = [
- {file = "py-1.8.1-py2.py3-none-any.whl", hash = "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0"},
- {file = "py-1.8.1.tar.gz", hash = "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa"},
+ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
+ {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
]
pygments = [
- {file = "Pygments-2.6.1-py3-none-any.whl", hash = "sha256:ff7a40b4860b727ab48fad6360eb351cc1b33cbf9b15a0f689ca5353e9463324"},
- {file = "Pygments-2.6.1.tar.gz", hash = "sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44"},
+ {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"},
+ {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"},
]
pyparsing = [
- {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"},
- {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"},
+ {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"},
+ {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"},
]
pytest = [
- {file = "pytest-5.4.1-py3-none-any.whl", hash = "sha256:0e5b30f5cb04e887b91b1ee519fa3d89049595f428c1db76e73bd7f17b09b172"},
- {file = "pytest-5.4.1.tar.gz", hash = "sha256:84dde37075b8805f3d1f392cc47e38a0e59518fb46a431cfdaf7cf1ce805f970"},
+ {file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"},
+ {file = "pytest-5.4.3.tar.gz", hash = "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"},
]
pytz = [
- {file = "pytz-2019.3-py2.py3-none-any.whl", hash = "sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d"},
- {file = "pytz-2019.3.tar.gz", hash = "sha256:b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be"},
+ {file = "pytz-2022.1-py2.py3-none-any.whl", hash = "sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c"},
+ {file = "pytz-2022.1.tar.gz", hash = "sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7"},
]
requests = [
- {file = "requests-2.23.0-py2.py3-none-any.whl", hash = "sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee"},
- {file = "requests-2.23.0.tar.gz", hash = "sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6"},
-]
-six = [
- {file = "six-1.14.0-py2.py3-none-any.whl", hash = "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"},
- {file = "six-1.14.0.tar.gz", hash = "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a"},
+ {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"},
+ {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"},
]
snowballstemmer = [
- {file = "snowballstemmer-2.0.0-py2.py3-none-any.whl", hash = "sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0"},
- {file = "snowballstemmer-2.0.0.tar.gz", hash = "sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52"},
+ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
+ {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
]
sphinx = [
- {file = "Sphinx-3.0.1-py3-none-any.whl", hash = "sha256:8411878f4768ec2a8896b844d68070204f9354a831b37937989c2e559d29dffc"},
- {file = "Sphinx-3.0.1.tar.gz", hash = "sha256:50972d83b78990fd61d0d3fe8620814cae53db29443e92c13661bc43dff46ec8"},
+ {file = "Sphinx-3.5.4-py3-none-any.whl", hash = "sha256:2320d4e994a191f4b4be27da514e46b3d6b420f2ff895d064f52415d342461e8"},
+ {file = "Sphinx-3.5.4.tar.gz", hash = "sha256:19010b7b9fa0dc7756a6e105b2aacd3a80f798af3c25c273be64d7beeb482cb1"},
]
sphinx-autodoc-typehints = [
- {file = "sphinx-autodoc-typehints-1.10.3.tar.gz", hash = "sha256:a6b3180167479aca2c4d1ed3b5cb044a70a76cccd6b38662d39288ebd9f0dff0"},
- {file = "sphinx_autodoc_typehints-1.10.3-py3-none-any.whl", hash = "sha256:27c9e6ef4f4451766ab8d08b2d8520933b97beb21c913f3df9ab2e59b56e6c6c"},
+ {file = "sphinx-autodoc-typehints-1.12.0.tar.gz", hash = "sha256:193617d9dbe0847281b1399d369e74e34cd959c82e02c7efde077fca908a9f52"},
+ {file = "sphinx_autodoc_typehints-1.12.0-py3-none-any.whl", hash = "sha256:5e81776ec422dd168d688ab60f034fccfafbcd94329e9537712c93003bddc04a"},
]
sphinx-rtd-theme = [
{file = "sphinx_rtd_theme-0.4.3-py2.py3-none-any.whl", hash = "sha256:00cf895504a7895ee433807c62094cf1e95f065843bf3acd17037c3e9a2becd4"},
@@ -669,8 +662,8 @@ sphinxcontrib-devhelp = [
{file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"},
]
sphinxcontrib-htmlhelp = [
- {file = "sphinxcontrib-htmlhelp-1.0.3.tar.gz", hash = "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b"},
- {file = "sphinxcontrib_htmlhelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f"},
+ {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"},
+ {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"},
]
sphinxcontrib-jsmath = [
{file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"},
@@ -681,49 +674,120 @@ sphinxcontrib-qthelp = [
{file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"},
]
sphinxcontrib-serializinghtml = [
- {file = "sphinxcontrib-serializinghtml-1.1.4.tar.gz", hash = "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc"},
- {file = "sphinxcontrib_serializinghtml-1.1.4-py2.py3-none-any.whl", hash = "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a"},
+ {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"},
+ {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"},
]
typed-ast = [
- {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"},
- {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb"},
- {file = "typed_ast-1.4.1-cp35-cp35m-win32.whl", hash = "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919"},
- {file = "typed_ast-1.4.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01"},
- {file = "typed_ast-1.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75"},
- {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652"},
- {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"},
- {file = "typed_ast-1.4.1-cp36-cp36m-win32.whl", hash = "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1"},
- {file = "typed_ast-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa"},
- {file = "typed_ast-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614"},
- {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41"},
- {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b"},
- {file = "typed_ast-1.4.1-cp37-cp37m-win32.whl", hash = "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe"},
- {file = "typed_ast-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355"},
- {file = "typed_ast-1.4.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6"},
- {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907"},
- {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d"},
- {file = "typed_ast-1.4.1-cp38-cp38-win32.whl", hash = "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c"},
- {file = "typed_ast-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4"},
- {file = "typed_ast-1.4.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34"},
- {file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"},
+ {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"},
+ {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"},
+ {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"},
+ {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"},
+ {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"},
+ {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"},
+ {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"},
+ {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"},
+ {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"},
+ {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"},
+ {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"},
+ {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"},
+ {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"},
+ {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"},
+ {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"},
+ {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"},
+ {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"},
+ {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"},
+ {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"},
+ {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"},
+ {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"},
+ {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"},
+ {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"},
+ {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"},
+ {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"},
+ {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"},
+ {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"},
+ {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"},
+ {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"},
+ {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"},
]
typing-extensions = [
- {file = "typing_extensions-3.7.4.2-py2-none-any.whl", hash = "sha256:f8d2bd89d25bc39dabe7d23df520442fa1d8969b82544370e03d88b5a591c392"},
- {file = "typing_extensions-3.7.4.2-py3-none-any.whl", hash = "sha256:6e95524d8a547a91e08f404ae485bbb71962de46967e1b71a0cb89af24e761c5"},
- {file = "typing_extensions-3.7.4.2.tar.gz", hash = "sha256:79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae"},
+ {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"},
+ {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"},
]
urllib3 = [
- {file = "urllib3-1.25.8-py2.py3-none-any.whl", hash = "sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc"},
- {file = "urllib3-1.25.8.tar.gz", hash = "sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc"},
+ {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"},
+ {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"},
]
wcwidth = [
- {file = "wcwidth-0.1.9-py2.py3-none-any.whl", hash = "sha256:cafe2186b3c009a04067022ce1dcd79cb38d8d65ee4f4791b8888d6599d1bbe1"},
- {file = "wcwidth-0.1.9.tar.gz", hash = "sha256:ee73862862a156bf77ff92b09034fc4825dd3af9cf81bc5b360668d425f3c5f1"},
+ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"},
+ {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"},
]
wrapt = [
- {file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"},
+ {file = "wrapt-1.14.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:5a9a1889cc01ed2ed5f34574c90745fab1dd06ec2eee663e8ebeefe363e8efd7"},
+ {file = "wrapt-1.14.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:9a3ff5fb015f6feb78340143584d9f8a0b91b6293d6b5cf4295b3e95d179b88c"},
+ {file = "wrapt-1.14.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:4b847029e2d5e11fd536c9ac3136ddc3f54bc9488a75ef7d040a3900406a91eb"},
+ {file = "wrapt-1.14.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:9a5a544861b21e0e7575b6023adebe7a8c6321127bb1d238eb40d99803a0e8bd"},
+ {file = "wrapt-1.14.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:88236b90dda77f0394f878324cfbae05ae6fde8a84d548cfe73a75278d760291"},
+ {file = "wrapt-1.14.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f0408e2dbad9e82b4c960274214af533f856a199c9274bd4aff55d4634dedc33"},
+ {file = "wrapt-1.14.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:9d8c68c4145041b4eeae96239802cfdfd9ef927754a5be3f50505f09f309d8c6"},
+ {file = "wrapt-1.14.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:22626dca56fd7f55a0733e604f1027277eb0f4f3d95ff28f15d27ac25a45f71b"},
+ {file = "wrapt-1.14.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:65bf3eb34721bf18b5a021a1ad7aa05947a1767d1aa272b725728014475ea7d5"},
+ {file = "wrapt-1.14.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09d16ae7a13cff43660155383a2372b4aa09109c7127aa3f24c3cf99b891c330"},
+ {file = "wrapt-1.14.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:debaf04f813ada978d7d16c7dfa16f3c9c2ec9adf4656efdc4defdf841fc2f0c"},
+ {file = "wrapt-1.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:748df39ed634851350efa87690c2237a678ed794fe9ede3f0d79f071ee042561"},
+ {file = "wrapt-1.14.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1807054aa7b61ad8d8103b3b30c9764de2e9d0c0978e9d3fc337e4e74bf25faa"},
+ {file = "wrapt-1.14.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:763a73ab377390e2af26042f685a26787c402390f682443727b847e9496e4a2a"},
+ {file = "wrapt-1.14.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8529b07b49b2d89d6917cfa157d3ea1dfb4d319d51e23030664a827fe5fd2131"},
+ {file = "wrapt-1.14.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:68aeefac31c1f73949662ba8affaf9950b9938b712fb9d428fa2a07e40ee57f8"},
+ {file = "wrapt-1.14.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59d7d92cee84a547d91267f0fea381c363121d70fe90b12cd88241bd9b0e1763"},
+ {file = "wrapt-1.14.0-cp310-cp310-win32.whl", hash = "sha256:3a88254881e8a8c4784ecc9cb2249ff757fd94b911d5df9a5984961b96113fff"},
+ {file = "wrapt-1.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:9a242871b3d8eecc56d350e5e03ea1854de47b17f040446da0e47dc3e0b9ad4d"},
+ {file = "wrapt-1.14.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:a65bffd24409454b889af33b6c49d0d9bcd1a219b972fba975ac935f17bdf627"},
+ {file = "wrapt-1.14.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9d9fcd06c952efa4b6b95f3d788a819b7f33d11bea377be6b8980c95e7d10775"},
+ {file = "wrapt-1.14.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:db6a0ddc1282ceb9032e41853e659c9b638789be38e5b8ad7498caac00231c23"},
+ {file = "wrapt-1.14.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:14e7e2c5f5fca67e9a6d5f753d21f138398cad2b1159913ec9e9a67745f09ba3"},
+ {file = "wrapt-1.14.0-cp35-cp35m-win32.whl", hash = "sha256:6d9810d4f697d58fd66039ab959e6d37e63ab377008ef1d63904df25956c7db0"},
+ {file = "wrapt-1.14.0-cp35-cp35m-win_amd64.whl", hash = "sha256:d808a5a5411982a09fef6b49aac62986274ab050e9d3e9817ad65b2791ed1425"},
+ {file = "wrapt-1.14.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b77159d9862374da213f741af0c361720200ab7ad21b9f12556e0eb95912cd48"},
+ {file = "wrapt-1.14.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36a76a7527df8583112b24adc01748cd51a2d14e905b337a6fefa8b96fc708fb"},
+ {file = "wrapt-1.14.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0057b5435a65b933cbf5d859cd4956624df37b8bf0917c71756e4b3d9958b9e"},
+ {file = "wrapt-1.14.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a0a4ca02752ced5f37498827e49c414d694ad7cf451ee850e3ff160f2bee9d3"},
+ {file = "wrapt-1.14.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8c6be72eac3c14baa473620e04f74186c5d8f45d80f8f2b4eda6e1d18af808e8"},
+ {file = "wrapt-1.14.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:21b1106bff6ece8cb203ef45b4f5778d7226c941c83aaaa1e1f0f4f32cc148cd"},
+ {file = "wrapt-1.14.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:493da1f8b1bb8a623c16552fb4a1e164c0200447eb83d3f68b44315ead3f9036"},
+ {file = "wrapt-1.14.0-cp36-cp36m-win32.whl", hash = "sha256:89ba3d548ee1e6291a20f3c7380c92f71e358ce8b9e48161401e087e0bc740f8"},
+ {file = "wrapt-1.14.0-cp36-cp36m-win_amd64.whl", hash = "sha256:729d5e96566f44fccac6c4447ec2332636b4fe273f03da128fff8d5559782b06"},
+ {file = "wrapt-1.14.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:891c353e95bb11abb548ca95c8b98050f3620a7378332eb90d6acdef35b401d4"},
+ {file = "wrapt-1.14.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23f96134a3aa24cc50614920cc087e22f87439053d886e474638c68c8d15dc80"},
+ {file = "wrapt-1.14.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6807bcee549a8cb2f38f73f469703a1d8d5d990815c3004f21ddb68a567385ce"},
+ {file = "wrapt-1.14.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6915682f9a9bc4cf2908e83caf5895a685da1fbd20b6d485dafb8e218a338279"},
+ {file = "wrapt-1.14.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f2f3bc7cd9c9fcd39143f11342eb5963317bd54ecc98e3650ca22704b69d9653"},
+ {file = "wrapt-1.14.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3a71dbd792cc7a3d772ef8cd08d3048593f13d6f40a11f3427c000cf0a5b36a0"},
+ {file = "wrapt-1.14.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5a0898a640559dec00f3614ffb11d97a2666ee9a2a6bad1259c9facd01a1d4d9"},
+ {file = "wrapt-1.14.0-cp37-cp37m-win32.whl", hash = "sha256:167e4793dc987f77fd476862d32fa404d42b71f6a85d3b38cbce711dba5e6b68"},
+ {file = "wrapt-1.14.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d066ffc5ed0be00cd0352c95800a519cf9e4b5dd34a028d301bdc7177c72daf3"},
+ {file = "wrapt-1.14.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d9bdfa74d369256e4218000a629978590fd7cb6cf6893251dad13d051090436d"},
+ {file = "wrapt-1.14.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2498762814dd7dd2a1d0248eda2afbc3dd9c11537bc8200a4b21789b6df6cd38"},
+ {file = "wrapt-1.14.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f24ca7953f2643d59a9c87d6e272d8adddd4a53bb62b9208f36db408d7aafc7"},
+ {file = "wrapt-1.14.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b835b86bd5a1bdbe257d610eecab07bf685b1af2a7563093e0e69180c1d4af1"},
+ {file = "wrapt-1.14.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b21650fa6907e523869e0396c5bd591cc326e5c1dd594dcdccac089561cacfb8"},
+ {file = "wrapt-1.14.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:354d9fc6b1e44750e2a67b4b108841f5f5ea08853453ecbf44c81fdc2e0d50bd"},
+ {file = "wrapt-1.14.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f83e9c21cd5275991076b2ba1cd35418af3504667affb4745b48937e214bafe"},
+ {file = "wrapt-1.14.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:61e1a064906ccba038aa3c4a5a82f6199749efbbb3cef0804ae5c37f550eded0"},
+ {file = "wrapt-1.14.0-cp38-cp38-win32.whl", hash = "sha256:28c659878f684365d53cf59dc9a1929ea2eecd7ac65da762be8b1ba193f7e84f"},
+ {file = "wrapt-1.14.0-cp38-cp38-win_amd64.whl", hash = "sha256:b0ed6ad6c9640671689c2dbe6244680fe8b897c08fd1fab2228429b66c518e5e"},
+ {file = "wrapt-1.14.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b3f7e671fb19734c872566e57ce7fc235fa953d7c181bb4ef138e17d607dc8a1"},
+ {file = "wrapt-1.14.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87fa943e8bbe40c8c1ba4086971a6fefbf75e9991217c55ed1bcb2f1985bd3d4"},
+ {file = "wrapt-1.14.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4775a574e9d84e0212f5b18886cace049a42e13e12009bb0491562a48bb2b758"},
+ {file = "wrapt-1.14.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d57677238a0c5411c76097b8b93bdebb02eb845814c90f0b01727527a179e4d"},
+ {file = "wrapt-1.14.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00108411e0f34c52ce16f81f1d308a571df7784932cc7491d1e94be2ee93374b"},
+ {file = "wrapt-1.14.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d332eecf307fca852d02b63f35a7872de32d5ba8b4ec32da82f45df986b39ff6"},
+ {file = "wrapt-1.14.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:01f799def9b96a8ec1ef6b9c1bbaf2bbc859b87545efbecc4a78faea13d0e3a0"},
+ {file = "wrapt-1.14.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47045ed35481e857918ae78b54891fac0c1d197f22c95778e66302668309336c"},
+ {file = "wrapt-1.14.0-cp39-cp39-win32.whl", hash = "sha256:2eca15d6b947cfff51ed76b2d60fd172c6ecd418ddab1c5126032d27f74bc350"},
+ {file = "wrapt-1.14.0-cp39-cp39-win_amd64.whl", hash = "sha256:bb36fbb48b22985d13a6b496ea5fb9bb2a076fea943831643836c9f6febbcfdc"},
+ {file = "wrapt-1.14.0.tar.gz", hash = "sha256:8323a43bd9c91f62bb7d4be74cc9ff10090e7ef820e27bfe8815c57e68261311"},
]
zipp = [
- {file = "zipp-3.1.0-py3-none-any.whl", hash = "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b"},
- {file = "zipp-3.1.0.tar.gz", hash = "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"},
+ {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"},
+ {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"},
]
From c4de658767f6914c8904f65ccaac390645ff76a4 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Tue, 29 Mar 2022 13:08:24 -0300
Subject: [PATCH 38/81] add inflection dep as required
---
poetry.lock | 4 ++--
pyproject.toml | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/poetry.lock b/poetry.lock
index 1b4493b..9d15188 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -125,7 +125,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-
name = "inflection"
version = "0.4.0"
description = "A port of Ruby on Rails inflector to Python"
-category = "dev"
+category = "main"
optional = false
python-versions = ">=3.5"
@@ -480,7 +480,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-
[metadata]
lock-version = "1.1"
python-versions = "^3.7"
-content-hash = "b65e840fe4a21615e47100d36020e049ad5c2fd70e708c1e651583d5f4613c52"
+content-hash = "2b3d1f81cf2afe7ae04cb7fc53374f4018d8aea8239e2d3c7a859453ec94c376"
[metadata.files]
alabaster = [
diff --git a/pyproject.toml b/pyproject.toml
index fbe2551..8b40b7b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -24,9 +24,9 @@ authors = [
[tool.poetry.dependencies]
python = "^3.7"
deprecated = "1.2.9"
+inflection = "0.4.0"
[tool.poetry.dev-dependencies]
-inflection = "^0.4.0"
mypy = "^0.770"
pytest = "^5.4.1"
sphinx = "^3.0.1"
From 0a9956e7735ca89a34c4612f5b7df610371aae6f Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Tue, 29 Mar 2022 13:14:27 -0300
Subject: [PATCH 39/81] fix wrong link on readme
---
README.md | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/README.md b/README.md
index 1a9fccd..128aa9c 100644
--- a/README.md
+++ b/README.md
@@ -78,7 +78,6 @@ PyCDP is licensed under the MIT License.
[1]: https://github.com/ChromeDevTools/devtools-protocol/
-[2]: https://github.com/HyperionGray/python-chrome-devtools-protocol
-[3]: https://github.com/ChromeDevTools/devtools-protocol/tree/1b1e643d77dacc9568b5acc1efdeaec19c048a27
+[2]: https://github.com/ChromeDevTools/devtools-protocol/tree/1b1e643d77dacc9568b5acc1efdeaec19c048a27
[4]: https://github.com/hyperiongray/trio-chrome-devtools-protocol
[5]: https://python-poetry.org/docs/
From b584935c16f7799956c467f1f68e2414da76f9d9 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Tue, 29 Mar 2022 13:21:45 -0300
Subject: [PATCH 40/81] add link to docs on readme
---
README.md | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 128aa9c..f69af87 100644
--- a/README.md
+++ b/README.md
@@ -56,7 +56,7 @@ The `cdp` package follows same structure of CDP domains, each domain is a Python
a method of that class.
Each method is a generator function with a single yield which is a Python dict, on the CDP wire format,
-containing the message that should be sent to the browser:
+containing the message that should be sent to the browser, on resumption the generator receives the message from browser:
```python
import cdp
@@ -71,6 +71,7 @@ except StopIteration as result:
response = result.value # the parsed response to Target.get_targets() command
print(response)
```
+For implementation details check out the [docs][3].
@@ -79,5 +80,6 @@ PyCDP is licensed under the MIT License.
[1]: https://github.com/ChromeDevTools/devtools-protocol/
[2]: https://github.com/ChromeDevTools/devtools-protocol/tree/1b1e643d77dacc9568b5acc1efdeaec19c048a27
+[3]: docs/getting_started.rst
[4]: https://github.com/hyperiongray/trio-chrome-devtools-protocol
[5]: https://python-poetry.org/docs/
From d66201e76839b91d100f5bee51636f1980b19509 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Tue, 12 Apr 2022 22:43:23 -0300
Subject: [PATCH 41/81] set Network.Cookie.expires as optional
---
cdpgen/generate.py | 60 +++++++++++++++++++++++-----------------------
1 file changed, 30 insertions(+), 30 deletions(-)
diff --git a/cdpgen/generate.py b/cdpgen/generate.py
index bef85bb..6ebf261 100644
--- a/cdpgen/generate.py
+++ b/cdpgen/generate.py
@@ -284,7 +284,7 @@ def generate_from_json(self, dict_) -> str:
expr = CdpPrimitiveType.get_constructor(self.type,
f"{dict_}['{self.name}']")
if self.optional:
- expr = f"{expr} if '{self.name}' in {dict_} else None"
+ expr = f"{expr} if {dict_}.get('{self.name}', None) is not None else None"
return expr
@@ -968,6 +968,33 @@ def generate_docs(docs_path, domains):
f.write(domain.generate_sphinx())
+def fix_protocol_spec(domains):
+ """Fixes following errors in the official CDP spec:
+ 1. DOM includes an erroneous $ref that refers to itself.
+ 2. Page includes an event with an extraneous backtick in the description.
+ 3. Network.Cookie.expires is optional because sometimes its value can be null."""
+ for domain in domains:
+ if domain.domain == 'DOM':
+ for cmd in domain.commands:
+ if cmd.name == 'resolveNode':
+ # Patch 1
+ cmd.parameters[1].ref = 'BackendNodeId'
+ break
+ elif domain.domain == 'Page':
+ for event in domain.events:
+ if event.name == 'screencastVisibilityChanged':
+ # Patch 2
+ event.description = event.description.replace('`', '')
+ break
+ elif domain.domain == 'Network':
+ for _type in domain.types:
+ if _type.id == 'Cookie':
+ for prop in _type.properties:
+ if prop.name == 'expires':
+ prop.optional = True
+ break
+
+
def selfgen():
'''Generate CDP types and docs for ourselves'''
here = Path(__file__).parent.resolve()
@@ -984,23 +1011,7 @@ def selfgen():
logger.info('Parsing JSON file %s', json_path)
domains.extend(parse(json_path, output_path))
domains.sort(key=operator.attrgetter('domain'))
-
- # Patch up CDP errors. It's easier to patch that here than it is to modify
- # the generator code.
- # 1. DOM includes an erroneous $ref that refers to itself.
- # 2. Page includes an event with an extraneous backtick in the description.
- for domain in domains:
- if domain.domain == 'DOM':
- for cmd in domain.commands:
- if cmd.name == 'resolveNode':
- # Patch 1
- cmd.parameters[1].ref = 'BackendNodeId'
- elif domain.domain == 'Page':
- for event in domain.events:
- if event.name == 'screencastVisibilityChanged':
- # Patch 2
- event.description = event.description.replace('`', '')
-
+ fix_protocol_spec(domains)
for domain in domains:
logger.info('Generating module: %s → %s.py', domain.domain,
domain.module)
@@ -1058,18 +1069,7 @@ def file_type(path: str):
logger.info('Parsing JSON file %s', json_path)
domains.extend(parse(json_path, output))
domains.sort(key=operator.attrgetter('domain'))
- # fix errors from spec files
- for domain in domains:
- if domain.domain == 'DOM':
- for cmd in domain.commands:
- if cmd.name == 'resolveNode':
- # Patch 1
- cmd.parameters[1].ref = 'BackendNodeId'
- elif domain.domain == 'Page':
- for event in domain.events:
- if event.name == 'screencastVisibilityChanged':
- # Patch 2
- event.description = event.description.replace('`', '')
+ fix_protocol_spec(domains)
# generate python code
for domain in domains:
logger.info('Generating module: %s → %s/%s.py', domain.domain, output, domain.module)
From a0a9efa9c4fac05372087c22efc05a75d79c5d1e Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Wed, 27 Apr 2022 13:05:45 -0300
Subject: [PATCH 42/81] add CDP client implementation for asyncio
---
README.md | 34 +-
poetry.lock | 373 ++++++++++++++++-
{cdpgen => pycdp}/__init__.py | 0
pycdp/asyncio.py | 429 ++++++++++++++++++++
{cdp => pycdp/cdp}/__init__.py | 0
{cdp => pycdp/cdp}/accessibility.py | 0
{cdp => pycdp/cdp}/animation.py | 0
{cdp => pycdp/cdp}/audits.py | 0
{cdp => pycdp/cdp}/background_service.py | 0
{cdp => pycdp/cdp}/browser.py | 0
{cdp => pycdp/cdp}/cache_storage.py | 0
{cdp => pycdp/cdp}/cast.py | 0
{cdp => pycdp/cdp}/console.py | 0
{cdp => pycdp/cdp}/css.py | 0
{cdp => pycdp/cdp}/database.py | 0
{cdp => pycdp/cdp}/debugger.py | 0
{cdp => pycdp/cdp}/device_orientation.py | 0
{cdp => pycdp/cdp}/dom.py | 0
{cdp => pycdp/cdp}/dom_debugger.py | 0
{cdp => pycdp/cdp}/dom_snapshot.py | 0
{cdp => pycdp/cdp}/dom_storage.py | 0
{cdp => pycdp/cdp}/emulation.py | 0
{cdp => pycdp/cdp}/event_breakpoints.py | 0
{cdp => pycdp/cdp}/fetch.py | 0
{cdp => pycdp/cdp}/headless_experimental.py | 0
{cdp => pycdp/cdp}/heap_profiler.py | 0
{cdp => pycdp/cdp}/indexed_db.py | 0
{cdp => pycdp/cdp}/input_.py | 0
{cdp => pycdp/cdp}/inspector.py | 0
{cdp => pycdp/cdp}/io.py | 0
{cdp => pycdp/cdp}/layer_tree.py | 0
{cdp => pycdp/cdp}/log.py | 0
{cdp => pycdp/cdp}/media.py | 0
{cdp => pycdp/cdp}/memory.py | 0
{cdp => pycdp/cdp}/network.py | 0
{cdp => pycdp/cdp}/overlay.py | 0
{cdp => pycdp/cdp}/page.py | 0
{cdp => pycdp/cdp}/performance.py | 0
{cdp => pycdp/cdp}/performance_timeline.py | 0
{cdp => pycdp/cdp}/profiler.py | 0
{cdp => pycdp/cdp}/py.typed | 0
{cdp => pycdp/cdp}/runtime.py | 0
{cdp => pycdp/cdp}/schema.py | 0
{cdp => pycdp/cdp}/security.py | 0
{cdp => pycdp/cdp}/service_worker.py | 0
{cdp => pycdp/cdp}/storage.py | 0
{cdp => pycdp/cdp}/system_info.py | 0
{cdp => pycdp/cdp}/target.py | 0
{cdp => pycdp/cdp}/tethering.py | 0
{cdp => pycdp/cdp}/tracing.py | 0
{cdp => pycdp/cdp}/util.py | 0
{cdp => pycdp/cdp}/web_audio.py | 0
{cdp => pycdp/cdp}/web_authn.py | 0
{cdpgen => pycdp/gen}/README.md | 0
pycdp/gen/__init__.py | 0
{cdpgen => pycdp/gen}/browser_protocol.json | 0
{cdpgen => pycdp/gen}/generate.py | 0
{cdpgen => pycdp/gen}/js_protocol.json | 0
{cdpgen => pycdp/gen}/test_generate.py | 0
pycdp/utils.py | 367 +++++++++++++++++
pyproject.toml | 10 +-
61 files changed, 1193 insertions(+), 20 deletions(-)
rename {cdpgen => pycdp}/__init__.py (100%)
create mode 100644 pycdp/asyncio.py
rename {cdp => pycdp/cdp}/__init__.py (100%)
rename {cdp => pycdp/cdp}/accessibility.py (100%)
rename {cdp => pycdp/cdp}/animation.py (100%)
rename {cdp => pycdp/cdp}/audits.py (100%)
rename {cdp => pycdp/cdp}/background_service.py (100%)
rename {cdp => pycdp/cdp}/browser.py (100%)
rename {cdp => pycdp/cdp}/cache_storage.py (100%)
rename {cdp => pycdp/cdp}/cast.py (100%)
rename {cdp => pycdp/cdp}/console.py (100%)
rename {cdp => pycdp/cdp}/css.py (100%)
rename {cdp => pycdp/cdp}/database.py (100%)
rename {cdp => pycdp/cdp}/debugger.py (100%)
rename {cdp => pycdp/cdp}/device_orientation.py (100%)
rename {cdp => pycdp/cdp}/dom.py (100%)
rename {cdp => pycdp/cdp}/dom_debugger.py (100%)
rename {cdp => pycdp/cdp}/dom_snapshot.py (100%)
rename {cdp => pycdp/cdp}/dom_storage.py (100%)
rename {cdp => pycdp/cdp}/emulation.py (100%)
rename {cdp => pycdp/cdp}/event_breakpoints.py (100%)
rename {cdp => pycdp/cdp}/fetch.py (100%)
rename {cdp => pycdp/cdp}/headless_experimental.py (100%)
rename {cdp => pycdp/cdp}/heap_profiler.py (100%)
rename {cdp => pycdp/cdp}/indexed_db.py (100%)
rename {cdp => pycdp/cdp}/input_.py (100%)
rename {cdp => pycdp/cdp}/inspector.py (100%)
rename {cdp => pycdp/cdp}/io.py (100%)
rename {cdp => pycdp/cdp}/layer_tree.py (100%)
rename {cdp => pycdp/cdp}/log.py (100%)
rename {cdp => pycdp/cdp}/media.py (100%)
rename {cdp => pycdp/cdp}/memory.py (100%)
rename {cdp => pycdp/cdp}/network.py (100%)
rename {cdp => pycdp/cdp}/overlay.py (100%)
rename {cdp => pycdp/cdp}/page.py (100%)
rename {cdp => pycdp/cdp}/performance.py (100%)
rename {cdp => pycdp/cdp}/performance_timeline.py (100%)
rename {cdp => pycdp/cdp}/profiler.py (100%)
rename {cdp => pycdp/cdp}/py.typed (100%)
rename {cdp => pycdp/cdp}/runtime.py (100%)
rename {cdp => pycdp/cdp}/schema.py (100%)
rename {cdp => pycdp/cdp}/security.py (100%)
rename {cdp => pycdp/cdp}/service_worker.py (100%)
rename {cdp => pycdp/cdp}/storage.py (100%)
rename {cdp => pycdp/cdp}/system_info.py (100%)
rename {cdp => pycdp/cdp}/target.py (100%)
rename {cdp => pycdp/cdp}/tethering.py (100%)
rename {cdp => pycdp/cdp}/tracing.py (100%)
rename {cdp => pycdp/cdp}/util.py (100%)
rename {cdp => pycdp/cdp}/web_audio.py (100%)
rename {cdp => pycdp/cdp}/web_authn.py (100%)
rename {cdpgen => pycdp/gen}/README.md (100%)
create mode 100644 pycdp/gen/__init__.py
rename {cdpgen => pycdp/gen}/browser_protocol.json (100%)
rename {cdpgen => pycdp/gen}/generate.py (100%)
rename {cdpgen => pycdp/gen}/js_protocol.json (100%)
rename {cdpgen => pycdp/gen}/test_generate.py (100%)
create mode 100644 pycdp/utils.py
diff --git a/README.md b/README.md
index f69af87..0b2de16 100644
--- a/README.md
+++ b/README.md
@@ -14,12 +14,6 @@ not catch any typos in your JSON objects, and you wouldn't get autocomplete for
any parts of the JSON data structure. By providing a set of native Python
wrappers, this project makes it easier and faster to write CDP client code.
-**This library does not perform any I/O!** In order to maximize
-flexibility, this library does not actually handle any network I/O, such as
-opening a socket or negotiating a WebSocket protocol. Instead, that
-responsibility is left to higher-level libraries, for example
-[trio-chrome-devtools-protocol][4].
-
## Installation
You can install this library as a dependency on your project with:
```
@@ -29,7 +23,28 @@ Change the git tag `@1.0.0` if you need another version. To install for developm
repository, install [Poetry][5] package manager and run `poetry install` to install dependencies.
## Usage
-You can install this package as a dependency to use the builtin CDP types with `import cdp`, but if you want to try a different CDP version you can build new wrappers with `cdpgen` command:
+If all you want is to automate Chrome right now, the `pycdp.asyncio` module contains a low-level client for asyncio:
+```python
+import asyncio
+from pycdp import cdp
+from pycdp.asyncio import connect_cdp
+
+
+async def main():
+ conn = await connect_cdp('http://localhost:9222')
+ target_id = await conn.execute(cdp.target.create_target('about:blank'))
+ target_session = await conn.connect_session(target_id)
+ await target_session.execute(cdp.page.navigate('https://chromedevtools.github.io/devtools-protocol/tot/Page/#method-navigate'))
+ try:
+ await asyncio.get_running_loop().create_future()
+ finally:
+ await target_session.execute(cdp.page.close())
+
+asyncio.run(main())
+```
+where the Chrome debugger is listening on `http://localhost:9222` (started with `google-chrome --remote-debugging-port=9222`).
+
+You can also use just the builtin CDP types with `import pycdp.cdp` in your own client implementation. If you want to try a different CDP version, you can build new type wrappers with the `cdpgen` command:
```
usage: cdpgen
@@ -52,10 +67,9 @@ cdpgen --browser-protocol browser_protocol.json --js-protocol js_protocol.json -
You can then include the `/tmp/cdp` package in your project and import it like the builtin CDP types.
## Implementation of a CDP client
-The `cdp` package follows same structure of CDP domains, each domain is a Python class and each command
-a method of that class.
+The `pycdp.cdp` package follows the same structure as the CDP domains: each domain is a Python module and each command is a function in that module.
-Each method is a generator function with a single yield which is a Python dict, on the CDP wire format,
+Each function is a generator with a single yield, which is a Python dict in the CDP wire format,
containing the message that should be sent to the browser, on resumption the generator receives the message from browser:
```python
import cdp
diff --git a/poetry.lock b/poetry.lock
index 9d15188..acea45d 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,3 +1,36 @@
+[[package]]
+name = "aiohttp"
+version = "3.8.1"
+description = "Async http client/server framework (asyncio)"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+aiosignal = ">=1.1.2"
+async-timeout = ">=4.0.0a3,<5.0"
+asynctest = {version = "0.13.0", markers = "python_version < \"3.8\""}
+attrs = ">=17.3.0"
+charset-normalizer = ">=2.0,<3.0"
+frozenlist = ">=1.1.1"
+multidict = ">=4.5,<7.0"
+typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""}
+yarl = ">=1.0,<2.0"
+
+[package.extras]
+speedups = ["aiodns", "brotli", "cchardet"]
+
+[[package]]
+name = "aiosignal"
+version = "1.2.0"
+description = "aiosignal: a list of registered asynchronous callbacks"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+frozenlist = ">=1.1.0"
+
[[package]]
name = "alabaster"
version = "0.7.12"
@@ -6,6 +39,25 @@ category = "dev"
optional = false
python-versions = "*"
+[[package]]
+name = "async-timeout"
+version = "4.0.2"
+description = "Timeout context manager for asyncio programs"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""}
+
+[[package]]
+name = "asynctest"
+version = "0.13.0"
+description = "Enhance the standard unittest package with features for testing asyncio libraries"
+category = "main"
+optional = false
+python-versions = ">=3.5"
+
[[package]]
name = "atomicwrites"
version = "1.4.0"
@@ -18,7 +70,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
name = "attrs"
version = "21.4.0"
description = "Classes Without Boilerplate"
-category = "dev"
+category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
@@ -51,7 +103,7 @@ python-versions = "*"
name = "charset-normalizer"
version = "2.0.12"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
-category = "dev"
+category = "main"
optional = false
python-versions = ">=3.5.0"
@@ -88,11 +140,19 @@ category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+[[package]]
+name = "frozenlist"
+version = "1.3.0"
+description = "A list-like structure which implements collections.abc.MutableSequence"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
[[package]]
name = "idna"
version = "3.3"
description = "Internationalized Domain Names in Applications (IDNA)"
-category = "dev"
+category = "main"
optional = false
python-versions = ">=3.5"
@@ -159,6 +219,14 @@ category = "dev"
optional = false
python-versions = ">=3.5"
+[[package]]
+name = "multidict"
+version = "6.0.2"
+description = "multidict implementation"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
[[package]]
name = "mypy"
version = "0.770"
@@ -432,7 +500,7 @@ python-versions = "*"
name = "typing-extensions"
version = "4.1.1"
description = "Backported and Experimental Type Hints for Python 3.6+"
-category = "dev"
+category = "main"
optional = false
python-versions = ">=3.6"
@@ -465,6 +533,19 @@ category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+[[package]]
+name = "yarl"
+version = "1.7.2"
+description = "Yet another URL library"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+idna = ">=2.0"
+multidict = ">=4.0"
+typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""}
+
[[package]]
name = "zipp"
version = "3.7.0"
@@ -480,13 +561,99 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-
[metadata]
lock-version = "1.1"
python-versions = "^3.7"
-content-hash = "2b3d1f81cf2afe7ae04cb7fc53374f4018d8aea8239e2d3c7a859453ec94c376"
+content-hash = "ed29ffc1133f17161446637668c63a01554ba204abab6d4388f9da50df66b182"
[metadata.files]
+aiohttp = [
+ {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1ed0b6477896559f17b9eaeb6d38e07f7f9ffe40b9f0f9627ae8b9926ae260a8"},
+ {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7dadf3c307b31e0e61689cbf9e06be7a867c563d5a63ce9dca578f956609abf8"},
+ {file = "aiohttp-3.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a79004bb58748f31ae1cbe9fa891054baaa46fb106c2dc7af9f8e3304dc30316"},
+ {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12de6add4038df8f72fac606dff775791a60f113a725c960f2bab01d8b8e6b15"},
+ {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f0d5f33feb5f69ddd57a4a4bd3d56c719a141080b445cbf18f238973c5c9923"},
+ {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eaba923151d9deea315be1f3e2b31cc39a6d1d2f682f942905951f4e40200922"},
+ {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:099ebd2c37ac74cce10a3527d2b49af80243e2a4fa39e7bce41617fbc35fa3c1"},
+ {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e5d962cf7e1d426aa0e528a7e198658cdc8aa4fe87f781d039ad75dcd52c516"},
+ {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fa0ffcace9b3aa34d205d8130f7873fcfefcb6a4dd3dd705b0dab69af6712642"},
+ {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:61bfc23df345d8c9716d03717c2ed5e27374e0fe6f659ea64edcd27b4b044cf7"},
+ {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:31560d268ff62143e92423ef183680b9829b1b482c011713ae941997921eebc8"},
+ {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:01d7bdb774a9acc838e6b8f1d114f45303841b89b95984cbb7d80ea41172a9e3"},
+ {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97ef77eb6b044134c0b3a96e16abcb05ecce892965a2124c566af0fd60f717e2"},
+ {file = "aiohttp-3.8.1-cp310-cp310-win32.whl", hash = "sha256:c2aef4703f1f2ddc6df17519885dbfa3514929149d3ff900b73f45998f2532fa"},
+ {file = "aiohttp-3.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:713ac174a629d39b7c6a3aa757b337599798da4c1157114a314e4e391cd28e32"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:473d93d4450880fe278696549f2e7aed8cd23708c3c1997981464475f32137db"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b5eeae8e019e7aad8af8bb314fb908dd2e028b3cdaad87ec05095394cce632"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3af642b43ce56c24d063325dd2cf20ee012d2b9ba4c3c008755a301aaea720ad"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3630c3ef435c0a7c549ba170a0633a56e92629aeed0e707fec832dee313fb7a"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4a4a4e30bf1edcad13fb0804300557aedd07a92cabc74382fdd0ba6ca2661091"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6f8b01295e26c68b3a1b90efb7a89029110d3a4139270b24fda961893216c440"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a25fa703a527158aaf10dafd956f7d42ac6d30ec80e9a70846253dd13e2f067b"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5bfde62d1d2641a1f5173b8c8c2d96ceb4854f54a44c23102e2ccc7e02f003ec"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:51467000f3647d519272392f484126aa716f747859794ac9924a7aafa86cd411"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:03a6d5349c9ee8f79ab3ff3694d6ce1cfc3ced1c9d36200cb8f08ba06bd3b782"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:102e487eeb82afac440581e5d7f8f44560b36cf0bdd11abc51a46c1cd88914d4"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-win32.whl", hash = "sha256:4aed991a28ea3ce320dc8ce655875e1e00a11bdd29fe9444dd4f88c30d558602"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b0e20cddbd676ab8a64c774fefa0ad787cc506afd844de95da56060348021e96"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:37951ad2f4a6df6506750a23f7cbabad24c73c65f23f72e95897bb2cecbae676"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c23b1ad869653bc818e972b7a3a79852d0e494e9ab7e1a701a3decc49c20d51"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15b09b06dae900777833fe7fc4b4aa426556ce95847a3e8d7548e2d19e34edb8"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:477c3ea0ba410b2b56b7efb072c36fa91b1e6fc331761798fa3f28bb224830dd"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2f2f69dca064926e79997f45b2f34e202b320fd3782f17a91941f7eb85502ee2"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ef9612483cb35171d51d9173647eed5d0069eaa2ee812793a75373447d487aa4"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6d69f36d445c45cda7b3b26afef2fc34ef5ac0cdc75584a87ef307ee3c8c6d00"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:55c3d1072704d27401c92339144d199d9de7b52627f724a949fc7d5fc56d8b93"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b9d00268fcb9f66fbcc7cd9fe423741d90c75ee029a1d15c09b22d23253c0a44"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:07b05cd3305e8a73112103c834e91cd27ce5b4bd07850c4b4dbd1877d3f45be7"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c34dc4958b232ef6188c4318cb7b2c2d80521c9a56c52449f8f93ab7bc2a8a1c"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-win32.whl", hash = "sha256:d2f9b69293c33aaa53d923032fe227feac867f81682f002ce33ffae978f0a9a9"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6ae828d3a003f03ae31915c31fa684b9890ea44c9c989056fea96e3d12a9fa17"},
+ {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0c7ebbbde809ff4e970824b2b6cb7e4222be6b95a296e46c03cf050878fc1785"},
+ {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b7ef7cbd4fec9a1e811a5de813311ed4f7ac7d93e0fda233c9b3e1428f7dd7b"},
+ {file = "aiohttp-3.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c3d6a4d0619e09dcd61021debf7059955c2004fa29f48788a3dfaf9c9901a7cd"},
+ {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:718626a174e7e467f0558954f94af117b7d4695d48eb980146016afa4b580b2e"},
+ {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:589c72667a5febd36f1315aa6e5f56dd4aa4862df295cb51c769d16142ddd7cd"},
+ {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ed076098b171573161eb146afcb9129b5ff63308960aeca4b676d9d3c35e700"},
+ {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:086f92daf51a032d062ec5f58af5ca6a44d082c35299c96376a41cbb33034675"},
+ {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:11691cf4dc5b94236ccc609b70fec991234e7ef8d4c02dd0c9668d1e486f5abf"},
+ {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:31d1e1c0dbf19ebccbfd62eff461518dcb1e307b195e93bba60c965a4dcf1ba0"},
+ {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:11a67c0d562e07067c4e86bffc1553f2cf5b664d6111c894671b2b8712f3aba5"},
+ {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:bb01ba6b0d3f6c68b89fce7305080145d4877ad3acaed424bae4d4ee75faa950"},
+ {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:44db35a9e15d6fe5c40d74952e803b1d96e964f683b5a78c3cc64eb177878155"},
+ {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:844a9b460871ee0a0b0b68a64890dae9c415e513db0f4a7e3cab41a0f2fedf33"},
+ {file = "aiohttp-3.8.1-cp38-cp38-win32.whl", hash = "sha256:7d08744e9bae2ca9c382581f7dce1273fe3c9bae94ff572c3626e8da5b193c6a"},
+ {file = "aiohttp-3.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:04d48b8ce6ab3cf2097b1855e1505181bdd05586ca275f2505514a6e274e8e75"},
+ {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f5315a2eb0239185af1bddb1abf472d877fede3cc8d143c6cddad37678293237"},
+ {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a996d01ca39b8dfe77440f3cd600825d05841088fd6bc0144cc6c2ec14cc5f74"},
+ {file = "aiohttp-3.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:13487abd2f761d4be7c8ff9080de2671e53fff69711d46de703c310c4c9317ca"},
+ {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea302f34477fda3f85560a06d9ebdc7fa41e82420e892fc50b577e35fc6a50b2"},
+ {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2f635ce61a89c5732537a7896b6319a8fcfa23ba09bec36e1b1ac0ab31270d2"},
+ {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e999f2d0e12eea01caeecb17b653f3713d758f6dcc770417cf29ef08d3931421"},
+ {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0770e2806a30e744b4e21c9d73b7bee18a1cfa3c47991ee2e5a65b887c49d5cf"},
+ {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d15367ce87c8e9e09b0f989bfd72dc641bcd04ba091c68cd305312d00962addd"},
+ {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6c7cefb4b0640703eb1069835c02486669312bf2f12b48a748e0a7756d0de33d"},
+ {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:71927042ed6365a09a98a6377501af5c9f0a4d38083652bcd2281a06a5976724"},
+ {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:28d490af82bc6b7ce53ff31337a18a10498303fe66f701ab65ef27e143c3b0ef"},
+ {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:b6613280ccedf24354406caf785db748bebbddcf31408b20c0b48cb86af76866"},
+ {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81e3d8c34c623ca4e36c46524a3530e99c0bc95ed068fd6e9b55cb721d408fb2"},
+ {file = "aiohttp-3.8.1-cp39-cp39-win32.whl", hash = "sha256:7187a76598bdb895af0adbd2fb7474d7f6025d170bc0a1130242da817ce9e7d1"},
+ {file = "aiohttp-3.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:1c182cb873bc91b411e184dab7a2b664d4fea2743df0e4d57402f7f3fa644bac"},
+ {file = "aiohttp-3.8.1.tar.gz", hash = "sha256:fc5471e1a54de15ef71c1bc6ebe80d4dc681ea600e68bfd1cbce40427f0b7578"},
+]
+aiosignal = [
+ {file = "aiosignal-1.2.0-py3-none-any.whl", hash = "sha256:26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a"},
+ {file = "aiosignal-1.2.0.tar.gz", hash = "sha256:78ed67db6c7b7ced4f98e495e572106d5c432a93e1ddd1bf475e1dc05f5b7df2"},
+]
alabaster = [
{file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"},
{file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"},
]
+async-timeout = [
+ {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"},
+ {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"},
+]
+asynctest = [
+ {file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"},
+ {file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"},
+]
atomicwrites = [
{file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
{file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
@@ -519,6 +686,67 @@ docutils = [
{file = "docutils-0.16-py2.py3-none-any.whl", hash = "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af"},
{file = "docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"},
]
+frozenlist = [
+ {file = "frozenlist-1.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2257aaba9660f78c7b1d8fea963b68f3feffb1a9d5d05a18401ca9eb3e8d0a3"},
+ {file = "frozenlist-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4a44ebbf601d7bac77976d429e9bdb5a4614f9f4027777f9e54fd765196e9d3b"},
+ {file = "frozenlist-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:45334234ec30fc4ea677f43171b18a27505bfb2dba9aca4398a62692c0ea8868"},
+ {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47be22dc27ed933d55ee55845d34a3e4e9f6fee93039e7f8ebadb0c2f60d403f"},
+ {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03a7dd1bfce30216a3f51a84e6dd0e4a573d23ca50f0346634916ff105ba6e6b"},
+ {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:691ddf6dc50480ce49f68441f1d16a4c3325887453837036e0fb94736eae1e58"},
+ {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bde99812f237f79eaf3f04ebffd74f6718bbd216101b35ac7955c2d47c17da02"},
+ {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a202458d1298ced3768f5a7d44301e7c86defac162ace0ab7434c2e961166e8"},
+ {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9e3e9e365991f8cc5f5edc1fd65b58b41d0514a6a7ad95ef5c7f34eb49b3d3e"},
+ {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:04cb491c4b1c051734d41ea2552fde292f5f3a9c911363f74f39c23659c4af78"},
+ {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:436496321dad302b8b27ca955364a439ed1f0999311c393dccb243e451ff66aa"},
+ {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:754728d65f1acc61e0f4df784456106e35afb7bf39cfe37227ab00436fb38676"},
+ {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6eb275c6385dd72594758cbe96c07cdb9bd6becf84235f4a594bdf21e3596c9d"},
+ {file = "frozenlist-1.3.0-cp310-cp310-win32.whl", hash = "sha256:e30b2f9683812eb30cf3f0a8e9f79f8d590a7999f731cf39f9105a7c4a39489d"},
+ {file = "frozenlist-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f7353ba3367473d1d616ee727945f439e027f0bb16ac1a750219a8344d1d5d3c"},
+ {file = "frozenlist-1.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88aafd445a233dbbf8a65a62bc3249a0acd0d81ab18f6feb461cc5a938610d24"},
+ {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4406cfabef8f07b3b3af0f50f70938ec06d9f0fc26cbdeaab431cbc3ca3caeaa"},
+ {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cf829bd2e2956066dd4de43fd8ec881d87842a06708c035b37ef632930505a2"},
+ {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:603b9091bd70fae7be28bdb8aa5c9990f4241aa33abb673390a7f7329296695f"},
+ {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25af28b560e0c76fa41f550eacb389905633e7ac02d6eb3c09017fa1c8cdfde1"},
+ {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c7a8a9fc9383b52c410a2ec952521906d355d18fccc927fca52ab575ee8b93"},
+ {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:65bc6e2fece04e2145ab6e3c47428d1bbc05aede61ae365b2c1bddd94906e478"},
+ {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3f7c935c7b58b0d78c0beea0c7358e165f95f1fd8a7e98baa40d22a05b4a8141"},
+ {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd89acd1b8bb4f31b47072615d72e7f53a948d302b7c1d1455e42622de180eae"},
+ {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:6983a31698490825171be44ffbafeaa930ddf590d3f051e397143a5045513b01"},
+ {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:adac9700675cf99e3615eb6a0eb5e9f5a4143c7d42c05cea2e7f71c27a3d0846"},
+ {file = "frozenlist-1.3.0-cp37-cp37m-win32.whl", hash = "sha256:0c36e78b9509e97042ef869c0e1e6ef6429e55817c12d78245eb915e1cca7468"},
+ {file = "frozenlist-1.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:57f4d3f03a18facacb2a6bcd21bccd011e3b75d463dc49f838fd699d074fabd1"},
+ {file = "frozenlist-1.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8c905a5186d77111f02144fab5b849ab524f1e876a1e75205cd1386a9be4b00a"},
+ {file = "frozenlist-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b5009062d78a8c6890d50b4e53b0ddda31841b3935c1937e2ed8c1bda1c7fb9d"},
+ {file = "frozenlist-1.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2fdc3cd845e5a1f71a0c3518528bfdbfe2efaf9886d6f49eacc5ee4fd9a10953"},
+ {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e650bd09b5dda929523b9f8e7f99b24deac61240ecc1a32aeba487afcd970f"},
+ {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40dff8962b8eba91fd3848d857203f0bd704b5f1fa2b3fc9af64901a190bba08"},
+ {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:768efd082074bb203c934e83a61654ed4931ef02412c2fbdecea0cff7ecd0274"},
+ {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:006d3595e7d4108a12025ddf415ae0f6c9e736e726a5db0183326fd191b14c5e"},
+ {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:871d42623ae15eb0b0e9df65baeee6976b2e161d0ba93155411d58ff27483ad8"},
+ {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aff388be97ef2677ae185e72dc500d19ecaf31b698986800d3fc4f399a5e30a5"},
+ {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9f892d6a94ec5c7b785e548e42722e6f3a52f5f32a8461e82ac3e67a3bd073f1"},
+ {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:e982878792c971cbd60ee510c4ee5bf089a8246226dea1f2138aa0bb67aff148"},
+ {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c6c321dd013e8fc20735b92cb4892c115f5cdb82c817b1e5b07f6b95d952b2f0"},
+ {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:30530930410855c451bea83f7b272fb1c495ed9d5cc72895ac29e91279401db3"},
+ {file = "frozenlist-1.3.0-cp38-cp38-win32.whl", hash = "sha256:40ec383bc194accba825fbb7d0ef3dda5736ceab2375462f1d8672d9f6b68d07"},
+ {file = "frozenlist-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:f20baa05eaa2bcd5404c445ec51aed1c268d62600362dc6cfe04fae34a424bd9"},
+ {file = "frozenlist-1.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0437fe763fb5d4adad1756050cbf855bbb2bf0d9385c7bb13d7a10b0dd550486"},
+ {file = "frozenlist-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b684c68077b84522b5c7eafc1dc735bfa5b341fb011d5552ebe0968e22ed641c"},
+ {file = "frozenlist-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93641a51f89473837333b2f8100f3f89795295b858cd4c7d4a1f18e299dc0a4f"},
+ {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6d32ff213aef0fd0bcf803bffe15cfa2d4fde237d1d4838e62aec242a8362fa"},
+ {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31977f84828b5bb856ca1eb07bf7e3a34f33a5cddce981d880240ba06639b94d"},
+ {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c62964192a1c0c30b49f403495911298810bada64e4f03249ca35a33ca0417a"},
+ {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4eda49bea3602812518765810af732229b4291d2695ed24a0a20e098c45a707b"},
+ {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acb267b09a509c1df5a4ca04140da96016f40d2ed183cdc356d237286c971b51"},
+ {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e1e26ac0a253a2907d654a37e390904426d5ae5483150ce3adedb35c8c06614a"},
+ {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f96293d6f982c58ebebb428c50163d010c2f05de0cde99fd681bfdc18d4b2dc2"},
+ {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e84cb61b0ac40a0c3e0e8b79c575161c5300d1d89e13c0e02f76193982f066ed"},
+ {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:ff9310f05b9d9c5c4dd472983dc956901ee6cb2c3ec1ab116ecdde25f3ce4951"},
+ {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d26b650b71fdc88065b7a21f8ace70175bcf3b5bdba5ea22df4bfd893e795a3b"},
+ {file = "frozenlist-1.3.0-cp39-cp39-win32.whl", hash = "sha256:01a73627448b1f2145bddb6e6c2259988bb8aee0fb361776ff8604b99616cd08"},
+ {file = "frozenlist-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:772965f773757a6026dea111a15e6e2678fbd6216180f82a48a40b27de1ee2ab"},
+ {file = "frozenlist-1.3.0.tar.gz", hash = "sha256:ce6f2ba0edb7b0c1d8976565298ad2deba6f8064d2bebb6ffce2ca896eb35b0b"},
+]
idna = [
{file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
{file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
@@ -585,6 +813,67 @@ more-itertools = [
{file = "more-itertools-8.12.0.tar.gz", hash = "sha256:7dc6ad46f05f545f900dd59e8dfb4e84a4827b97b3cfecb175ea0c7d247f6064"},
{file = "more_itertools-8.12.0-py3-none-any.whl", hash = "sha256:43e6dd9942dffd72661a2c4ef383ad7da1e6a3e968a927ad7a6083ab410a688b"},
]
+multidict = [
+ {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b9e95a740109c6047602f4db4da9949e6c5945cefbad34a1299775ddc9a62e2"},
+ {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac0e27844758d7177989ce406acc6a83c16ed4524ebc363c1f748cba184d89d3"},
+ {file = "multidict-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:041b81a5f6b38244b34dc18c7b6aba91f9cdaf854d9a39e5ff0b58e2b5773b9c"},
+ {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fdda29a3c7e76a064f2477c9aab1ba96fd94e02e386f1e665bca1807fc5386f"},
+ {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3368bf2398b0e0fcbf46d85795adc4c259299fec50c1416d0f77c0a843a3eed9"},
+ {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4f052ee022928d34fe1f4d2bc743f32609fb79ed9c49a1710a5ad6b2198db20"},
+ {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:225383a6603c086e6cef0f2f05564acb4f4d5f019a4e3e983f572b8530f70c88"},
+ {file = "multidict-6.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50bd442726e288e884f7be9071016c15a8742eb689a593a0cac49ea093eef0a7"},
+ {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:47e6a7e923e9cada7c139531feac59448f1f47727a79076c0b1ee80274cd8eee"},
+ {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0556a1d4ea2d949efe5fd76a09b4a82e3a4a30700553a6725535098d8d9fb672"},
+ {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:626fe10ac87851f4cffecee161fc6f8f9853f0f6f1035b59337a51d29ff3b4f9"},
+ {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8064b7c6f0af936a741ea1efd18690bacfbae4078c0c385d7c3f611d11f0cf87"},
+ {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2d36e929d7f6a16d4eb11b250719c39560dd70545356365b494249e2186bc389"},
+ {file = "multidict-6.0.2-cp310-cp310-win32.whl", hash = "sha256:fcb91630817aa8b9bc4a74023e4198480587269c272c58b3279875ed7235c293"},
+ {file = "multidict-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:8cbf0132f3de7cc6c6ce00147cc78e6439ea736cee6bca4f068bcf892b0fd658"},
+ {file = "multidict-6.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:05f6949d6169878a03e607a21e3b862eaf8e356590e8bdae4227eedadacf6e51"},
+ {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2c2e459f7050aeb7c1b1276763364884595d47000c1cddb51764c0d8976e608"},
+ {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0509e469d48940147e1235d994cd849a8f8195e0bca65f8f5439c56e17872a3"},
+ {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:514fe2b8d750d6cdb4712346a2c5084a80220821a3e91f3f71eec11cf8d28fd4"},
+ {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19adcfc2a7197cdc3987044e3f415168fc5dc1f720c932eb1ef4f71a2067e08b"},
+ {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9d153e7f1f9ba0b23ad1568b3b9e17301e23b042c23870f9ee0522dc5cc79e8"},
+ {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:aef9cc3d9c7d63d924adac329c33835e0243b5052a6dfcbf7732a921c6e918ba"},
+ {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4571f1beddff25f3e925eea34268422622963cd8dc395bb8778eb28418248e43"},
+ {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:d48b8ee1d4068561ce8033d2c344cf5232cb29ee1a0206a7b828c79cbc5982b8"},
+ {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:45183c96ddf61bf96d2684d9fbaf6f3564d86b34cb125761f9a0ef9e36c1d55b"},
+ {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:75bdf08716edde767b09e76829db8c1e5ca9d8bb0a8d4bd94ae1eafe3dac5e15"},
+ {file = "multidict-6.0.2-cp37-cp37m-win32.whl", hash = "sha256:a45e1135cb07086833ce969555df39149680e5471c04dfd6a915abd2fc3f6dbc"},
+ {file = "multidict-6.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6f3cdef8a247d1eafa649085812f8a310e728bdf3900ff6c434eafb2d443b23a"},
+ {file = "multidict-6.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0327292e745a880459ef71be14e709aaea2f783f3537588fb4ed09b6c01bca60"},
+ {file = "multidict-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e875b6086e325bab7e680e4316d667fc0e5e174bb5611eb16b3ea121c8951b86"},
+ {file = "multidict-6.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feea820722e69451743a3d56ad74948b68bf456984d63c1a92e8347b7b88452d"},
+ {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc57c68cb9139c7cd6fc39f211b02198e69fb90ce4bc4a094cf5fe0d20fd8b0"},
+ {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:497988d6b6ec6ed6f87030ec03280b696ca47dbf0648045e4e1d28b80346560d"},
+ {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:89171b2c769e03a953d5969b2f272efa931426355b6c0cb508022976a17fd376"},
+ {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684133b1e1fe91eda8fa7447f137c9490a064c6b7f392aa857bba83a28cfb693"},
+ {file = "multidict-6.0.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd9fc9c4849a07f3635ccffa895d57abce554b467d611a5009ba4f39b78a8849"},
+ {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e07c8e79d6e6fd37b42f3250dba122053fddb319e84b55dd3a8d6446e1a7ee49"},
+ {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4070613ea2227da2bfb2c35a6041e4371b0af6b0be57f424fe2318b42a748516"},
+ {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:47fbeedbf94bed6547d3aa632075d804867a352d86688c04e606971595460227"},
+ {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5774d9218d77befa7b70d836004a768fb9aa4fdb53c97498f4d8d3f67bb9cfa9"},
+ {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2957489cba47c2539a8eb7ab32ff49101439ccf78eab724c828c1a54ff3ff98d"},
+ {file = "multidict-6.0.2-cp38-cp38-win32.whl", hash = "sha256:e5b20e9599ba74391ca0cfbd7b328fcc20976823ba19bc573983a25b32e92b57"},
+ {file = "multidict-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:8004dca28e15b86d1b1372515f32eb6f814bdf6f00952699bdeb541691091f96"},
+ {file = "multidict-6.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2e4a0785b84fb59e43c18a015ffc575ba93f7d1dbd272b4cdad9f5134b8a006c"},
+ {file = "multidict-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6701bf8a5d03a43375909ac91b6980aea74b0f5402fbe9428fc3f6edf5d9677e"},
+ {file = "multidict-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a007b1638e148c3cfb6bf0bdc4f82776cef0ac487191d093cdc316905e504071"},
+ {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07a017cfa00c9890011628eab2503bee5872f27144936a52eaab449be5eaf032"},
+ {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c207fff63adcdf5a485969131dc70e4b194327666b7e8a87a97fbc4fd80a53b2"},
+ {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:373ba9d1d061c76462d74e7de1c0c8e267e9791ee8cfefcf6b0b2495762c370c"},
+ {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfba7c6d5d7c9099ba21f84662b037a0ffd4a5e6b26ac07d19e423e6fdf965a9"},
+ {file = "multidict-6.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19d9bad105dfb34eb539c97b132057a4e709919ec4dd883ece5838bcbf262b80"},
+ {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:de989b195c3d636ba000ee4281cd03bb1234635b124bf4cd89eeee9ca8fcb09d"},
+ {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7c40b7bbece294ae3a87c1bc2abff0ff9beef41d14188cda94ada7bcea99b0fb"},
+ {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:d16cce709ebfadc91278a1c005e3c17dd5f71f5098bfae1035149785ea6e9c68"},
+ {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:a2c34a93e1d2aa35fbf1485e5010337c72c6791407d03aa5f4eed920343dd360"},
+ {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:feba80698173761cddd814fa22e88b0661e98cb810f9f986c54aa34d281e4937"},
+ {file = "multidict-6.0.2-cp39-cp39-win32.whl", hash = "sha256:23b616fdc3c74c9fe01d76ce0d1ce872d2d396d8fa8e4899398ad64fb5aa214a"},
+ {file = "multidict-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:4bae31803d708f6f15fd98be6a6ac0b6958fcf68fda3c77a048a4f9073704aae"},
+ {file = "multidict-6.0.2.tar.gz", hash = "sha256:5ff3bd75f38e4c43f1f470f2df7a4d430b821c4ce22be384e1459cb57d6bb013"},
+]
mypy = [
{file = "mypy-0.770-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:a34b577cdf6313bf24755f7a0e3f3c326d5c1f4fe7422d1d06498eb25ad0c600"},
{file = "mypy-0.770-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:86c857510a9b7c3104cf4cde1568f4921762c8f9842e987bc03ed4f160925754"},
@@ -787,6 +1076,80 @@ wrapt = [
{file = "wrapt-1.14.0-cp39-cp39-win_amd64.whl", hash = "sha256:bb36fbb48b22985d13a6b496ea5fb9bb2a076fea943831643836c9f6febbcfdc"},
{file = "wrapt-1.14.0.tar.gz", hash = "sha256:8323a43bd9c91f62bb7d4be74cc9ff10090e7ef820e27bfe8815c57e68261311"},
]
+yarl = [
+ {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2a8508f7350512434e41065684076f640ecce176d262a7d54f0da41d99c5a95"},
+ {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da6df107b9ccfe52d3a48165e48d72db0eca3e3029b5b8cb4fe6ee3cb870ba8b"},
+ {file = "yarl-1.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1d0894f238763717bdcfea74558c94e3bc34aeacd3351d769460c1a586a8b05"},
+ {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4b95b7e00c6635a72e2d00b478e8a28bfb122dc76349a06e20792eb53a523"},
+ {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c145ab54702334c42237a6c6c4cc08703b6aa9b94e2f227ceb3d477d20c36c63"},
+ {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ca56f002eaf7998b5fcf73b2421790da9d2586331805f38acd9997743114e98"},
+ {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1d3d5ad8ea96bd6d643d80c7b8d5977b4e2fb1bab6c9da7322616fd26203d125"},
+ {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:167ab7f64e409e9bdd99333fe8c67b5574a1f0495dcfd905bc7454e766729b9e"},
+ {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:95a1873b6c0dd1c437fb3bb4a4aaa699a48c218ac7ca1e74b0bee0ab16c7d60d"},
+ {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6152224d0a1eb254f97df3997d79dadd8bb2c1a02ef283dbb34b97d4f8492d23"},
+ {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bb7d54b8f61ba6eee541fba4b83d22b8a046b4ef4d8eb7f15a7e35db2e1e245"},
+ {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:9c1f083e7e71b2dd01f7cd7434a5f88c15213194df38bc29b388ccdf1492b739"},
+ {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f44477ae29025d8ea87ec308539f95963ffdc31a82f42ca9deecf2d505242e72"},
+ {file = "yarl-1.7.2-cp310-cp310-win32.whl", hash = "sha256:cff3ba513db55cc6a35076f32c4cdc27032bd075c9faef31fec749e64b45d26c"},
+ {file = "yarl-1.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:c9c6d927e098c2d360695f2e9d38870b2e92e0919be07dbe339aefa32a090265"},
+ {file = "yarl-1.7.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9b4c77d92d56a4c5027572752aa35082e40c561eec776048330d2907aead891d"},
+ {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c01a89a44bb672c38f42b49cdb0ad667b116d731b3f4c896f72302ff77d71656"},
+ {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c19324a1c5399b602f3b6e7db9478e5b1adf5cf58901996fc973fe4fccd73eed"},
+ {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3abddf0b8e41445426d29f955b24aeecc83fa1072be1be4e0d194134a7d9baee"},
+ {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6a1a9fe17621af43e9b9fcea8bd088ba682c8192d744b386ee3c47b56eaabb2c"},
+ {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8b0915ee85150963a9504c10de4e4729ae700af11df0dc5550e6587ed7891e92"},
+ {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:29e0656d5497733dcddc21797da5a2ab990c0cb9719f1f969e58a4abac66234d"},
+ {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:bf19725fec28452474d9887a128e98dd67eee7b7d52e932e6949c532d820dc3b"},
+ {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d6f3d62e16c10e88d2168ba2d065aa374e3c538998ed04996cd373ff2036d64c"},
+ {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ac10bbac36cd89eac19f4e51c032ba6b412b3892b685076f4acd2de18ca990aa"},
+ {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aa32aaa97d8b2ed4e54dc65d241a0da1c627454950f7d7b1f95b13985afd6c5d"},
+ {file = "yarl-1.7.2-cp36-cp36m-win32.whl", hash = "sha256:87f6e082bce21464857ba58b569370e7b547d239ca22248be68ea5d6b51464a1"},
+ {file = "yarl-1.7.2-cp36-cp36m-win_amd64.whl", hash = "sha256:ac35ccde589ab6a1870a484ed136d49a26bcd06b6a1c6397b1967ca13ceb3913"},
+ {file = "yarl-1.7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a467a431a0817a292121c13cbe637348b546e6ef47ca14a790aa2fa8cc93df63"},
+ {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ab0c3274d0a846840bf6c27d2c60ba771a12e4d7586bf550eefc2df0b56b3b4"},
+ {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d260d4dc495c05d6600264a197d9d6f7fc9347f21d2594926202fd08cf89a8ba"},
+ {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc4dd8b01a8112809e6b636b00f487846956402834a7fd59d46d4f4267181c41"},
+ {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c1164a2eac148d85bbdd23e07dfcc930f2e633220f3eb3c3e2a25f6148c2819e"},
+ {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:67e94028817defe5e705079b10a8438b8cb56e7115fa01640e9c0bb3edf67332"},
+ {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:89ccbf58e6a0ab89d487c92a490cb5660d06c3a47ca08872859672f9c511fc52"},
+ {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8cce6f9fa3df25f55521fbb5c7e4a736683148bcc0c75b21863789e5185f9185"},
+ {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:211fcd65c58bf250fb994b53bc45a442ddc9f441f6fec53e65de8cba48ded986"},
+ {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c10ea1e80a697cf7d80d1ed414b5cb8f1eec07d618f54637067ae3c0334133c4"},
+ {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:52690eb521d690ab041c3919666bea13ab9fbff80d615ec16fa81a297131276b"},
+ {file = "yarl-1.7.2-cp37-cp37m-win32.whl", hash = "sha256:695ba021a9e04418507fa930d5f0704edbce47076bdcfeeaba1c83683e5649d1"},
+ {file = "yarl-1.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c17965ff3706beedafd458c452bf15bac693ecd146a60a06a214614dc097a271"},
+ {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fce78593346c014d0d986b7ebc80d782b7f5e19843ca798ed62f8e3ba8728576"},
+ {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c2a1ac41a6aa980db03d098a5531f13985edcb451bcd9d00670b03129922cd0d"},
+ {file = "yarl-1.7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:39d5493c5ecd75c8093fa7700a2fb5c94fe28c839c8e40144b7ab7ccba6938c8"},
+ {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eb6480ef366d75b54c68164094a6a560c247370a68c02dddb11f20c4c6d3c9d"},
+ {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ba63585a89c9885f18331a55d25fe81dc2d82b71311ff8bd378fc8004202ff6"},
+ {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e39378894ee6ae9f555ae2de332d513a5763276a9265f8e7cbaeb1b1ee74623a"},
+ {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c0910c6b6c31359d2f6184828888c983d54d09d581a4a23547a35f1d0b9484b1"},
+ {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6feca8b6bfb9eef6ee057628e71e1734caf520a907b6ec0d62839e8293e945c0"},
+ {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8300401dc88cad23f5b4e4c1226f44a5aa696436a4026e456fe0e5d2f7f486e6"},
+ {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:788713c2896f426a4e166b11f4ec538b5736294ebf7d5f654ae445fd44270832"},
+ {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fd547ec596d90c8676e369dd8a581a21227fe9b4ad37d0dc7feb4ccf544c2d59"},
+ {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:737e401cd0c493f7e3dd4db72aca11cfe069531c9761b8ea474926936b3c57c8"},
+ {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf81561f2972fb895e7844882898bda1eef4b07b5b385bcd308d2098f1a767b"},
+ {file = "yarl-1.7.2-cp38-cp38-win32.whl", hash = "sha256:ede3b46cdb719c794427dcce9d8beb4abe8b9aa1e97526cc20de9bd6583ad1ef"},
+ {file = "yarl-1.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:cc8b7a7254c0fc3187d43d6cb54b5032d2365efd1df0cd1749c0c4df5f0ad45f"},
+ {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:580c1f15500e137a8c37053e4cbf6058944d4c114701fa59944607505c2fe3a0"},
+ {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ec1d9a0d7780416e657f1e405ba35ec1ba453a4f1511eb8b9fbab81cb8b3ce1"},
+ {file = "yarl-1.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3bf8cfe8856708ede6a73907bf0501f2dc4e104085e070a41f5d88e7faf237f3"},
+ {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1be4bbb3d27a4e9aa5f3df2ab61e3701ce8fcbd3e9846dbce7c033a7e8136746"},
+ {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:534b047277a9a19d858cde163aba93f3e1677d5acd92f7d10ace419d478540de"},
+ {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6ddcd80d79c96eb19c354d9dca95291589c5954099836b7c8d29278a7ec0bda"},
+ {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9bfcd43c65fbb339dc7086b5315750efa42a34eefad0256ba114cd8ad3896f4b"},
+ {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f64394bd7ceef1237cc604b5a89bf748c95982a84bcd3c4bbeb40f685c810794"},
+ {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044daf3012e43d4b3538562da94a88fb12a6490652dbc29fb19adfa02cf72eac"},
+ {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:368bcf400247318382cc150aaa632582d0780b28ee6053cd80268c7e72796dec"},
+ {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:bab827163113177aee910adb1f48ff7af31ee0289f434f7e22d10baf624a6dfe"},
+ {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0cba38120db72123db7c58322fa69e3c0efa933040ffb586c3a87c063ec7cae8"},
+ {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:59218fef177296451b23214c91ea3aba7858b4ae3306dde120224cfe0f7a6ee8"},
+ {file = "yarl-1.7.2-cp39-cp39-win32.whl", hash = "sha256:1edc172dcca3f11b38a9d5c7505c83c1913c0addc99cd28e993efeaafdfaa18d"},
+ {file = "yarl-1.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:797c2c412b04403d2da075fb93c123df35239cd7b4cc4e0cd9e5839b73f52c58"},
+ {file = "yarl-1.7.2.tar.gz", hash = "sha256:45399b46d60c253327a460e99856752009fcee5f5d3c80b2f7c0cae1c38d56dd"},
+]
zipp = [
{file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"},
{file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"},
diff --git a/cdpgen/__init__.py b/pycdp/__init__.py
similarity index 100%
rename from cdpgen/__init__.py
rename to pycdp/__init__.py
diff --git a/pycdp/asyncio.py b/pycdp/asyncio.py
new file mode 100644
index 0000000..4af8d75
--- /dev/null
+++ b/pycdp/asyncio.py
@@ -0,0 +1,429 @@
+from __future__ import annotations
+import json
+import asyncio
+import itertools
+import typing as t
+from collections import defaultdict
+from contextlib import asynccontextmanager
+from aiohttp import ClientSession
+from aiohttp.client import ClientWebSocketResponse
+from aiohttp.http_websocket import WSMsgType, WSCloseCode
+from aiohttp.client_exceptions import (
+ ClientResponseError, ClientConnectorError, ClientConnectionError, ServerDisconnectedError
+)
+from pycdp.utils import ContextLoggerMixin, LoggerMixin, SingleTaskWorker, retry_on
+from pycdp import cdp
+
+
+T = t.TypeVar('T')
+
+
class CDPError(Exception):
    ''' Base class for all errors raised by this CDP integration. '''
+
+
class CDPBrowserError(CDPError):
    ''' This exception is raised when the browser's response to a command
    indicates that an error occurred. '''
    def __init__(self, obj):
        '''
        :param obj: the ``error`` object from a CDP response, containing at
            least ``code`` and ``message``, and optionally ``data``
        '''
        self.code: int = obj['code']
        self.message: str = obj['message']
        self.detail = obj.get('data')

    def __str__(self):
        # BUG FIX: the original format string had a single placeholder but was
        # given three arguments, so the message and detail were silently
        # dropped from the rendered error.
        return 'BrowserError<code={} message={!r} detail={!r}>'.format(
            self.code, self.message, self.detail)
+
+
class CDPConnectionClosed(CDPError):
    ''' Raised when a public method is called on a closed CDP connection. '''

    def __init__(self, reason):
        '''
        :param reason: description of why the connection was closed
        '''
        self.reason = reason

    def __repr__(self):
        ''' Return a short representation including the close reason. '''
        return f'{self.__class__.__name__}<{self.reason}>'
+
+
class CDPSessionClosed(CDPError):
    ''' Raised to fail commands that were still in flight when their CDP
    session was closed. '''
+
+
class CDPInternalError(CDPError):
    ''' This exception is only raised when there is faulty logic in this
    library or its integration with PyCDP. '''
+
+
class CDPEventListenerClosed(CDPError):
    ''' Raised when an event is pushed into a listener that has already been
    closed. '''
+
+_CLOSE_SENTINEL = object
class CDPEventListener:
    ''' A single subscriber's buffered stream of CDP events.

    Instances are registered by ``CDPBase.listen()``; the connection pushes
    decoded events in with :meth:`put` and the consumer drains them by async
    iteration. '''

    def __init__(self, queue: asyncio.Queue):
        '''
        :param queue: bounded buffer; when full, the dispatcher drops events
            for this listener rather than blocking the connection
        '''
        self._queue = queue
        self._closed = False

    @property
    def closed(self) -> bool:
        ''' True once the listener has been closed. '''
        return self._closed

    def put(self, elem: dict):
        ''' Enqueue one event without blocking.

        :raises CDPEventListenerClosed: if the listener was closed
        :raises asyncio.QueueFull: if the buffer is full
        '''
        if self._closed:
            raise CDPEventListenerClosed
        self._queue.put_nowait(elem)

    def close(self):
        ''' Mark the listener closed and wake the iterating consumer. '''
        self._closed = True
        try:
            self._queue.put_nowait(_CLOSE_SENTINEL)
        except asyncio.QueueFull:
            # The consumer will still observe self._closed on its next check.
            pass

    async def __aiter__(self):
        try:
            while not self._closed:
                elem = await self._queue.get()
                if elem is _CLOSE_SENTINEL:
                    return
                yield elem
        finally:
            # Closing here lets the dispatcher prune this listener lazily.
            self._closed = True

    def __str__(self) -> str:
        # BUG FIX: asyncio.Queue.maxsize is an attribute, not a method; the
        # original called it, raising TypeError whenever a listener was
        # formatted (e.g. in the "queue is full" warning log).
        return (f'{self.__class__.__name__}('
                f'buffer={self._queue.qsize()}/{self._queue.maxsize}, '
                f'closed={self._closed})')
+
+
class CDPBase(LoggerMixin):
    '''
    Contains shared functionality between the CDP connection and session.

    Responsibilities visible here: assigning IDs to outgoing commands,
    matching command responses to their waiting futures, and dispatching
    parsed events to registered listeners.
    '''
    def __init__(self, ws: t.Optional[ClientWebSocketResponse]=None, session_id=None, target_id=None):
        '''
        :param ws: WebSocket shared with the owning connection (None until
            the connection is established)
        :param session_id: CDP session ID; None for the root connection
        :param target_id: CDP target ID; None for the root connection
        '''
        super().__init__()
        # Maps an event type to the set of listeners subscribed to it.
        self._listeners: t.Dict[type, t.Set[CDPEventListener]] = defaultdict(set)
        # Monotonically increasing command IDs, unique per connection/session.
        self._id_iter = itertools.count()
        # Maps a command ID to (command generator, future for its response).
        self._inflight_cmd: t.Dict[int, t.Tuple[t.Generator[dict, dict, t.Any], asyncio.Future]] = {}
        self._session_id = session_id
        self._target_id = target_id
        self._ws = ws

    @property
    def session_id(self) -> cdp.target.SessionID:
        ''' The CDP session ID (None for the root connection). '''
        return self._session_id

    async def execute(self, cmd: t.Generator[dict, dict, T]) -> T:
        '''
        Execute a command on the server and wait for the result.

        :param cmd: any CDP command (a generator that yields the request
            dictionary and receives the response dictionary)
        :returns: a CDP result
        :raises CDPConnectionClosed: if the WebSocket is reset while sending
        :raises CDPBrowserError: if the browser reports an error response
        '''
        cmd_id = next(self._id_iter)
        cmd_response = asyncio.get_running_loop().create_future()
        self._inflight_cmd[cmd_id] = cmd, cmd_response
        # The generator's first yield is the JSON-RPC request skeleton.
        request = next(cmd)
        request['id'] = cmd_id
        if self._session_id:
            request['sessionId'] = self._session_id
        self._logger.debug('sending command %r', request)
        request_str = json.dumps(request)
        try:
            try:
                await self._ws.send_str(request_str)
            except ConnectionResetError as e:
                del self._inflight_cmd[cmd_id]
                raise CDPConnectionClosed(e.args[0]) from e
            # Resolved (or failed) by _handle_cmd_response() when the
            # matching reply arrives.
            return await cmd_response
        except asyncio.CancelledError:
            # Forget the command so that a late response is ignored instead
            # of resolving a cancelled future.
            if cmd_id in self._inflight_cmd:
                del self._inflight_cmd[cmd_id]
            raise

    def listen(self, *event_types: t.Type[T], buffer_size=100) -> t.AsyncIterator[T]:
        '''Return an async iterator that iterates over events matching the
        indicated types.

        :param event_types: one or more CDP event classes to subscribe to
        :param buffer_size: per-listener queue bound; when full, further
            events for this listener are dropped by the dispatcher
        '''
        receiver = CDPEventListener(asyncio.Queue(buffer_size))
        for event_type in event_types:
            self._listeners[event_type].add(receiver)
        return receiver.__aiter__()

    @asynccontextmanager
    async def wait_for(self, event_type: t.Type[T], buffer_size=100) -> t.AsyncGenerator[T, None]:
        '''
        Wait for an event of the given type and return it.

        This is an async context manager, so you should open it inside an async
        with block. The block will not exit until the indicated event is
        received.
        '''
        # NOTE(review): the temporary listener is only pruned lazily from
        # self._listeners once it is closed — confirm the generator is
        # finalized promptly after the single event is yielded.
        async for event in self.listen(event_type, buffer_size):
            yield event
            return

    def close_listeners(self):
        ''' Close every registered listener and drop all registrations. '''
        for listener in itertools.chain.from_iterable(self._listeners.values()):
            listener.close()
        self._listeners.clear()

    def _handle_data(self, data):
        '''
        Handle incoming WebSocket data.

        Messages carrying an ``id`` are command responses; everything else
        is an event.

        :param dict data: a JSON dictionary
        '''
        if 'id' in data:
            self._handle_cmd_response(data)
        else:
            self._handle_event(data)

    def _handle_cmd_response(self, data):
        '''
        Handle a response to a command. This will set an event flag that will
        return control to the task that called the command.

        :param dict data: response as a JSON dictionary
        '''
        cmd_id = data['id']
        try:
            cmd, event = self._inflight_cmd.pop(cmd_id)
        except KeyError:
            # E.g. the command was cancelled and already removed from the
            # in-flight table; drop the late response.
            self._logger.debug('got a message with a command ID that does not exist: %s', data)
            return
        if 'error' in data:
            # If the server reported an error, convert it to an exception and do
            # not process the response any further.
            event.set_exception(CDPBrowserError(data['error']))
        else:
            # Otherwise, continue the generator to parse the JSON result
            # into a CDP object.
            try:
                cmd.send(data['result'])
                event.set_exception(CDPInternalError("the command's generator function did not exit when expected!"))
            except StopIteration as e:
                # Normal path: the generator returns the parsed result.
                event.set_result(e.value)

    def _handle_event(self, data):
        '''
        Handle an event.

        Events are dropped (with a warning) for listeners whose queues are
        full, and listeners that were closed are pruned from the registry.

        :param dict data: event as a JSON dictionary
        '''
        event = cdp.util.parse_json_event(data)
        self._logger.debug('dispatching event %s', event)
        to_remove = set()
        for listener in self._listeners[type(event)]:
            try:
                listener.put(event)
            except asyncio.QueueFull:
                self._logger.warning('event %s dropped because listener %s queue is full', type(event), listener)
            except CDPEventListenerClosed:
                to_remove.add(listener)
        self._listeners[type(event)] -= to_remove
        self._logger.debug('event dispatched')
+
+
class CDPConnection(CDPBase, SingleTaskWorker):
    '''
    Contains the connection state for a Chrome DevTools Protocol server.

    CDP can multiplex multiple "sessions" over a single connection. This class
    corresponds to the "root" session, i.e. the implicitly created session that
    has no session ID. This class is responsible for reading incoming WebSocket
    messages and forwarding them to the corresponding session, as well as
    handling messages targeted at the root session itself.

    You should generally call the :func:`open_cdp()` instead of
    instantiating this class directly.
    '''
    def __init__(self, debugging_url: str, http_client: ClientSession):
        '''
        :param debugging_url: an ``http://`` endpoint (resolved to a WebSocket
            URL via ``/json/version``) or a ``ws://`` URL used as-is
        :param http_client: session used for HTTP and WebSocket traffic;
            closed when this connection is closed
        '''
        super().__init__()
        self._debugging_url = debugging_url.rstrip('/')
        self._http_client = http_client
        self._wsurl: t.Optional[str] = None
        self._ws_context = None
        # Maps a CDP session ID to its CDPSession.
        self._sessions: t.Dict[str, CDPSession] = {}

    @property
    def closed(self) -> bool:
        ''' True if the underlying WebSocket is closed. '''
        return self._ws.closed

    @property
    def had_normal_closure(self) -> bool:
        ''' True if the WebSocket closed with the normal (OK) close code. '''
        return self._ws.close_code == WSCloseCode.OK

    @retry_on(
        ClientConnectorError, asyncio.TimeoutError,
        retries=10, delay=3.0, delay_growth=1.3, log_errors=True
    )
    async def connect(self):
        '''
        Resolve the WebSocket debugger URL (if needed) and open the WebSocket.

        :raises RuntimeError: if already connected
        :raises ClientResponseError: if ``/json/version`` does not return 200
        :raises ValueError: if the debugging URL scheme is neither http nor ws
        '''
        if self._ws is not None:
            raise RuntimeError('already connected')
        if self._wsurl is None:
            if self._debugging_url.startswith('http://'):
                async with self._http_client.get(f'{self._debugging_url}/json/version') as resp:
                    if resp.status != 200:
                        raise ClientResponseError(
                            resp.request_info,
                            resp.history,
                            status=resp.status,
                            message=resp.reason,
                            headers=resp.headers
                        )
                    self._wsurl = (await resp.json())['webSocketDebuggerUrl']
            elif self._debugging_url.startswith('ws://'):
                self._wsurl = self._debugging_url
            else:
                raise ValueError('bad debugging URL scheme')
        # Awaiting ws_connect() directly yields the ClientWebSocketResponse;
        # there is no need to invoke the context-manager protocol by hand.
        self._ws = await self._http_client.ws_connect(
            self._wsurl, compress=15, autoping=True, autoclose=True)

    def add_session(self, session_id: str, target_id: str) -> CDPSession:
        '''
        Return the existing session registered under ``session_id``, or create
        and register a new one.

        BUG FIX: the original membership test used ``is`` instead of ``in``,
        so an existing session was never found and was silently replaced.
        '''
        if session_id in self._sessions:
            return self._sessions[session_id]
        session = CDPSession(self._ws, session_id, target_id)
        self._sessions[session_id] = session
        return session

    def remove_session(self, session_id: str):
        ''' Unregister and close the session, if it exists. '''
        if session_id in self._sessions:
            self._sessions.pop(session_id).close()

    async def connect_session(self, target_id: cdp.target.TargetID) -> 'CDPSession':
        '''
        Returns a new :class:`CDPSession` connected to the specified target.
        '''
        session_id = await self.execute(cdp.target.attach_to_target(target_id, True))
        session = CDPSession(self._ws, session_id, target_id)
        self._sessions[session_id] = session
        return session

    async def _run(self):
        '''
        Read WebSocket messages forever, routing each one to the matching
        session (via ``sessionId``) or to the root connection itself.
        '''
        while True:
            message = await self._ws.receive()
            if message.type == WSMsgType.TEXT:
                try:
                    data = json.loads(message.data)
                except json.JSONDecodeError:
                    raise CDPBrowserError({
                        'code': -32700,
                        'message': 'Client received invalid JSON',
                        'data': message
                    })
                if 'sessionId' in data:
                    session_id = cdp.target.SessionID(data['sessionId'])
                    try:
                        session = self._sessions[session_id]
                    except KeyError:
                        # The session may have been removed concurrently.
                        self._logger.debug(f'received message for unknown session: {data}')
                        continue
                    session._handle_data(data)
                else:
                    self._handle_data(data)
            elif message.type == WSMsgType.CLOSE or message.type == WSMsgType.CLOSING or message.type == WSMsgType.CLOSED:
                # Remote closed (or is closing) the connection: stop reading.
                return
            elif message.type == WSMsgType.ERROR:
                # aiohttp stores the underlying exception in message.data.
                raise message.data
            else:
                await self._ws.close(code=WSCloseCode.UNSUPPORTED_DATA)
                raise CDPConnectionClosed('received non text frame from remote peer')

    async def _close(self):
        '''
        Tear down the connection: close all sessions and listeners, then the
        WebSocket, and always close the HTTP client.
        '''
        try:
            await super()._close()
            for session in self._sessions.values():
                session.close()
            self._sessions.clear()
            self.close_listeners()
            if self._ws is not None and not self._ws.closed:
                await self._ws.close()
        finally:
            await self._http_client.close()
+
+
+class CDPSession(CDPBase, ContextLoggerMixin):
+ '''
+ Contains the state for a CDP session.
+
+ Generally you should not instantiate this object yourself; you should call
+    :meth:`CDPConnection.connect_session`.
+ '''
+ def __init__(self, ws: ClientWebSocketResponse, session_id: cdp.target.SessionID, target_id: cdp.target.TargetID):
+ super().__init__(ws, session_id, target_id)
+ self._dom_enable_count = 0
+ self._dom_enable_lock = asyncio.Lock()
+ self._page_enable_count = 0
+ self._page_enable_lock = asyncio.Lock()
+ self.set_logger_context(extra_name=session_id)
+
+ @asynccontextmanager
+ async def dom_enable(self):
+ '''
+ A context manager that executes ``dom.enable()`` when it enters and then
+        calls ``dom.disable()`` when it exits.
+
+ This keeps track of concurrent callers and only disables DOM events when
+ all callers have exited.
+ '''
+ async with self._dom_enable_lock:
+ self._dom_enable_count += 1
+ if self._dom_enable_count == 1:
+ await self.execute(cdp.dom.enable())
+
+ yield
+
+ async with self._dom_enable_lock:
+ self._dom_enable_count -= 1
+ if self._dom_enable_count == 0:
+ await self.execute(cdp.dom.disable())
+
+ @asynccontextmanager
+ async def page_enable(self):
+ '''
+ A context manager that executes ``page.enable()`` when it enters and
+ then calls ``page.disable()`` when it exits.
+
+ This keeps track of concurrent callers and only disables page events
+ when all callers have exited.
+ '''
+ async with self._page_enable_lock:
+ self._page_enable_count += 1
+ if self._page_enable_count == 1:
+ await self.execute(cdp.page.enable())
+
+ yield
+
+ async with self._page_enable_lock:
+ self._page_enable_count -= 1
+ if self._page_enable_count == 0:
+ await self.execute(cdp.page.disable())
+
+ def close(self):
+ if len(self._inflight_cmd) > 0:
+ exc = CDPSessionClosed()
+ for (_, event) in self._inflight_cmd.values():
+ if not event.done():
+ event.set_exception(exc)
+ self._inflight_cmd.clear()
+ self.close_listeners()
+
+
+@retry_on(ClientConnectionError, ServerDisconnectedError, retries=10, delay=3.0, delay_growth=1.3, log_errors=True)
+async def connect_cdp(url: str) -> CDPConnection:
+ '''
+ Connect to the browser specified by debugging ``url``.
+
+ This connection is not automatically closed! You can either use the connection
+ object as a context manager (``async with conn:``) or else call ``await
+    conn.close()`` on it when you are done with it.
+ '''
+ http = ClientSession()
+ cdp_conn = CDPConnection(url, http)
+ try:
+ await cdp_conn.connect()
+ cdp_conn.start()
+ except:
+ await http.close()
+ raise
+ return cdp_conn
diff --git a/cdp/__init__.py b/pycdp/cdp/__init__.py
similarity index 100%
rename from cdp/__init__.py
rename to pycdp/cdp/__init__.py
diff --git a/cdp/accessibility.py b/pycdp/cdp/accessibility.py
similarity index 100%
rename from cdp/accessibility.py
rename to pycdp/cdp/accessibility.py
diff --git a/cdp/animation.py b/pycdp/cdp/animation.py
similarity index 100%
rename from cdp/animation.py
rename to pycdp/cdp/animation.py
diff --git a/cdp/audits.py b/pycdp/cdp/audits.py
similarity index 100%
rename from cdp/audits.py
rename to pycdp/cdp/audits.py
diff --git a/cdp/background_service.py b/pycdp/cdp/background_service.py
similarity index 100%
rename from cdp/background_service.py
rename to pycdp/cdp/background_service.py
diff --git a/cdp/browser.py b/pycdp/cdp/browser.py
similarity index 100%
rename from cdp/browser.py
rename to pycdp/cdp/browser.py
diff --git a/cdp/cache_storage.py b/pycdp/cdp/cache_storage.py
similarity index 100%
rename from cdp/cache_storage.py
rename to pycdp/cdp/cache_storage.py
diff --git a/cdp/cast.py b/pycdp/cdp/cast.py
similarity index 100%
rename from cdp/cast.py
rename to pycdp/cdp/cast.py
diff --git a/cdp/console.py b/pycdp/cdp/console.py
similarity index 100%
rename from cdp/console.py
rename to pycdp/cdp/console.py
diff --git a/cdp/css.py b/pycdp/cdp/css.py
similarity index 100%
rename from cdp/css.py
rename to pycdp/cdp/css.py
diff --git a/cdp/database.py b/pycdp/cdp/database.py
similarity index 100%
rename from cdp/database.py
rename to pycdp/cdp/database.py
diff --git a/cdp/debugger.py b/pycdp/cdp/debugger.py
similarity index 100%
rename from cdp/debugger.py
rename to pycdp/cdp/debugger.py
diff --git a/cdp/device_orientation.py b/pycdp/cdp/device_orientation.py
similarity index 100%
rename from cdp/device_orientation.py
rename to pycdp/cdp/device_orientation.py
diff --git a/cdp/dom.py b/pycdp/cdp/dom.py
similarity index 100%
rename from cdp/dom.py
rename to pycdp/cdp/dom.py
diff --git a/cdp/dom_debugger.py b/pycdp/cdp/dom_debugger.py
similarity index 100%
rename from cdp/dom_debugger.py
rename to pycdp/cdp/dom_debugger.py
diff --git a/cdp/dom_snapshot.py b/pycdp/cdp/dom_snapshot.py
similarity index 100%
rename from cdp/dom_snapshot.py
rename to pycdp/cdp/dom_snapshot.py
diff --git a/cdp/dom_storage.py b/pycdp/cdp/dom_storage.py
similarity index 100%
rename from cdp/dom_storage.py
rename to pycdp/cdp/dom_storage.py
diff --git a/cdp/emulation.py b/pycdp/cdp/emulation.py
similarity index 100%
rename from cdp/emulation.py
rename to pycdp/cdp/emulation.py
diff --git a/cdp/event_breakpoints.py b/pycdp/cdp/event_breakpoints.py
similarity index 100%
rename from cdp/event_breakpoints.py
rename to pycdp/cdp/event_breakpoints.py
diff --git a/cdp/fetch.py b/pycdp/cdp/fetch.py
similarity index 100%
rename from cdp/fetch.py
rename to pycdp/cdp/fetch.py
diff --git a/cdp/headless_experimental.py b/pycdp/cdp/headless_experimental.py
similarity index 100%
rename from cdp/headless_experimental.py
rename to pycdp/cdp/headless_experimental.py
diff --git a/cdp/heap_profiler.py b/pycdp/cdp/heap_profiler.py
similarity index 100%
rename from cdp/heap_profiler.py
rename to pycdp/cdp/heap_profiler.py
diff --git a/cdp/indexed_db.py b/pycdp/cdp/indexed_db.py
similarity index 100%
rename from cdp/indexed_db.py
rename to pycdp/cdp/indexed_db.py
diff --git a/cdp/input_.py b/pycdp/cdp/input_.py
similarity index 100%
rename from cdp/input_.py
rename to pycdp/cdp/input_.py
diff --git a/cdp/inspector.py b/pycdp/cdp/inspector.py
similarity index 100%
rename from cdp/inspector.py
rename to pycdp/cdp/inspector.py
diff --git a/cdp/io.py b/pycdp/cdp/io.py
similarity index 100%
rename from cdp/io.py
rename to pycdp/cdp/io.py
diff --git a/cdp/layer_tree.py b/pycdp/cdp/layer_tree.py
similarity index 100%
rename from cdp/layer_tree.py
rename to pycdp/cdp/layer_tree.py
diff --git a/cdp/log.py b/pycdp/cdp/log.py
similarity index 100%
rename from cdp/log.py
rename to pycdp/cdp/log.py
diff --git a/cdp/media.py b/pycdp/cdp/media.py
similarity index 100%
rename from cdp/media.py
rename to pycdp/cdp/media.py
diff --git a/cdp/memory.py b/pycdp/cdp/memory.py
similarity index 100%
rename from cdp/memory.py
rename to pycdp/cdp/memory.py
diff --git a/cdp/network.py b/pycdp/cdp/network.py
similarity index 100%
rename from cdp/network.py
rename to pycdp/cdp/network.py
diff --git a/cdp/overlay.py b/pycdp/cdp/overlay.py
similarity index 100%
rename from cdp/overlay.py
rename to pycdp/cdp/overlay.py
diff --git a/cdp/page.py b/pycdp/cdp/page.py
similarity index 100%
rename from cdp/page.py
rename to pycdp/cdp/page.py
diff --git a/cdp/performance.py b/pycdp/cdp/performance.py
similarity index 100%
rename from cdp/performance.py
rename to pycdp/cdp/performance.py
diff --git a/cdp/performance_timeline.py b/pycdp/cdp/performance_timeline.py
similarity index 100%
rename from cdp/performance_timeline.py
rename to pycdp/cdp/performance_timeline.py
diff --git a/cdp/profiler.py b/pycdp/cdp/profiler.py
similarity index 100%
rename from cdp/profiler.py
rename to pycdp/cdp/profiler.py
diff --git a/cdp/py.typed b/pycdp/cdp/py.typed
similarity index 100%
rename from cdp/py.typed
rename to pycdp/cdp/py.typed
diff --git a/cdp/runtime.py b/pycdp/cdp/runtime.py
similarity index 100%
rename from cdp/runtime.py
rename to pycdp/cdp/runtime.py
diff --git a/cdp/schema.py b/pycdp/cdp/schema.py
similarity index 100%
rename from cdp/schema.py
rename to pycdp/cdp/schema.py
diff --git a/cdp/security.py b/pycdp/cdp/security.py
similarity index 100%
rename from cdp/security.py
rename to pycdp/cdp/security.py
diff --git a/cdp/service_worker.py b/pycdp/cdp/service_worker.py
similarity index 100%
rename from cdp/service_worker.py
rename to pycdp/cdp/service_worker.py
diff --git a/cdp/storage.py b/pycdp/cdp/storage.py
similarity index 100%
rename from cdp/storage.py
rename to pycdp/cdp/storage.py
diff --git a/cdp/system_info.py b/pycdp/cdp/system_info.py
similarity index 100%
rename from cdp/system_info.py
rename to pycdp/cdp/system_info.py
diff --git a/cdp/target.py b/pycdp/cdp/target.py
similarity index 100%
rename from cdp/target.py
rename to pycdp/cdp/target.py
diff --git a/cdp/tethering.py b/pycdp/cdp/tethering.py
similarity index 100%
rename from cdp/tethering.py
rename to pycdp/cdp/tethering.py
diff --git a/cdp/tracing.py b/pycdp/cdp/tracing.py
similarity index 100%
rename from cdp/tracing.py
rename to pycdp/cdp/tracing.py
diff --git a/cdp/util.py b/pycdp/cdp/util.py
similarity index 100%
rename from cdp/util.py
rename to pycdp/cdp/util.py
diff --git a/cdp/web_audio.py b/pycdp/cdp/web_audio.py
similarity index 100%
rename from cdp/web_audio.py
rename to pycdp/cdp/web_audio.py
diff --git a/cdp/web_authn.py b/pycdp/cdp/web_authn.py
similarity index 100%
rename from cdp/web_authn.py
rename to pycdp/cdp/web_authn.py
diff --git a/cdpgen/README.md b/pycdp/gen/README.md
similarity index 100%
rename from cdpgen/README.md
rename to pycdp/gen/README.md
diff --git a/pycdp/gen/__init__.py b/pycdp/gen/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/cdpgen/browser_protocol.json b/pycdp/gen/browser_protocol.json
similarity index 100%
rename from cdpgen/browser_protocol.json
rename to pycdp/gen/browser_protocol.json
diff --git a/cdpgen/generate.py b/pycdp/gen/generate.py
similarity index 100%
rename from cdpgen/generate.py
rename to pycdp/gen/generate.py
diff --git a/cdpgen/js_protocol.json b/pycdp/gen/js_protocol.json
similarity index 100%
rename from cdpgen/js_protocol.json
rename to pycdp/gen/js_protocol.json
diff --git a/cdpgen/test_generate.py b/pycdp/gen/test_generate.py
similarity index 100%
rename from cdpgen/test_generate.py
rename to pycdp/gen/test_generate.py
diff --git a/pycdp/utils.py b/pycdp/utils.py
new file mode 100644
index 0000000..fec9608
--- /dev/null
+++ b/pycdp/utils.py
@@ -0,0 +1,367 @@
+import sys
+import random
+import inspect
+import asyncio
+import logging
+import functools
+import typing as t
+from types import SimpleNamespace, TracebackType
+
+
+_T = t.TypeVar('_T')
+
+
+class LoggerMixin:
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._logger = logging.getLogger(f'pycdp.{type(self).__name__}')
+
+class ContextLoggerMixin(LoggerMixin):
+ logging.getLogger('pycdp.ContextLoggerMixin') # just create the logger
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._logger = logging.LoggerAdapter(
+ logging.getLogger(f'pycdp.ContextLoggerMixin.{type(self).__name__}'),
+ {}
+ )
+ self.set_logger_context(realname=f'pycdp.{type(self).__name__}')
+
+ def set_logger_context(self, **context):
+ self._logger.extra.update(context)
+
+
+class DoneTask:
+
+ def done(self):
+ return True
+
+ def cancel(self):
+ pass
+
+
+class Retry(LoggerMixin):
+
+ def __init__(self,
+ func,
+ exception_class: t.Collection[BaseException],
+ *,
+ retries: int = 1,
+ on_error: t.Union[str, t.Callable[[], t.Awaitable[None]]] = None,
+ log_errors: bool = False
+ ):
+ super().__init__()
+ self._func = func
+ self._errors = exception_class
+ self._retries = retries
+ self._log_errors = log_errors
+ self._on_error_cb = on_error
+
+ async def __call__(self, *args, **kwargs):
+ context = self._create_call_context()
+ for i in range(self._retries + 1):
+ try:
+ return await self._func(*args, **kwargs)
+ except self._errors as e: # type: ignore
+ if i == self._retries:
+ raise e
+ if self._log_errors:
+ if self._logger.getEffectiveLevel() == logging.DEBUG:
+ self._logger.exception(
+ 'error in %s() (attempt %d of %d, at %s:%d), retrying:',
+ self._func.__qualname__,
+ i + 1,
+ self._retries,
+ self._get_appcode_frame(sys.exc_info()[-1]).tb_frame.f_code.co_filename,
+ self._get_appcode_frame(sys.exc_info()[-1]).tb_lineno
+ )
+ else:
+ self._logger.error(
+ 'error in %s() (attempt %d of %d, at %s:%d), retrying: %s',
+ self._func.__qualname__,
+ i + 1,
+ self._retries,
+ self._get_appcode_frame(sys.exc_info()[-1]).tb_frame.f_code.co_filename,
+ self._get_appcode_frame(sys.exc_info()[-1]).tb_lineno,
+ repr(e)
+ )
+ await self._on_error(args[0] if len(args) > 0 else None, context)
+
+ def _create_call_context(self):
+ return None
+
+ async def _on_error(self, instance, context):
+ if self._on_error_cb is not None:
+ if isinstance(self._on_error_cb, str):
+ cb = getattr(instance, self._on_error_cb)
+ else:
+ cb = self._on_error_cb
+ result = cb()
+ if inspect.isawaitable(result):
+ await result
+
+ def _get_appcode_frame(self, exc: TracebackType):
+ """Returns traceback frame from code outside this file."""
+ while True:
+ if exc.tb_next is None or exc.tb_frame.f_code.co_filename != __file__:
+ return exc
+ exc = exc.tb_next
+
+
+class DelayedRetry(Retry):
+
+ def __init__(self, delay: float, delay_growth: float, max_delay: float, **kwargs):
+ super().__init__(**kwargs)
+ self._delay = delay
+ self._delay_growth = delay_growth
+ self._max_delay = max_delay
+
+ async def _on_error(self, instance, context):
+ await super()._on_error(instance, context)
+ delay = self._get_delay(context)
+ if delay > 0.0:
+ await asyncio.sleep(delay)
+ self._grow_delay(context)
+
+ def _create_call_context(self):
+ return SimpleNamespace(current_delay=self._delay)
+
+ def _get_delay(self, context):
+ return min(context.current_delay, self._max_delay)
+
+ def _grow_delay(self, context):
+ try:
+ context.current_delay *= self._delay_growth
+ except OverflowError:
+ context.current_delay = self._max_delay
+
+class RandomDelayedRetry(DelayedRetry):
+
+ def _create_call_context(self):
+ return SimpleNamespace(current_delay=self._delay[1])
+
+ def _get_delay(self, context):
+ return random.uniform(self._delay[0], super()._get_delay(context))
+
+def retry_on(
+ *exception_class: t.Type[BaseException],
+ retries: int = 1,
+ delay: t.Union[float, t.Tuple[float, float]] = 0.0,
+ delay_growth: float = 1.0,
+ max_delay: int = 600,
+ log_errors: bool = False,
+ on_error: str = None
+):
+ if not isinstance(delay, (float, tuple)):
+ raise TypeError('delay must be a float or a tuple of 2 floats')
+ def deco_factory(func):
+ if type(delay) is float:
+ if delay <= 0.0:
+ decorator = Retry(func, exception_class, retries=retries, log_errors=log_errors, on_error=on_error)
+ else:
+ decorator = DelayedRetry(
+ delay,
+ delay_growth,
+ max_delay,
+ func=func,
+ exception_class=exception_class,
+ retries=retries,
+ log_errors=log_errors,
+ on_error=on_error
+ )
+ else:
+ decorator = RandomDelayedRetry(
+ delay,
+ delay_growth,
+ max_delay,
+ func=func,
+ exception_class=exception_class,
+ retries=retries,
+ log_errors=log_errors,
+ on_error=on_error
+ )
+ @functools.wraps(func)
+ async def func_wrapper(*args, **kwargs):
+ return await decorator(*args, **kwargs)
+ return func_wrapper
+ return deco_factory
+
+
+class Closable(LoggerMixin):
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._closing = False
+ self._closed = False
+ self._close_event = asyncio.Event()
+
+ @property
+ def is_open(self):
+ return not self._closing and not self._closed
+
+ @property
+ def closed(self):
+ return self._closed
+
+ async def wait_closed(self):
+ await self._close_event.wait()
+
+ async def close(self):
+ if self._closed:
+ await asyncio.sleep(0)
+ return
+ elif self._closing:
+ await self._close_event.wait()
+ else:
+ self._logger.debug('closing...')
+ self._closing = True
+ try:
+ await self._close()
+ finally:
+ self._closing = False
+ self._closed = True
+ self._logger.info('closed.')
+ self._close_event.set()
+
+ async def _close(self):
+ pass
+
+
+class WorkerBase(Closable):
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._started = False
+ self._closing = False
+ self._closed = True
+
+ @property
+ def is_open(self):
+ return self._started and super().is_open
+
+ def start(self):
+ if self._started:
+ raise RuntimeError('already started')
+ if not self._closed:
+ raise RuntimeError('expected worker to be closed on startup')
+ self._logger.info('start working')
+ self._started = True
+ self._closing = False
+ self._closed = False
+ self._startup()
+
+ def _startup(self) -> None:
+ pass
+
+ async def close(self):
+ await super().close()
+ self._started = False
+
+
+class SubtaskSpawner(Closable):
+    """Keeps track of spawned async tasks."""
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._subtasks: t.List[asyncio.Task] = []
+ self._exception_waiter = asyncio.get_running_loop().create_future()
+ self._exception_handlers = 0
+
+ async def wait_exception(self):
+ """Catch first exception raised from any subtask of this spawner."""
+ try:
+ self._exception_handlers += 1
+ await asyncio.shield(self._exception_waiter)
+ finally:
+ self._exception_handlers -= 1
+
+ async def wait_subtasks(self):
+        """Wait for all *current* subtasks to finish and return their results, even if they are exceptions."""
+ return await asyncio.gather(*(asyncio.shield(task) for task in self._subtasks), return_exceptions=True)
+
+ async def _close(self):
+ await super()._close()
+ self._cancel_subtasks()
+ self._exception_waiter.cancel()
+ self._subtasks.clear()
+
+ def _cancel_subtasks(self):
+ for task in self._subtasks:
+ task.cancel()
+
+ def _create_subtask(self, coro: t.Union[t.Coroutine[t.Any, t.Any, _T], 'asyncio.Future[_T]'], name=None) -> 'asyncio.Future[_T]':
+ if not self.is_open:
+ raise RuntimeError(f'{type(self).__name__} is not open')
+ if inspect.iscoroutine(coro):
+ task = asyncio.create_task(coro, name=name)
+ else:
+ task = coro
+ task.add_done_callback(self._check_subtask_result)
+ self._subtasks.append(task)
+ return task
+
+ def _check_subtask_result(self, task: asyncio.Task):
+ try:
+ task.result()
+ except asyncio.CancelledError as exc:
+ self._logger.debug('the subtask %s was cancelled', repr(task))
+ except BaseException as exc:
+ if self._exception_handlers > 0:
+ if not self._exception_waiter.done():
+ self._logger.debug(
+ 'firing the exception handler for %s from subtask %s',
+ type(exc).__name__, repr(task)
+ )
+ self._exception_waiter.set_exception(exc)
+ else:
+ self._logger.exception(
+ 'an error happened in the subtask %s but exception handler was already fired:',
+ repr(task)
+ )
+ else:
+ self._logger.exception('an error happened in the subtask %s:', repr(task))
+
+
+class Worker(SubtaskSpawner, WorkerBase):
+    """Daemon object that does some kind of work."""
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._subworkers: t.List[Worker] = []
+
+ def _startup(self) -> None:
+ super()._startup()
+ if self._exception_waiter.done():
+ self._exception_waiter = asyncio.get_running_loop().create_future()
+
+ async def close_on_exception(self, exc: Exception):
+ await self.close()
+
+ def _start_subworker(self, worker: 'Worker'):
+ self._create_subtask(self._watch_subworker(worker))
+ worker.start()
+
+ async def _watch_subworker(self, worker: 'Worker'):
+ self._subworkers.append(worker)
+ try:
+ await worker.wait_exception()
+ finally:
+ self._subworkers.remove(worker)
+
+ async def _close(self):
+ await super()._close()
+ await asyncio.gather(*(worker.close() for worker in self._subworkers if worker.is_open))
+
+
+class SingleTaskWorker(Worker):
+
+ def _startup(self):
+ super()._startup()
+ self._start_run_task()
+
+ def _start_run_task(self):
+ self._create_subtask(self._run())
+
+ async def _run(self):
+ raise NotImplementedError
diff --git a/pyproject.toml b/pyproject.toml
index 8b40b7b..ccef09e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,10 +1,9 @@
[tool.poetry]
-name = "chrome-devtools-protocol"
-version = "1.0.0"
+name = "python-cdp"
+version = "1.1.0"
description = "Python type wrappers for Chrome DevTools Protocol (CDP)"
packages = [
- {include = "cdp"},
- {include = "cdpgen"}
+ {include = "pycdp"}
]
license = "MIT"
readme = "README.md"
@@ -25,6 +24,7 @@ authors = [
python = "^3.7"
deprecated = "1.2.9"
inflection = "0.4.0"
+aiohttp = "3.8.1"
[tool.poetry.dev-dependencies]
mypy = "^0.770"
@@ -34,7 +34,7 @@ sphinx-autodoc-typehints = "^1.10.3"
sphinx-rtd-theme = "^0.4.3"
[tool.poetry.scripts]
-cdpgen = "cdpgen.generate:cdpgen"
+cdpgen = "pycdp.gen.generate:cdpgen"
[build-system]
requires = ["poetry>=0.12"]
From 85df5116aee7ca52015d4178323adc1dc83be221 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Wed, 27 Apr 2022 13:15:15 -0300
Subject: [PATCH 43/81] fix test imports
---
test/test_cdp.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/test/test_cdp.py b/test/test_cdp.py
index fd2bb9d..00334df 100644
--- a/test/test_cdp.py
+++ b/test/test_cdp.py
@@ -1,7 +1,7 @@
'''
Some basic tests for the generated CDP modules.
'''
-from cdp import dom, io, page, tracing, util
+from pycdp.cdp import dom, io, page, tracing, util
def test_primitive_type():
From ba47b7f44a3ee40ce85be6196d7dafbebac961d2 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Wed, 27 Apr 2022 13:39:37 -0300
Subject: [PATCH 44/81] update version tag
---
README.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/README.md b/README.md
index 0b2de16..751639d 100644
--- a/README.md
+++ b/README.md
@@ -17,9 +17,9 @@ wrappers, this project makes it easier and faster to write CDP client code.
## Installation
You can install this library as a dependency on your project with:
```
-pip install git+https://github.com/HMaker/python-cdp.git@1.0.0
+pip install git+https://github.com/HMaker/python-cdp.git@latest
```
-Change the git tag `@1.0.0` if you need another version. To install for development, clone this
+Change the git tag `@latest` if you need another version. To install for development, clone this
repository, install [Poetry][5] package manager and run `poetry install` to install dependencies.
## Usage
From f6422815680300380249b6b0faff14dded3246a0 Mon Sep 17 00:00:00 2001
From: Enric
Date: Thu, 28 Apr 2022 00:06:57 +0200
Subject: [PATCH 45/81] add events listening example
---
README.md | 18 ++++++++++++++++--
1 file changed, 16 insertions(+), 2 deletions(-)
diff --git a/README.md b/README.md
index 751639d..38ccd53 100644
--- a/README.md
+++ b/README.md
@@ -29,19 +29,33 @@ import asyncio
from pycdp import cdp
from pycdp.asyncio import connect_cdp
+async def listen_request_responses(target_session):
+ async for event in target_session.listen(cdp.network.ResponseReceived):
+ # loop which runs for each new event
+ print(event)
+
+async def listen_ws_message(target_session):
+ async with target_session.wait_for(cdp.network.WebSocketFrameSent) as event:
+ # wait_for() its same as listen but is fired a single time only
+ print("this is fired a single time only")
async def main():
conn = await connect_cdp('http://localhost:9222')
target_id = await conn.execute(cdp.target.create_target('about:blank'))
target_session = await conn.connect_session(target_id)
- await target_session.execute(cdp.page.navigate('https://chromedevtools.github.io/devtools-protocol/tot/Page/#method-navigate'))
+ await target_session.execute(cdp.page.navigate('https://chromedevtools.github.io/devtools-protocol/'))
+ await target_session.execute(cdp.network.enable()) # enable the domain
+ tasks = [] # each event listener should run on its own task
try:
- await asyncio.get_running_loop().create_future()
+ tasks.append(asyncio.create_task(listen_request_responses(target_session)))
+ tasks.append(asyncio.create_task(listen_ws_message(target_session)))
+ await asyncio.gather(*tasks) # takes a list of tasks and await them all
finally:
await target_session.execute(cdp.page.close())
asyncio.run(main())
```
+
where chrome debugger is listening on `http://localhost:9222` (started by `google-chrome --remote-debugging-port=9222`).
You also can use just the builtin CDP types with `import pycdp.cdp` on your own client implementation. If you want to try a different CDP version you can build new type wrappers with `cdpgen` command:
From a9646a1c4e172ce458c15e2fcb3860ca8c9b4599 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Thu, 28 Apr 2022 11:54:25 -0300
Subject: [PATCH 46/81] fix TypeError on __str__()
---
README.md | 17 +++++++++--------
pycdp/asyncio.py | 2 +-
2 files changed, 10 insertions(+), 9 deletions(-)
diff --git a/README.md b/README.md
index 38ccd53..e294936 100644
--- a/README.md
+++ b/README.md
@@ -31,25 +31,26 @@ from pycdp.asyncio import connect_cdp
async def listen_request_responses(target_session):
async for event in target_session.listen(cdp.network.ResponseReceived):
- # loop which runs for each new event
+ # runs for each new event
print(event)
-async def listen_ws_message(target_session):
- async with target_session.wait_for(cdp.network.WebSocketFrameSent) as event:
- # wait_for() its same as listen but is fired a single time only
+async def listen_websocket_message(target_session):
+ async with target_session.wait_for(cdp.network.WebSocketFrameReceived) as event:
+ # wait_for() is the same as listen() but it's fired a single time only
print("this is fired a single time only")
async def main():
conn = await connect_cdp('http://localhost:9222')
target_id = await conn.execute(cdp.target.create_target('about:blank'))
target_session = await conn.connect_session(target_id)
+ await target_session.execute(cdp.network.enable())
await target_session.execute(cdp.page.navigate('https://chromedevtools.github.io/devtools-protocol/'))
- await target_session.execute(cdp.network.enable()) # enable the domain
- tasks = [] # each event listener should run on its own task
+ tasks = []
try:
+ # each event listener should run on its own task
tasks.append(asyncio.create_task(listen_request_responses(target_session)))
- tasks.append(asyncio.create_task(listen_ws_message(target_session)))
- await asyncio.gather(*tasks) # takes a list of tasks and await them all
+ tasks.append(asyncio.create_task(listen_websocket_message(target_session)))
+ await asyncio.gather(*tasks)
finally:
await target_session.execute(cdp.page.close())
diff --git a/pycdp/asyncio.py b/pycdp/asyncio.py
index 4af8d75..efd30ee 100644
--- a/pycdp/asyncio.py
+++ b/pycdp/asyncio.py
@@ -95,7 +95,7 @@ async def __aiter__(self):
self._closed = True
def __str__(self) -> str:
- return f'{self.__class__.__name__}(buffer={repr(self._queue.qsize())}/{repr(self._queue.maxsize())}, closed={repr(self._closed)})'
+ return f'{self.__class__.__name__}(buffer={self._queue.qsize()}/{self._queue.maxsize}, closed={self._closed})'
class CDPBase(LoggerMixin):
From 293d7ce4f3be1dc28e6ac4ced81983bd97f9cd7e Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Sat, 9 Jul 2022 16:48:28 -0300
Subject: [PATCH 47/81] add twisted client
---
.gitignore | 1 +
README.md | 36 ++++-
pycdp/asyncio.py | 45 +------
pycdp/exceptions.py | 45 +++++++
pycdp/twisted.py | 313 ++++++++++++++++++++++++++++++++++++++++++++
5 files changed, 395 insertions(+), 45 deletions(-)
create mode 100644 pycdp/exceptions.py
create mode 100644 pycdp/twisted.py
diff --git a/.gitignore b/.gitignore
index 3eae1d5..f97a05d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,3 +9,4 @@ __pycache__
/.pytest_cache
/.vscode
/.tool-versions
+/test_*
\ No newline at end of file
diff --git a/README.md b/README.md
index e294936..8c4c589 100644
--- a/README.md
+++ b/README.md
@@ -23,7 +23,7 @@ Change the git tag `@latest` if you need another version. To install for develop
repository, install [Poetry][5] package manager and run `poetry install` to install dependencies.
## Usage
-If all you want is automate Chrome right now, `pycdp.asyncio` module contains a low-level client for asyncio:
+If all you want is automate Chrome right now, PyCDP includes a low-level client for asyncio and twisted:
```python
import asyncio
from pycdp import cdp
@@ -56,6 +56,38 @@ async def main():
asyncio.run(main())
```
+the twisted client requires [twisted][6] and [autobahn][7] packages:
+```python
+from twisted.python.log import err
+from twisted.internet import reactor, defer
+from pycdp import cdp
+from pycdp.twisted import connect_cdp
+
+
+async def main():
+ conn = await connect_cdp('http://localhost:9222', reactor)
+ target_id = await conn.execute(cdp.target.create_target('about:blank'))
+ target_session = await conn.connect_session(target_id)
+ await target_session.execute(cdp.page.enable())
+ await target_session.execute(cdp.page.navigate('https://chromedevtools.github.io/devtools-protocol/'))
+ async with target_session.wait_for(cdp.page.DomContentEventFired):
+ dom = await target_session.execute(cdp.dom.get_document())
+ node = await target_session.execute(cdp.dom.query_selector(dom.node_id, 'p'))
+ js_node = await target_session.execute(cdp.dom.resolve_node(node))
+ print((await target_session.execute(cdp.runtime.call_function_on('function() {return this.innerText;}', js_node.object_id, return_by_value=True)))[0].value)
+ await target_session.execute(cdp.page.close())
+ await conn.close()
+
+
+def main_error(failure):
+ err(failure)
+ reactor.stop()
+
+d = defer.ensureDeferred(main())
+d.addErrback(main_error)
+d.addCallback(lambda *args: reactor.stop())
+reactor.run()
+```
where chrome debugger is listening on `http://localhost:9222` (started by `google-chrome --remote-debugging-port=9222`).
@@ -112,3 +144,5 @@ PyCDP is licensed under the MIT License.
[3]: docs/getting_started.rst
[4]: https://github.com/hyperiongray/trio-chrome-devtools-protocol
[5]: https://python-poetry.org/docs/
+[6]: https://pypi.org/project/Twisted/
+[7]: https://pypi.org/project/autobahn/
\ No newline at end of file
diff --git a/pycdp/asyncio.py b/pycdp/asyncio.py
index efd30ee..b21ecac 100644
--- a/pycdp/asyncio.py
+++ b/pycdp/asyncio.py
@@ -11,6 +11,7 @@
from aiohttp.client_exceptions import (
ClientResponseError, ClientConnectorError, ClientConnectionError, ServerDisconnectedError
)
+from pycdp.exceptions import *
from pycdp.utils import ContextLoggerMixin, LoggerMixin, SingleTaskWorker, retry_on
from pycdp import cdp
@@ -18,50 +19,6 @@
T = t.TypeVar('T')
-class CDPError(Exception):
- pass
-
-
-class CDPBrowserError(CDPError):
- ''' This exception is raised when the browser's response to a command
- indicates that an error occurred. '''
- def __init__(self, obj):
- self.code: int = obj['code']
- self.message: str = obj['message']
- self.detail = obj.get('data')
-
- def __str__(self):
- return 'BrowserError {}'.format(self.code,
- self.message, self.detail)
-
-
-class CDPConnectionClosed(CDPError):
- ''' Raised when a public method is called on a closed CDP connection. '''
- def __init__(self, reason):
- '''
- Constructor.
- :param reason:
- :type reason: wsproto.frame_protocol.CloseReason
- '''
- self.reason = reason
-
- def __repr__(self):
- ''' Return representation. '''
- return '{}<{}>'.format(self.__class__.__name__, self.reason)
-
-
-class CDPSessionClosed(CDPError):
- pass
-
-
-class CDPInternalError(CDPError):
- ''' This exception is only raised when there is faulty logic in TrioCDP or
- the integration with PyCDP. '''
-
-
-class CDPEventListenerClosed(CDPError):
- pass
-
_CLOSE_SENTINEL = object
class CDPEventListener:
diff --git a/pycdp/exceptions.py b/pycdp/exceptions.py
new file mode 100644
index 0000000..4ad19b6
--- /dev/null
+++ b/pycdp/exceptions.py
@@ -0,0 +1,45 @@
+
+
+class CDPError(Exception):
+ pass
+
+
+class CDPBrowserError(CDPError):
+ ''' This exception is raised when the browser's response to a command
+ indicates that an error occurred. '''
+ def __init__(self, obj):
+ self.code: int = obj['code']
+ self.message: str = obj['message']
+ self.detail = obj.get('data')
+
+ def __str__(self):
+        return 'BrowserError {}: {} {}'.format(self.code,
+            self.message, self.detail)
+
+
+class CDPConnectionClosed(CDPError):
+ ''' Raised when a public method is called on a closed CDP connection. '''
+ def __init__(self, reason):
+ '''
+ Constructor.
+ :param reason:
+ :type reason: wsproto.frame_protocol.CloseReason
+ '''
+ self.reason = reason
+
+ def __repr__(self):
+ ''' Return representation. '''
+ return '{}<{}>'.format(self.__class__.__name__, self.reason)
+
+
+class CDPSessionClosed(CDPError):
+ pass
+
+
+class CDPInternalError(CDPError):
+ ''' This exception is only raised when there is faulty logic in TrioCDP or
+ the integration with PyCDP. '''
+
+
+class CDPEventListenerClosed(CDPError):
+ pass
\ No newline at end of file
diff --git a/pycdp/twisted.py b/pycdp/twisted.py
new file mode 100644
index 0000000..a5961bc
--- /dev/null
+++ b/pycdp/twisted.py
@@ -0,0 +1,313 @@
+import json
+import itertools
+import typing as t
+from collections import defaultdict
+from contextlib import asynccontextmanager
+from twisted.web.client import Agent, Response, readBody
+from twisted.internet.defer import DeferredQueue, QueueOverflow, Deferred, CancelledError
+from autobahn.twisted.websocket import WebSocketClientProtocol, WebSocketClientFactory
+from pycdp.exceptions import *
+from pycdp.utils import ContextLoggerMixin, LoggerMixin
+from pycdp import cdp
+
+
+T = t.TypeVar('T')
+
+
+_CLOSE_SENTINEL = object
+class CDPEventListener:
+
+ def __init__(self, queue: DeferredQueue):
+ self._queue = queue
+ self._closed = False
+
+ @property
+ def closed(self):
+ return self._closed
+
+ def put(self, elem: dict):
+ if self._closed: raise CDPEventListenerClosed
+ self._queue.put(elem)
+
+ def close(self):
+ self._closed = True
+ try:
+ self._queue.put(_CLOSE_SENTINEL)
+ except QueueOverflow:
+ pass
+
+ async def __aiter__(self):
+ try:
+ while not self._closed:
+ elem = await self._queue.get()
+ if elem is _CLOSE_SENTINEL:
+ return
+ yield elem
+ finally:
+ self._closed = True
+
+ def __str__(self) -> str:
+ return f'{self.__class__.__name__}(buffer={len(self._queue.pending)}/{self._queue.size}, closed={self._closed})'
+
+
+class CDPSocket(WebSocketClientProtocol):
+
+ @property
+ def closed(self) -> bool:
+ return self.localCloseCode is not None or self.remoteCloseCode is not None
+
+ def onConnect(self, response):
+ self.factory.connection = self
+ self.factory.connectWaiter.callback(None)
+
+ def onCloseFrame(self, code, reasonRaw):
+ return super().onCloseFrame(code, reasonRaw)
+
+ async def close(self):
+ self.dropConnection()
+ await self.is_closed
+
+
+class CDPConnector(WebSocketClientFactory):
+ protocol = CDPSocket
+
+ def startedConnecting(self, connector):
+ self.connectWaiter = Deferred()
+
+ def clientConnectionFailed(self, connector, reason):
+ self.connectWaiter.errback(CDPError(f'CDP connection failed: {reason}'))
+
+
+class CDPBase(LoggerMixin):
+
+ def __init__(self, ws: CDPSocket=None, session_id=None, target_id=None):
+ super().__init__()
+ self._listeners: t.Dict[type, t.Set[CDPEventListener]] = defaultdict(set)
+ self._id_iter = itertools.count()
+ self._inflight_cmd: t.Dict[int, t.Tuple[t.Generator[dict, dict , t.Any], Deferred]] = {}
+ self._session_id = session_id
+ self._target_id = target_id
+ self._ws: CDPSocket = ws
+
+ @property
+ def session_id(self) -> cdp.target.SessionID:
+ return self._session_id
+
+ async def execute(self, cmd: t.Generator[dict, dict , T]) -> T:
+ '''
+ Execute a command on the server and wait for the result.
+
+ :param cmd: any CDP command
+ :returns: a CDP result
+ '''
+ if self._ws.closedByMe:
+ raise CDPConnectionClosed(f'{self._ws.localCloseReason} ({self._ws.localCloseCode})')
+ if self._ws.remoteCloseCode is not None:
+ raise CDPConnectionClosed(f'{self._ws.remoteCloseReason} ({self._ws.remoteCloseCode})')
+ cmd_id = next(self._id_iter)
+ cmd_response = Deferred()
+ self._inflight_cmd[cmd_id] = cmd, cmd_response
+ request = next(cmd)
+ request['id'] = cmd_id
+ if self._session_id:
+ request['sessionId'] = self._session_id
+ self._logger.debug('sending command %r', request)
+ request_str = json.dumps(request)
+ try:
+ self._ws.sendMessage(request_str.encode('UTF-8'))
+ return await cmd_response
+ except CancelledError:
+ if cmd_id in self._inflight_cmd:
+ del self._inflight_cmd[cmd_id]
+ raise
+
+ def listen(self, *event_types: t.Type[T], buffer_size=100) -> t.AsyncIterator[T]:
+ '''Return an async iterator that iterates over events matching the
+ indicated types.'''
+ receiver = CDPEventListener(DeferredQueue(buffer_size))
+ for event_type in event_types:
+ self._listeners[event_type].add(receiver)
+ return receiver.__aiter__()
+
+ @asynccontextmanager
+ async def wait_for(self, event_type: t.Type[T], buffer_size=100) -> t.AsyncGenerator[T, None]:
+ '''
+ Wait for an event of the given type and return it.
+
+ This is an async context manager, so you should open it inside an async
+ with block. The block will not exit until the indicated event is
+ received.
+ '''
+ async for event in self.listen(event_type, buffer_size):
+ yield event
+ return
+
+ def close_listeners(self):
+ for listener in itertools.chain.from_iterable(self._listeners.values()):
+ listener.close()
+ self._listeners.clear()
+
+ def _handle_data(self, data):
+ '''
+ Handle incoming WebSocket data.
+
+ :param dict data: a JSON dictionary
+ '''
+ if 'id' in data:
+ self._handle_cmd_response(data)
+ else:
+ self._handle_event(data)
+
+ def _handle_cmd_response(self, data):
+ '''
+ Handle a response to a command. This will set an event flag that will
+ return control to the task that called the command.
+
+ :param dict data: response as a JSON dictionary
+ '''
+ cmd_id = data['id']
+ try:
+ cmd, event = self._inflight_cmd.pop(cmd_id)
+ except KeyError:
+ self._logger.debug('got a message with a command ID that does not exist: %s', data)
+ return
+ if 'error' in data:
+ # If the server reported an error, convert it to an exception and do
+ # not process the response any further.
+ event.errback(CDPBrowserError(data['error']))
+ else:
+ # Otherwise, continue the generator to parse the JSON result
+ # into a CDP object.
+ try:
+ cmd.send(data['result'])
+ event.errback(CDPInternalError("the command's generator function did not exit when expected!"))
+ except StopIteration as e:
+ event.callback(e.value)
+
+ def _handle_event(self, data):
+ '''
+ Handle an event.
+
+ :param dict data: event as a JSON dictionary
+ '''
+ event = cdp.util.parse_json_event(data)
+ self._logger.debug('dispatching event %s', event)
+ to_remove = set()
+ for listener in self._listeners[type(event)]:
+ try:
+ listener.put(event)
+ except QueueOverflow:
+ self._logger.warning('event %s dropped because listener %s queue is full', type(event), listener)
+ except CDPEventListenerClosed:
+ to_remove.add(listener)
+ self._listeners[type(event)] -= to_remove
+ self._logger.debug('event dispatched')
+
+
+class CDPConnection(CDPBase):
+
+ def __init__(self, debugging_url: str, http_client: Agent, reactor):
+ super().__init__()
+ self._debugging_url = debugging_url.rstrip('/')
+ self._http_client = http_client
+ self._reactor = reactor
+ self._wsurl: str = None
+ self._sessions: t.Dict[str, CDPSession] = {}
+
+ @property
+ def closed(self) -> bool:
+ return self._ws.closed
+
+ @property
+ def had_normal_closure(self) -> bool:
+ return not self._ws.remoteCloseCode or (self._ws.closedByMe and self._ws.localCloseCode == 1000)
+
+ async def connect(self):
+ if self._ws is not None: raise RuntimeError('already connected')
+ if self._wsurl is None:
+ if self._debugging_url.startswith('http://'):
+ version: Response = await self._http_client.request(
+ b'GET',
+ b'%s/json/version' % self._debugging_url.encode('UTF-8')
+ )
+ if version.code != 200:
+                    raise CDPError(f'could not get {self._debugging_url}/json/version: HTTP {version.code} {version.phrase}')
+ self._wsurl = json.loads(await readBody(version))['webSocketDebuggerUrl']
+ elif self._debugging_url.startswith('ws://'):
+ self._wsurl = self._debugging_url
+ else:
+ raise ValueError('bad debugging URL scheme')
+ connector = CDPConnector(self._wsurl)
+ self._reactor.connectTCP(connector.host, connector.port, connector)
+ await connector.connectWaiter
+ self._ws = connector.connection
+ self._ws.onMessage = self._handleMessage
+
+ def add_session(self, session_id: str, target_id: str) -> 'CDPSession':
+        if session_id in self._sessions:
+ return self._sessions[session_id]
+ session = CDPSession(self._ws, session_id, target_id)
+ self._sessions[session_id] = session
+ return session
+
+ def remove_session(self, session_id: str):
+ if session_id in self._sessions:
+ self._sessions.pop(session_id).close()
+
+ async def connect_session(self, target_id: cdp.target.TargetID) -> 'CDPSession':
+ '''
+ Returns a new :class:`CDPSession` connected to the specified target.
+ '''
+ session_id = await self.execute(cdp.target.attach_to_target(target_id, True))
+ session = CDPSession(self._ws, session_id, target_id)
+ self._sessions[session_id] = session
+ return session
+
+ def _handleMessage(self, message: bytes, isBinary: bool):
+ if isBinary: raise RuntimeError('unexpected binary ws message')
+ try:
+ data = json.loads(message)
+ except json.JSONDecodeError:
+ raise CDPBrowserError({
+ 'code': -32700,
+ 'message': 'Client received invalid JSON',
+ 'data': message
+ })
+ if 'sessionId' in data:
+ session_id = cdp.target.SessionID(data['sessionId'])
+            session = self._sessions.get(session_id)
+            if session is None:
+                self._logger.debug(f'received message for unknown session: {data}')
+                return
+            session._handle_data(data)
+ else:
+ self._handle_data(data)
+
+ async def close(self):
+ for session in self._sessions.values():
+ session.close()
+ self._sessions.clear()
+ self.close_listeners()
+ if self._ws is not None and not self._ws.closed:
+ await self._ws.close()
+
+
+class CDPSession(CDPBase, ContextLoggerMixin):
+ def __init__(self, ws: CDPSocket, session_id: cdp.target.SessionID, target_id: cdp.target.TargetID):
+ super().__init__(ws, session_id, target_id)
+ self.set_logger_context(extra_name=session_id)
+
+ def close(self):
+ if len(self._inflight_cmd) > 0:
+ exc = CDPSessionClosed()
+ for (_, event) in self._inflight_cmd.values():
+ if not event.done():
+ event.set_exception(exc)
+ self._inflight_cmd.clear()
+ self.close_listeners()
+
+
+async def connect_cdp(url: str, reactor) -> CDPConnection:
+ cdp_conn = CDPConnection(url, Agent(reactor), reactor)
+ await cdp_conn.connect()
+ return cdp_conn
From 8fa98ab4c07f06bb9acb14065c1f8762ba2d87b2 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Sat, 9 Jul 2022 17:14:38 -0300
Subject: [PATCH 48/81] add chrome launcher
---
.gitignore | 3 +-
README.md | 47 ++++++-------
pycdp/browser.py | 167 +++++++++++++++++++++++++++++++++++++++++++++++
3 files changed, 194 insertions(+), 23 deletions(-)
create mode 100644 pycdp/browser.py
diff --git a/.gitignore b/.gitignore
index f97a05d..d982227 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,4 +9,5 @@ __pycache__
/.pytest_cache
/.vscode
/.tool-versions
-/test_*
\ No newline at end of file
+/test_*
+/*.log
\ No newline at end of file
diff --git a/README.md b/README.md
index 8c4c589..06dfc6c 100644
--- a/README.md
+++ b/README.md
@@ -27,44 +27,47 @@ If all you want is automate Chrome right now, PyCDP includes a low-level client
```python
import asyncio
from pycdp import cdp
+from pycdp.browser import ChromeLauncher
from pycdp.asyncio import connect_cdp
-async def listen_request_responses(target_session):
- async for event in target_session.listen(cdp.network.ResponseReceived):
- # runs for each new event
- print(event)
-
-async def listen_websocket_message(target_session):
- async with target_session.wait_for(cdp.network.WebSocketFrameReceived) as event:
- # wait_for() is the same as listen() but it's fired a single time only
- print("this is fired a single time only")
-
async def main():
+ chrome = ChromeLauncher(
+ binary='/usr/bin/google-chrome' # linux path
+ args=['--remote-debugging-port=9222', '--incognito']
+ )
+ # ChromeLauncher.launch() is blocking, run it on a background thread
+ await asyncio.get_running_loop().run_in_executor(None, chrome.launch)
conn = await connect_cdp('http://localhost:9222')
target_id = await conn.execute(cdp.target.create_target('about:blank'))
target_session = await conn.connect_session(target_id)
- await target_session.execute(cdp.network.enable())
+ await target_session.execute(cdp.page.enable())
await target_session.execute(cdp.page.navigate('https://chromedevtools.github.io/devtools-protocol/'))
- tasks = []
- try:
- # each event listener should run on its own task
- tasks.append(asyncio.create_task(listen_request_responses(target_session)))
- tasks.append(asyncio.create_task(listen_websocket_message(target_session)))
- await asyncio.gather(*tasks)
- finally:
- await target_session.execute(cdp.page.close())
+    # you may use "async for target_session.listen()" to listen to multiple events; here we wait for just a single event.
+ async with target_session.wait_for(cdp.page.DomContentEventFired):
+ dom = await target_session.execute(cdp.dom.get_document())
+ node = await target_session.execute(cdp.dom.query_selector(dom.node_id, 'p'))
+ js_node = await target_session.execute(cdp.dom.resolve_node(node))
+ print((await target_session.execute(cdp.runtime.call_function_on('function() {return this.innerText;}', js_node.object_id, return_by_value=True)))[0].value)
+ await target_session.execute(cdp.page.close())
+ await conn.close()
+ await asyncio.get_running_loop().run_in_executor(None, chrome.kill)
asyncio.run(main())
```
the twisted client requires [twisted][6] and [autobahn][7] packages:
```python
from twisted.python.log import err
-from twisted.internet import reactor, defer
+from twisted.internet import reactor, defer, threads
from pycdp import cdp
+from pycdp.browser import ChromeLauncher
from pycdp.twisted import connect_cdp
-
async def main():
+ chrome = ChromeLauncher(
+        binary=r'C:\Program Files\Google\Chrome\Application\chrome.exe', # windows path
+ args=['--remote-debugging-port=9222', '--incognito']
+ )
+ await threads.deferToThread(chrome.launch)
conn = await connect_cdp('http://localhost:9222', reactor)
target_id = await conn.execute(cdp.target.create_target('about:blank'))
target_session = await conn.connect_session(target_id)
@@ -77,7 +80,7 @@ async def main():
print((await target_session.execute(cdp.runtime.call_function_on('function() {return this.innerText;}', js_node.object_id, return_by_value=True)))[0].value)
await target_session.execute(cdp.page.close())
await conn.close()
-
+ await threads.deferToThread(chrome.kill)
def main_error(failure):
err(failure)
diff --git a/pycdp/browser.py b/pycdp/browser.py
new file mode 100644
index 0000000..9c22e0c
--- /dev/null
+++ b/pycdp/browser.py
@@ -0,0 +1,167 @@
+import os
+import signal
+import shutil
+import tempfile
+import subprocess
+import typing as t
+from io import TextIOWrapper
+from pycdp.utils import LoggerMixin
+
+
+class BrowserLauncher(LoggerMixin):
+
+ def __init__(
+ self,
+ *,
+ binary: str,
+ profile: str=None,
+ keep_profile: bool=True,
+ headless: bool=False,
+ locale: str=None,
+ timezone: str=None,
+ proxy: str=None,
+ window_width: int=None,
+ window_height: int=None,
+ initial_url: str=None,
+ extensions: t.List[str]=[],
+ args: t.List[str]=None,
+ log: bool=True
+ ):
+ super().__init__()
+ self._binary = binary
+ self._headless = headless
+ self._locale = locale
+ self._timezone = timezone
+ self._proxy = proxy
+ self._window_width = window_width
+ self._window_height = window_height
+ self._extensions = extensions
+ self._initial_url = initial_url
+ self._args = args
+ self._log = log
+ self._process: subprocess.Popen = None
+ if profile is None:
+ self._keep_profile = False
+ self._profile = None
+ else:
+ self._profile = profile
+ self._keep_profile = keep_profile
+ self._logfile: TextIOWrapper = None
+
+ @property
+ def pid(self) -> int:
+ return self._process.pid
+
+ @property
+ def locale(self):
+ return self._locale
+
+ @property
+ def timezone(self):
+ return self._timezone
+
+ def launch(self):
+ if self._process is not None: raise RuntimeError('already launched')
+ if self._log:
+ self._logfile = open(f'{self.__class__.__name__.lower()}.log', 'a')
+ stdout = stderr = self._logfile
+ self._logger.debug('redirecting output to %s.log', self.__class__.__name__.lower())
+ else:
+ stdout = stderr = subprocess.DEVNULL
+ self._logger.debug('redirecting output to subprocess.DEVNULL')
+ if self._profile is None:
+ self._profile = tempfile.mkdtemp()
+ self._configure_profile()
+ cmd = self._build_launch_cmdline()
+ self._logger.debug('launching %s', cmd)
+ self._process = subprocess.Popen(
+ cmd,
+ env=self._build_launch_env(),
+ stdin=subprocess.PIPE,
+ stdout=stdout,
+ stderr=stderr,
+ text=True,
+ close_fds=True,
+ preexec_fn=os.setsid if os.name == 'posix' else None,
+ creationflags=subprocess.CREATE_NEW_PROCESS_GROUP if os.name == 'nt' else 0
+ )
+ try:
+ self._logger.debug('waiting launch finish...')
+ returncode = self._process.wait(1)
+ except subprocess.TimeoutExpired:
+ self._logger.debug('launch finished')
+
+ def kill(self, timeout: float=3.0):
+ if self._process is not None:
+ if os.name == 'posix':
+ os.killpg(os.getpgid(self._process.pid), signal.SIGTERM)
+ else:
+ self._process.terminate()
+ try:
+ self._process.wait(timeout)
+ except subprocess.TimeoutExpired:
+ if os.name == 'posix':
+ os.killpg(os.getpgid(self._process.pid), signal.SIGKILL)
+ else:
+ self._process.kill()
+ self._process = None
+ if self._logfile is not None and not self._logfile.closed:
+ self._logfile.close()
+ if not self._keep_profile:
+ shutil.rmtree(self._profile, ignore_errors=True)
+
+ def _build_launch_cmdline(self) -> t.List[str]:
+ raise NotImplementedError
+
+ def _build_launch_env(self):
+ env = os.environ.copy()
+ if os.name == 'posix':
+ if self._timezone is not None:
+ env['TZ'] = self._timezone
+ if self._locale is not None:
+ env['LANGUAGE'] = self._locale
+ return env
+
+ def _configure_profile(self):
+ pass
+
+
+class ChromeLauncher(BrowserLauncher):
+
+ def _build_launch_cmdline(self) -> t.List[str]:
+ cmd = [
+ self._binary,
+ f'--window-size={self._window_width},{self._window_height}' if self._window_width is not None and self._window_height is not None else '--start-maximized',
+ f'--user-data-dir={self._profile}' if self._profile is not None else '',
+ '--no-first-run',
+ '--no-service-autorun',
+ '--no-default-browser-check',
+ '--homepage=about:blank',
+ '--no-pings',
+ '--password-store=basic',
+ '--disable-infobars',
+ '--disable-breakpad',
+ '--disable-component-update',
+ '--disable-background-timer-throttling',
+ '--disable-backgrounding-occluded-windows',
+ '--disable-renderer-backgrounding',
+ '--disable-background-networking',
+ '--disable-dev-shm-usage'
+ ]
+ if os.name == 'posix':
+ cmd.append('--enable-logging')
+ cmd.append('--v=2')
+ if self._headless:
+ cmd.append('--headless')
+ cmd.append('--disable-gpu')
+ if self._proxy is not None:
+ cmd.append(f'--proxy-server={self._proxy}')
+ if len(self._extensions) > 0:
+ cmd.append(f"--load-extension={','.join(str(path) for path in self._extensions)}")
+ if os.name == 'nt' and self._locale is not None:
+ cmd.append(f'--lang={self._locale}')
+ if self._args is not None:
+ cmd.extend(self._args)
+ if self._initial_url is not None:
+ cmd.append(self._initial_url)
+ return cmd
From 88df54b1d3aa94f9f58025526687900a79e498c4 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Sun, 31 Jul 2022 13:30:35 -0300
Subject: [PATCH 49/81] add retry to twisted CDP connect
---
pycdp/twisted.py | 3 ++-
pyproject.toml | 2 +-
2 files changed, 3 insertions(+), 2 deletions(-)
diff --git a/pycdp/twisted.py b/pycdp/twisted.py
index a5961bc..19dca1c 100644
--- a/pycdp/twisted.py
+++ b/pycdp/twisted.py
@@ -7,7 +7,7 @@
from twisted.internet.defer import DeferredQueue, QueueOverflow, Deferred, CancelledError
from autobahn.twisted.websocket import WebSocketClientProtocol, WebSocketClientFactory
from pycdp.exceptions import *
-from pycdp.utils import ContextLoggerMixin, LoggerMixin
+from pycdp.utils import ContextLoggerMixin, LoggerMixin, retry_on
from pycdp import cdp
@@ -222,6 +222,7 @@ def closed(self) -> bool:
def had_normal_closure(self) -> bool:
return not self._ws.remoteCloseCode or (self._ws.closedByMe and self._ws.localCloseCode == 1000)
+ @retry_on(ConnectionRefusedError, retries=10, delay=1.0, log_errors=True)
async def connect(self):
if self._ws is not None: raise RuntimeError('already connected')
if self._wsurl is None:
diff --git a/pyproject.toml b/pyproject.toml
index ccef09e..a6561b7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "python-cdp"
-version = "1.1.0"
+version = "1.2.1"
description = "Python type wrappers for Chrome DevTools Protocol (CDP)"
packages = [
{include = "pycdp"}
From 85269a8238aad4e2dde71f64f147fed250235f8f Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Sun, 31 Jul 2022 13:37:08 -0300
Subject: [PATCH 50/81] fix wrong ConnectionRefusedError ref
---
pycdp/twisted.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/pycdp/twisted.py b/pycdp/twisted.py
index 19dca1c..80ca9e0 100644
--- a/pycdp/twisted.py
+++ b/pycdp/twisted.py
@@ -3,6 +3,7 @@
import typing as t
from collections import defaultdict
from contextlib import asynccontextmanager
+from twisted.internet.error import ConnectionRefusedError
from twisted.web.client import Agent, Response, readBody
from twisted.internet.defer import DeferredQueue, QueueOverflow, Deferred, CancelledError
from autobahn.twisted.websocket import WebSocketClientProtocol, WebSocketClientFactory
From bedde6ccf87964cc695da861bc1b7a93e40c0f51 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Sun, 31 Jul 2022 13:45:27 -0300
Subject: [PATCH 51/81] fix error dispatch on twisted client
---
pycdp/twisted.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pycdp/twisted.py b/pycdp/twisted.py
index 80ca9e0..9677369 100644
--- a/pycdp/twisted.py
+++ b/pycdp/twisted.py
@@ -303,8 +303,8 @@ def close(self):
if len(self._inflight_cmd) > 0:
exc = CDPSessionClosed()
for (_, event) in self._inflight_cmd.values():
- if not event.done():
- event.set_exception(exc)
+ if not event.called:
+ event.errback(exc)
self._inflight_cmd.clear()
self.close_listeners()
From 200ba85d5cfa03c6ea571012f6639c2a0157b171 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Sun, 31 Jul 2022 14:57:40 -0300
Subject: [PATCH 52/81] make retry_on compatible with twisted
---
pycdp/asyncio.py | 13 +++++++++++--
pycdp/base.py | 8 ++++++++
pycdp/twisted.py | 17 ++++++++++++++++-
pycdp/utils.py | 24 ++++++++++++++++++------
4 files changed, 53 insertions(+), 9 deletions(-)
create mode 100644 pycdp/base.py
diff --git a/pycdp/asyncio.py b/pycdp/asyncio.py
index b21ecac..14323e9 100644
--- a/pycdp/asyncio.py
+++ b/pycdp/asyncio.py
@@ -12,6 +12,7 @@
ClientResponseError, ClientConnectorError, ClientConnectionError, ServerDisconnectedError
)
from pycdp.exceptions import *
+from pycdp.base import IEventLoop
from pycdp.utils import ContextLoggerMixin, LoggerMixin, SingleTaskWorker, retry_on
from pycdp import cdp
@@ -19,6 +20,14 @@
T = t.TypeVar('T')
+class AsyncIOEventLoop(IEventLoop):
+
+ async def sleep(self, delay: float) -> None:
+ await asyncio.sleep(delay)
+
+loop = AsyncIOEventLoop()
+
+
_CLOSE_SENTINEL = object
class CDPEventListener:
@@ -214,7 +223,7 @@ def had_normal_closure(self) -> bool:
@retry_on(
ClientConnectorError, asyncio.TimeoutError,
- retries=10, delay=3.0, delay_growth=1.3, log_errors=True
+ retries=10, delay=3.0, delay_growth=1.3, log_errors=True, loop=loop
)
async def connect(self):
if self._ws is not None: raise RuntimeError('already connected')
@@ -366,7 +375,7 @@ def close(self):
self.close_listeners()
-@retry_on(ClientConnectionError, ServerDisconnectedError, retries=10, delay=3.0, delay_growth=1.3, log_errors=True)
+@retry_on(ClientConnectionError, ServerDisconnectedError, retries=10, delay=3.0, delay_growth=1.3, log_errors=True, loop=loop)
async def connect_cdp(url: str) -> CDPConnection:
'''
Connect to the browser specified by debugging ``url``.
diff --git a/pycdp/base.py b/pycdp/base.py
new file mode 100644
index 0000000..01ee52c
--- /dev/null
+++ b/pycdp/base.py
@@ -0,0 +1,8 @@
+import typing as t
+
+
+class IEventLoop(t.Protocol):
+ """Compatibility layer between asyncio and twisted's event loop"""
+
+ async def sleep(self, delay: float) -> None:
+ raise NotImplementedError
diff --git a/pycdp/twisted.py b/pycdp/twisted.py
index 9677369..2b7241f 100644
--- a/pycdp/twisted.py
+++ b/pycdp/twisted.py
@@ -3,11 +3,13 @@
import typing as t
from collections import defaultdict
from contextlib import asynccontextmanager
+from twisted.internet import reactor
from twisted.internet.error import ConnectionRefusedError
from twisted.web.client import Agent, Response, readBody
from twisted.internet.defer import DeferredQueue, QueueOverflow, Deferred, CancelledError
from autobahn.twisted.websocket import WebSocketClientProtocol, WebSocketClientFactory
from pycdp.exceptions import *
+from pycdp.base import IEventLoop
from pycdp.utils import ContextLoggerMixin, LoggerMixin, retry_on
from pycdp import cdp
@@ -15,6 +17,19 @@
T = t.TypeVar('T')
+class TwistedEventLoop(IEventLoop):
+
+ def __init__(self, reactor):
+ self._reactor = reactor
+
+ async def sleep(self, delay: float):
+ sleep = Deferred()
+ self._reactor.callLater(delay, sleep.callback, None)
+ await sleep
+
+loop = TwistedEventLoop(reactor)
+
+
_CLOSE_SENTINEL = object
class CDPEventListener:
@@ -223,7 +238,7 @@ def closed(self) -> bool:
def had_normal_closure(self) -> bool:
return not self._ws.remoteCloseCode or (self._ws.closedByMe and self._ws.localCloseCode == 1000)
- @retry_on(ConnectionRefusedError, retries=10, delay=1.0, log_errors=True)
+ @retry_on(ConnectionRefusedError, retries=10, delay=1.0, log_errors=True, loop=loop)
async def connect(self):
if self._ws is not None: raise RuntimeError('already connected')
if self._wsurl is None:
diff --git a/pycdp/utils.py b/pycdp/utils.py
index fec9608..70a64b3 100644
--- a/pycdp/utils.py
+++ b/pycdp/utils.py
@@ -6,6 +6,7 @@
import functools
import typing as t
from types import SimpleNamespace, TracebackType
+from pycdp.base import IEventLoop
_T = t.TypeVar('_T')
@@ -46,6 +47,7 @@ class Retry(LoggerMixin):
def __init__(self,
func,
exception_class: t.Collection[BaseException],
+ loop: IEventLoop,
*,
retries: int = 1,
on_error: t.Union[str, t.Callable[[], t.Awaitable[None]]] = None,
@@ -54,6 +56,7 @@ def __init__(self,
super().__init__()
self._func = func
self._errors = exception_class
+ self._loop = loop
self._retries = retries
self._log_errors = log_errors
self._on_error_cb = on_error
@@ -121,7 +124,7 @@ async def _on_error(self, instance, context):
await super()._on_error(instance, context)
delay = self._get_delay(context)
if delay > 0.0:
- await asyncio.sleep(delay)
+ await self._loop.sleep(delay)
self._grow_delay(context)
def _create_call_context(self):
@@ -146,19 +149,27 @@ def _get_delay(self, context):
def retry_on(
*exception_class: t.Type[BaseException],
+ loop: IEventLoop,
retries: int = 1,
delay: t.Union[float, t.Tuple[float, float]] = 0.0,
delay_growth: float = 1.0,
max_delay: int = 600,
log_errors: bool = False,
- on_error: str = None
+ on_error: str = None,
):
if not isinstance(delay, (float, tuple)):
raise TypeError('delay must be a float or a tuple of 2 floats')
def deco_factory(func):
if type(delay) is float:
if delay <= 0.0:
- decorator = Retry(func, exception_class, retries=retries, log_errors=log_errors, on_error=on_error)
+ decorator = Retry(
+ func,
+ exception_class,
+ retries=retries,
+ log_errors=log_errors,
+ on_error=on_error,
+ loop=loop
+ )
else:
decorator = DelayedRetry(
delay,
@@ -168,7 +179,8 @@ def deco_factory(func):
exception_class=exception_class,
retries=retries,
log_errors=log_errors,
- on_error=on_error
+ on_error=on_error,
+ loop=loop
)
else:
decorator = RandomDelayedRetry(
@@ -179,7 +191,8 @@ def deco_factory(func):
exception_class=exception_class,
retries=retries,
log_errors=log_errors,
- on_error=on_error
+ on_error=on_error,
+ loop=loop
)
@functools.wraps(func)
async def func_wrapper(*args, **kwargs):
@@ -209,7 +222,6 @@ async def wait_closed(self):
async def close(self):
if self._closed:
- await asyncio.sleep(0)
return
elif self._closing:
await self._close_event.wait()
From 89aa2f0f60d18d4628af1c56c52b40a5299397d9 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Mon, 1 Aug 2022 13:49:05 -0300
Subject: [PATCH 53/81] fix required python version
---
pyproject.toml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
index a6561b7..b1a77c8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -21,7 +21,7 @@ authors = [
]
[tool.poetry.dependencies]
-python = "^3.7"
+python = "^3.8"
deprecated = "1.2.9"
inflection = "0.4.0"
aiohttp = "3.8.1"
From 9d4029f3fee9be08b8d000cddd4e0996618993af Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Elliot=20Br=C3=A4ck?=
Date: Wed, 3 Aug 2022 14:07:12 +0200
Subject: [PATCH 54/81] readme: Fix syntax error in example
---
README.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/README.md b/README.md
index 06dfc6c..af99a72 100644
--- a/README.md
+++ b/README.md
@@ -32,7 +32,7 @@ from pycdp.asyncio import connect_cdp
async def main():
chrome = ChromeLauncher(
- binary='/usr/bin/google-chrome' # linux path
+ binary='/usr/bin/google-chrome', # linux path
args=['--remote-debugging-port=9222', '--incognito']
)
# ChromeLauncher.launch() is blocking, run it on a background thread
@@ -148,4 +148,4 @@ PyCDP is licensed under the MIT License.
[4]: https://github.com/hyperiongray/trio-chrome-devtools-protocol
[5]: https://python-poetry.org/docs/
[6]: https://pypi.org/project/Twisted/
-[7]: https://pypi.org/project/autobahn/
\ No newline at end of file
+[7]: https://pypi.org/project/autobahn/
From 4f217c4222b98a9d710ea20ee5d7b5655b6dd36b Mon Sep 17 00:00:00 2001
From: TurboKach
Date: Thu, 26 Jan 2023 01:26:55 +0800
Subject: [PATCH 55/81] added shell command to generate python types for latest
protocol version
---
.gitignore | 3 ++-
README.md | 8 +++++++-
update-cdp.sh | 17 +++++++++++++++++
3 files changed, 26 insertions(+), 2 deletions(-)
create mode 100755 update-cdp.sh
diff --git a/.gitignore b/.gitignore
index d982227..f06f256 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,4 +10,5 @@ __pycache__
/.vscode
/.tool-versions
/test_*
-/*.log
\ No newline at end of file
+/*.log
+.idea
\ No newline at end of file
diff --git a/README.md b/README.md
index af99a72..f81cb61 100644
--- a/README.md
+++ b/README.md
@@ -114,7 +114,13 @@ Example:
```sh
cdpgen --browser-protocol browser_protocol.json --js-protocol js_protocol.json --output /tmp/cdp
```
-You can then include the `/tmp/cdp` package in your project and import it like the builtin CDP types.
+You can then include the `/tmp/cdp` package in your project and import it like the builtin CDP types.
+
+Here you can find a script that automatically downloads the latest protocol files and generates Python types
+```shell
+chmod +x update-cdp.sh
+./update-cdp.sh
+```
## Implementation of a CDP client
The `pycdp.cdp` package follows same structure of CDP domains, each domain is Python module and each command a function in that module.
diff --git a/update-cdp.sh b/update-cdp.sh
new file mode 100755
index 0000000..2b58e7f
--- /dev/null
+++ b/update-cdp.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+
+if [ -d "devtools-protocol" ] && { [ -f "devtools-protocol/browser_protocol.json" ] || [ -f "devtools-protocol/js_protocol.json" ]; }; then
+ rm -f devtools-protocol/*
+fi
+
+wget -P devtools-protocol/ https://raw.githubusercontent.com/ChromeDevTools/devtools-protocol/master/json/browser_protocol.json https://raw.githubusercontent.com/ChromeDevTools/devtools-protocol/master/json/js_protocol.json
+if [ $? -ne 0 ]; then
+ echo "Error: Failed to download files"
+ exit 1
+fi
+
+cdpgen --browser-protocol devtools-protocol/browser_protocol.json --js-protocol devtools-protocol/js_protocol.json --output cdp/
+if [ $? -ne 0 ]; then
+ echo "Error: Failed to execute cdpgen"
+ exit 1
+fi
\ No newline at end of file
From e493c3c8d9dea0721bc337b75a843da61a4c4508 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Sat, 18 Mar 2023 14:47:50 -0300
Subject: [PATCH 56/81] update README
---
README.md | 16 ++++------------
pycdp/gen/generate.py | 2 +-
2 files changed, 5 insertions(+), 13 deletions(-)
diff --git a/README.md b/README.md
index 06dfc6c..d56cda3 100644
--- a/README.md
+++ b/README.md
@@ -8,12 +8,6 @@ sending JSON messages over a WebSocket. That JSON format is described by a
machine-readable specification. This specification is used to automatically
generate the classes and methods found in this library.
-You could write a CDP client by connecting a WebSocket and then sending JSON
-objects, but this would be tedious and error-prone: the Python interpreter would
-not catch any typos in your JSON objects, and you wouldn't get autocomplete for
-any parts of the JSON data structure. By providing a set of native Python
-wrappers, this project makes it easier and faster to write CDP client code.
-
## Installation
You can install this library as a dependency on your project with:
```
@@ -92,9 +86,7 @@ d.addCallback(lambda *args: reactor.stop())
reactor.run()
```
-where chrome debugger is listening on `http://localhost:9222` (started by `google-chrome --remote-debugging-port=9222`).
-
-You also can use just the builtin CDP types with `import pycdp.cdp` on your own client implementation. If you want to try a different CDP version you can build new type wrappers with `cdpgen` command:
+You also can use just the builtin CDP type wrappers with `import pycdp.cdp` on your own client implementation. If you want to try a different CDP version you can build new type wrappers with `cdpgen` command:
```
usage: cdpgen
@@ -108,7 +100,7 @@ optional arguments:
JSON file for the javascript protocol
--output OUTPUT output path for the generated Python modules
-JSON files for the CDP spec can be found at https://github.com/ChromeDevTools/devtools-protocol
+JSON files for the CDP spec can be found at https://github.com/ChromeDevTools/devtools-protocol/tree/master/json
```
Example:
```sh
@@ -142,10 +134,10 @@ For implementation details check out the [docs][3].
PyCDP is licensed under the MIT License.
-[1]: https://github.com/ChromeDevTools/devtools-protocol/
+[1]: https://chromedevtools.github.io/devtools-protocol/
[2]: https://github.com/ChromeDevTools/devtools-protocol/tree/1b1e643d77dacc9568b5acc1efdeaec19c048a27
[3]: docs/getting_started.rst
[4]: https://github.com/hyperiongray/trio-chrome-devtools-protocol
[5]: https://python-poetry.org/docs/
[6]: https://pypi.org/project/Twisted/
-[7]: https://pypi.org/project/autobahn/
\ No newline at end of file
+[7]: https://pypi.org/project/autobahn/
diff --git a/pycdp/gen/generate.py b/pycdp/gen/generate.py
index 6ebf261..3b90ee0 100644
--- a/pycdp/gen/generate.py
+++ b/pycdp/gen/generate.py
@@ -1038,7 +1038,7 @@ def file_type(path: str):
parser = ArgumentParser(
usage='%(prog)s ',
description='Generate Python types for the Chrome Devtools Protocol (CDP) specification.',
- epilog='JSON files for the CDP spec can be found at https://github.com/ChromeDevTools/devtools-protocol'
+ epilog='JSON files for the CDP spec can be found at https://github.com/ChromeDevTools/devtools-protocol/tree/master/json'
)
parser.add_argument(
'--browser-protocol',
From c3257f9923de6cb2c2969fe7114532e97d750419 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Sat, 18 Mar 2023 15:12:44 -0300
Subject: [PATCH 57/81] fixes #5
---
README.md | 2 +-
pycdp/gen/generate.py | 2 ++
2 files changed, 3 insertions(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 184d003..1005745 100644
--- a/README.md
+++ b/README.md
@@ -109,7 +109,7 @@ cdpgen --browser-protocol browser_protocol.json --js-protocol js_protocol.json -
You can then include the `/tmp/cdp` package in your project and import it like the builtin CDP types.
## Implementation of a CDP client
-The `pycdp.cdp` package follows same structure of CDP domains, each domain is Python module and each command a function in that module.
+The `pycdp.cdp` package follows the same structure as CDP domains, each domain is a Python module and each command a function in that module.
Each function is a generator with a single yield which is a Python dict, on the CDP wire format,
containing the message that should be sent to the browser, on resumption the generator receives the message from browser:
diff --git a/pycdp/gen/generate.py b/pycdp/gen/generate.py
index 3b90ee0..84594b8 100644
--- a/pycdp/gen/generate.py
+++ b/pycdp/gen/generate.py
@@ -2,6 +2,7 @@
import os
import json
import typing
+import shutil
import builtins
import logging
import operator
@@ -1074,6 +1075,7 @@ def file_type(path: str):
for domain in domains:
logger.info('Generating module: %s → %s/%s.py', domain.domain, output, domain.module)
(output / f'{domain.module}.py').write_text(domain.generate_code())
+ shutil.copyfile(Path(__file__).parent.parent / 'cdp' / 'util.py', output / 'util.py')
generate_init(output / '__init__.py', domains)
(output / 'README.md').write_text(GENERATED_PACKAGE_NOTICE)
(output / 'py.typed').touch()
From 8c46551381a88d01c3f3daa0a3d4084c6464c7b3 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Sat, 18 Mar 2023 15:16:44 -0300
Subject: [PATCH 58/81] add generated package notice in own CDP wrappers
---
pycdp/gen/generate.py | 12 ++++--------
1 file changed, 4 insertions(+), 8 deletions(-)
diff --git a/pycdp/gen/generate.py b/pycdp/gen/generate.py
index 84594b8..5544490 100644
--- a/pycdp/gen/generate.py
+++ b/pycdp/gen/generate.py
@@ -1020,14 +1020,10 @@ def selfgen():
with module_path.open('w') as module_file:
module_file.write(domain.generate_code())
- init_path = output_path / '__init__.py'
- generate_init(init_path, domains)
-
- docs_path = here.parent / 'docs' / 'api'
- generate_docs(docs_path, domains)
-
- py_typed_path = output_path / 'py.typed'
- py_typed_path.touch()
+ generate_init(output_path / '__init__.py', domains)
+ generate_docs(here.parent / 'docs' / 'api', domains)
+ (output_path / 'README.md').write_text(GENERATED_PACKAGE_NOTICE)
+ (output_path / 'py.typed').touch()
def cdpgen():
From 63d1554a24a97751ba49f3b9da6278cc9a858d79 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Sat, 18 Mar 2023 15:31:59 -0300
Subject: [PATCH 59/81] add browser instance leak warning
---
pycdp/browser.py | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/pycdp/browser.py b/pycdp/browser.py
index 9c22e0c..bab421d 100644
--- a/pycdp/browser.py
+++ b/pycdp/browser.py
@@ -1,3 +1,4 @@
+import warnings
import os
import signal
import shutil
@@ -125,6 +126,10 @@ def _build_launch_env(self):
def _configure_profile(self):
pass
+ def __del__(self):
+ if self._process is not None:
+ warnings.warn('A BrowserLauncher instance has not closed with .kill(), it will leak')
+
class ChromeLauncher(BrowserLauncher):
From 85a20f4b1ab7c877a2aaa91b84629d359e69c7a2 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Sun, 19 Mar 2023 17:38:58 -0300
Subject: [PATCH 60/81] add safe event listener
---
README.md | 13 +++++++------
pycdp/asyncio.py | 26 +++++++++++++++++++++++---
2 files changed, 30 insertions(+), 9 deletions(-)
diff --git a/README.md b/README.md
index 1005745..d355379 100644
--- a/README.md
+++ b/README.md
@@ -35,13 +35,14 @@ async def main():
target_id = await conn.execute(cdp.target.create_target('about:blank'))
target_session = await conn.connect_session(target_id)
await target_session.execute(cdp.page.enable())
- await target_session.execute(cdp.page.navigate('https://chromedevtools.github.io/devtools-protocol/'))
# you may use "async for target_session.listen()" to listen multiple events, here we listen just a single event.
- async with target_session.wait_for(cdp.page.DomContentEventFired):
- dom = await target_session.execute(cdp.dom.get_document())
- node = await target_session.execute(cdp.dom.query_selector(dom.node_id, 'p'))
- js_node = await target_session.execute(cdp.dom.resolve_node(node))
- print((await target_session.execute(cdp.runtime.call_function_on('function() {return this.innerText;}', js_node.object_id, return_by_value=True)))[0].value)
+ with target_session.safe_wait_for(cdp.page.DomContentEventFired) as navigation:
+ await target_session.execute(cdp.page.navigate('https://chromedevtools.github.io/devtools-protocol/'))
+ await navigation
+ dom = await target_session.execute(cdp.dom.get_document())
+ node = await target_session.execute(cdp.dom.query_selector(dom.node_id, 'p'))
+ js_node = await target_session.execute(cdp.dom.resolve_node(node))
+ print((await target_session.execute(cdp.runtime.call_function_on('function() {return this.innerText;}', js_node.object_id, return_by_value=True)))[0].value)
await target_session.execute(cdp.page.close())
await conn.close()
await asyncio.get_running_loop().run_in_executor(None, chrome.kill)
diff --git a/pycdp/asyncio.py b/pycdp/asyncio.py
index 14323e9..52e942f 100644
--- a/pycdp/asyncio.py
+++ b/pycdp/asyncio.py
@@ -4,7 +4,7 @@
import itertools
import typing as t
from collections import defaultdict
-from contextlib import asynccontextmanager
+from contextlib import asynccontextmanager, contextmanager
from aiohttp import ClientSession
from aiohttp.client import ClientWebSocketResponse
from aiohttp.http_websocket import WSMsgType, WSCloseCode
@@ -118,7 +118,7 @@ def listen(self, *event_types: t.Type[T], buffer_size=100) -> t.AsyncIterator[T]
return receiver.__aiter__()
@asynccontextmanager
- async def wait_for(self, event_type: t.Type[T], buffer_size=100) -> t.AsyncGenerator[T, None]:
+ async def wait_for(self, event_type: t.Type[T]) -> t.AsyncGenerator[T, None]:
'''
Wait for an event of the given type and return it.
@@ -126,10 +126,30 @@ async def wait_for(self, event_type: t.Type[T], buffer_size=100) -> t.AsyncGener
with block. The block will not exit until the indicated event is
received.
'''
- async for event in self.listen(event_type, buffer_size):
+ async for event in self.listen(event_type, buffer_size=2):
yield event
return
+ @contextmanager
+ def safe_wait_for(self, event_type: t.Type[T]) -> t.Generator[t.Awaitable[T], None]:
+ """
+ Wait for an asynchronous event. This context manager yields an awaitable that should be
+ awaited to receive the event.
+
+ Use this context manager to register an event listener before performing the action that
+ triggers the event (e.g. a page navigation); it avoids the race conditions of wait_for().
+ """
+ aevent = asyncio.create_task(self._async_wait_for(event_type))
+ try:
+ yield aevent
+ finally:
+ if not aevent.done():
+ aevent.cancel()
+
+ async def _async_wait_for(self, event_type: t.Type[T]) -> T:
+ async for event in self.listen(event_type, buffer_size=2):
+ return event
+
def close_listeners(self):
for listener in itertools.chain.from_iterable(self._listeners.values()):
listener.close()
From 929680fac065f24229fdf9aac8a7887008d1f025 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Sun, 19 Mar 2023 17:40:08 -0300
Subject: [PATCH 61/81] bump version to 1.3.0
---
pyproject.toml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pyproject.toml b/pyproject.toml
index b1a77c8..dbc1529 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "python-cdp"
-version = "1.2.1"
+version = "1.3.0"
description = "Python type wrappers for Chrome DevTools Protocol (CDP)"
packages = [
{include = "pycdp"}
From 42873dd9e5d76da3ae50e33ab76492f5390d94fa Mon Sep 17 00:00:00 2001
From: TurboKach
Date: Mon, 20 Mar 2023 19:31:56 +0800
Subject: [PATCH 62/81] updated packages
---
.gitignore | 3 +-
poetry.lock | 125 ++++++++++++++++++----------------------------------
2 files changed, 44 insertions(+), 84 deletions(-)
diff --git a/.gitignore b/.gitignore
index f06f256..12ce418 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,4 +11,5 @@ __pycache__
/.tool-versions
/test_*
/*.log
-.idea
\ No newline at end of file
+.idea
+.DS_Store
\ No newline at end of file
diff --git a/poetry.lock b/poetry.lock
index acea45d..3a2d76e 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -9,16 +9,14 @@ python-versions = ">=3.6"
[package.dependencies]
aiosignal = ">=1.1.2"
async-timeout = ">=4.0.0a3,<5.0"
-asynctest = {version = "0.13.0", markers = "python_version < \"3.8\""}
attrs = ">=17.3.0"
charset-normalizer = ">=2.0,<3.0"
frozenlist = ">=1.1.1"
multidict = ">=4.5,<7.0"
-typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""}
yarl = ">=1.0,<2.0"
[package.extras]
-speedups = ["aiodns", "brotli", "cchardet"]
+speedups = ["Brotli", "aiodns", "cchardet"]
[[package]]
name = "aiosignal"
@@ -47,17 +45,6 @@ category = "main"
optional = false
python-versions = ">=3.6"
-[package.dependencies]
-typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""}
-
-[[package]]
-name = "asynctest"
-version = "0.13.0"
-description = "Enhance the standard unittest package with features for testing asyncio libraries"
-category = "main"
-optional = false
-python-versions = ">=3.5"
-
[[package]]
name = "atomicwrites"
version = "1.4.0"
@@ -75,10 +62,10 @@ optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[package.extras]
-dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
-docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
-tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
-tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"]
+dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"]
+docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
+tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"]
+tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"]
[[package]]
name = "babel"
@@ -108,7 +95,7 @@ optional = false
python-versions = ">=3.5.0"
[package.extras]
-unicode_backport = ["unicodedata2"]
+unicode-backport = ["unicodedata2"]
[[package]]
name = "colorama"
@@ -130,7 +117,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
wrapt = ">=1.10,<2"
[package.extras]
-dev = ["tox", "bumpversion (<1)", "sphinx (<2)", "PyTest (<5)", "PyTest-Cov (<2.6)", "pytest", "pytest-cov"]
+dev = ["PyTest", "PyTest (<5)", "PyTest-Cov", "PyTest-Cov (<2.6)", "bumpversion (<1)", "sphinx (<2)", "tox"]
[[package]]
name = "docutils"
@@ -164,23 +151,6 @@ category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-[[package]]
-name = "importlib-metadata"
-version = "4.11.3"
-description = "Read metadata from Python packages"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
-zipp = ">=0.5"
-
-[package.extras]
-docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"]
-perf = ["ipython"]
-testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"]
-
[[package]]
name = "inflection"
version = "0.4.0"
@@ -270,9 +240,6 @@ category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-[package.dependencies]
-importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
-
[package.extras]
dev = ["pre-commit", "tox"]
@@ -315,7 +282,6 @@ python-versions = ">=3.5"
atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
attrs = ">=17.4.0"
colorama = {version = "*", markers = "sys_platform == \"win32\""}
-importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
more-itertools = ">=4.0.0"
packaging = "*"
pluggy = ">=0.12,<1.0"
@@ -350,7 +316,20 @@ urllib3 = ">=1.21.1,<1.27"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
-use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"]
+use-chardet-on-py3 = ["chardet (>=3.0.2,<5)"]
+
+[[package]]
+name = "setuptools"
+version = "65.7.0"
+description = "Easily download, build, install, upgrade, and uninstall Python packages"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
+testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
[[package]]
name = "snowballstemmer"
@@ -378,6 +357,7 @@ Jinja2 = ">=2.3"
packaging = "*"
Pygments = ">=2.0"
requests = ">=2.5.0"
+setuptools = "*"
snowballstemmer = ">=1.1"
sphinxcontrib-applehelp = "*"
sphinxcontrib-devhelp = "*"
@@ -388,8 +368,8 @@ sphinxcontrib-serializinghtml = "*"
[package.extras]
docs = ["sphinxcontrib-websupport"]
-lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.800)", "docutils-stubs"]
-test = ["pytest", "pytest-cov", "html5lib", "cython", "typed-ast"]
+lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.800)"]
+test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"]
[[package]]
name = "sphinx-autodoc-typehints"
@@ -403,8 +383,8 @@ python-versions = ">=3.6"
Sphinx = ">=3.0"
[package.extras]
-test = ["pytest (>=3.1.0)", "typing-extensions (>=3.5)", "sphobjinv (>=2.0)", "Sphinx (>=3.2.0)", "dataclasses"]
-type_comments = ["typed-ast (>=1.4.0)"]
+test = ["Sphinx (>=3.2.0)", "dataclasses", "pytest (>=3.1.0)", "sphobjinv (>=2.0)", "typing-extensions (>=3.5)"]
+type-comments = ["typed-ast (>=1.4.0)"]
[[package]]
name = "sphinx-rtd-theme"
@@ -426,7 +406,7 @@ optional = false
python-versions = ">=3.5"
[package.extras]
-lint = ["flake8", "mypy", "docutils-stubs"]
+lint = ["docutils-stubs", "flake8", "mypy"]
test = ["pytest"]
[[package]]
@@ -438,7 +418,7 @@ optional = false
python-versions = ">=3.5"
[package.extras]
-lint = ["flake8", "mypy", "docutils-stubs"]
+lint = ["docutils-stubs", "flake8", "mypy"]
test = ["pytest"]
[[package]]
@@ -450,8 +430,8 @@ optional = false
python-versions = ">=3.6"
[package.extras]
-lint = ["flake8", "mypy", "docutils-stubs"]
-test = ["pytest", "html5lib"]
+lint = ["docutils-stubs", "flake8", "mypy"]
+test = ["html5lib", "pytest"]
[[package]]
name = "sphinxcontrib-jsmath"
@@ -462,7 +442,7 @@ optional = false
python-versions = ">=3.5"
[package.extras]
-test = ["pytest", "flake8", "mypy"]
+test = ["flake8", "mypy", "pytest"]
[[package]]
name = "sphinxcontrib-qthelp"
@@ -473,7 +453,7 @@ optional = false
python-versions = ">=3.5"
[package.extras]
-lint = ["flake8", "mypy", "docutils-stubs"]
+lint = ["docutils-stubs", "flake8", "mypy"]
test = ["pytest"]
[[package]]
@@ -485,7 +465,7 @@ optional = false
python-versions = ">=3.5"
[package.extras]
-lint = ["flake8", "mypy", "docutils-stubs"]
+lint = ["docutils-stubs", "flake8", "mypy"]
test = ["pytest"]
[[package]]
@@ -500,7 +480,7 @@ python-versions = "*"
name = "typing-extensions"
version = "4.1.1"
description = "Backported and Experimental Type Hints for Python 3.6+"
-category = "main"
+category = "dev"
optional = false
python-versions = ">=3.6"
@@ -513,8 +493,8 @@ optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
[package.extras]
-brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"]
-secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
+secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
@@ -544,24 +524,11 @@ python-versions = ">=3.6"
[package.dependencies]
idna = ">=2.0"
multidict = ">=4.0"
-typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""}
-
-[[package]]
-name = "zipp"
-version = "3.7.0"
-description = "Backport of pathlib-compatible object wrapper for zip files"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.extras]
-docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
-testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"]
[metadata]
lock-version = "1.1"
-python-versions = "^3.7"
-content-hash = "ed29ffc1133f17161446637668c63a01554ba204abab6d4388f9da50df66b182"
+python-versions = "^3.8"
+content-hash = "f01943eaad90b858f6366f55b6c2d4eab02e20e85fc4df4447611a3387a152a3"
[metadata.files]
aiohttp = [
@@ -650,10 +617,6 @@ async-timeout = [
{file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"},
{file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"},
]
-asynctest = [
- {file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"},
- {file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"},
-]
atomicwrites = [
{file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
{file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
@@ -755,10 +718,6 @@ imagesize = [
{file = "imagesize-1.3.0-py2.py3-none-any.whl", hash = "sha256:1db2f82529e53c3e929e8926a1fa9235aa82d0bd0c580359c67ec31b2fddaa8c"},
{file = "imagesize-1.3.0.tar.gz", hash = "sha256:cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d"},
]
-importlib-metadata = [
- {file = "importlib_metadata-4.11.3-py3-none-any.whl", hash = "sha256:1208431ca90a8cca1a6b8af391bb53c1a2db74e5d1cef6ddced95d4b2062edc6"},
- {file = "importlib_metadata-4.11.3.tar.gz", hash = "sha256:ea4c597ebf37142f827b8f39299579e31685c31d3a438b59f469406afd0f2539"},
-]
inflection = [
{file = "inflection-0.4.0-py2.py3-none-any.whl", hash = "sha256:9a15d3598f01220e93f2207c432cfede50daff53137ce660fb8be838ef1ca6cc"},
{file = "inflection-0.4.0.tar.gz", hash = "sha256:32a5c3341d9583ec319548b9015b7fbdf8c429cbcb575d326c33ae3a0e90d52c"},
@@ -926,6 +885,10 @@ requests = [
{file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"},
{file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"},
]
+setuptools = [
+ {file = "setuptools-65.7.0-py3-none-any.whl", hash = "sha256:8ab4f1dbf2b4a65f7eec5ad0c620e84c34111a68d3349833494b9088212214dd"},
+ {file = "setuptools-65.7.0.tar.gz", hash = "sha256:4d3c92fac8f1118bb77a22181355e29c239cabfe2b9effdaa665c66b711136d7"},
+]
snowballstemmer = [
{file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
{file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
@@ -1150,7 +1113,3 @@ yarl = [
{file = "yarl-1.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:797c2c412b04403d2da075fb93c123df35239cd7b4cc4e0cd9e5839b73f52c58"},
{file = "yarl-1.7.2.tar.gz", hash = "sha256:45399b46d60c253327a460e99856752009fcee5f5d3c80b2f7c0cae1c38d56dd"},
]
-zipp = [
- {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"},
- {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"},
-]
From 3ca893fa755cb6877058fee0af92353466f6cd08 Mon Sep 17 00:00:00 2001
From: Heraldo Lucena <23155511+HMaker@users.noreply.github.com>
Date: Fri, 21 Apr 2023 14:18:45 -0300
Subject: [PATCH 63/81] fix docs generator
---
docs/api/audits.rst | 13 +++--
docs/api/css.rst | 34 +++++++++++
docs/api/debugger.rst | 9 +++
docs/api/dom.rst | 17 ++++++
docs/api/dom_storage.rst | 5 ++
docs/api/emulation.rst | 4 ++
docs/api/headless_experimental.rst | 9 +--
docs/api/media.rst | 5 ++
docs/api/network.rst | 5 ++
docs/api/page.rst | 14 +++++
docs/api/profiler.rst | 21 -------
docs/api/runtime.rst | 7 +++
docs/api/storage.rst | 90 ++++++++++++++++++++++++++++++
docs/api/system_info.rst | 2 +
docs/api/target.rst | 10 ++++
docs/api/web_authn.rst | 16 +++++-
pycdp/gen/generate.py | 2 +-
17 files changed, 228 insertions(+), 35 deletions(-)
diff --git a/docs/api/audits.rst b/docs/api/audits.rst
index 0454630..9913afb 100644
--- a/docs/api/audits.rst
+++ b/docs/api/audits.rst
@@ -34,22 +34,22 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
-.. autoclass:: SameSiteCookieExclusionReason
+.. autoclass:: CookieExclusionReason
:members:
:undoc-members:
:exclude-members: from_json, to_json
-.. autoclass:: SameSiteCookieWarningReason
+.. autoclass:: CookieWarningReason
:members:
:undoc-members:
:exclude-members: from_json, to_json
-.. autoclass:: SameSiteCookieOperation
+.. autoclass:: CookieOperation
:members:
:undoc-members:
:exclude-members: from_json, to_json
-.. autoclass:: SameSiteCookieIssueDetails
+.. autoclass:: CookieIssueDetails
:members:
:undoc-members:
:exclude-members: from_json, to_json
@@ -174,6 +174,11 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: BounceTrackingIssueDetails
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: ClientHintIssueReason
:members:
:undoc-members:
diff --git a/docs/api/css.rst b/docs/api/css.rst
index c68e7fe..c6cce76 100644
--- a/docs/api/css.rst
+++ b/docs/api/css.rst
@@ -44,6 +44,11 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: InheritedPseudoElementMatches
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: RuleMatch
:members:
:undoc-members:
@@ -124,6 +129,21 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: CSSScope
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: CSSLayer
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: CSSLayerData
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: PlatformFontUsage
:members:
:undoc-members:
@@ -139,6 +159,16 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: CSSTryRule
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: CSSPositionFallbackRule
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: CSSKeyframesRule
:members:
:undoc-members:
@@ -184,6 +214,8 @@ to. For more information, see
.. autofunction:: get_inline_styles_for_node
+.. autofunction:: get_layers_for_node
+
.. autofunction:: get_matched_styles_for_node
.. autofunction:: get_media_queries
@@ -204,6 +236,8 @@ to. For more information, see
.. autofunction:: set_rule_selector
+.. autofunction:: set_scope_text
+
.. autofunction:: set_style_sheet_text
.. autofunction:: set_style_texts
diff --git a/docs/api/debugger.rst b/docs/api/debugger.rst
index caa81c3..42984e3 100644
--- a/docs/api/debugger.rst
+++ b/docs/api/debugger.rst
@@ -63,6 +63,11 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: WasmDisassemblyChunk
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: ScriptLanguage
:members:
:undoc-members:
@@ -89,6 +94,8 @@ to. For more information, see
.. autofunction:: disable
+.. autofunction:: disassemble_wasm_module
+
.. autofunction:: enable
.. autofunction:: evaluate_on_call_frame
@@ -101,6 +108,8 @@ to. For more information, see
.. autofunction:: get_wasm_bytecode
+.. autofunction:: next_wasm_disassembly_chunk
+
.. autofunction:: pause
.. autofunction:: pause_on_async_call
diff --git a/docs/api/dom.rst b/docs/api/dom.rst
index d629423..85e0eed 100644
--- a/docs/api/dom.rst
+++ b/docs/api/dom.rst
@@ -53,6 +53,16 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: PhysicalAxes
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: LogicalAxes
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: Node
:members:
:undoc-members:
@@ -144,6 +154,8 @@ to. For more information, see
.. autofunction:: get_search_results
+.. autofunction:: get_top_layer_elements
+
.. autofunction:: hide_highlight
.. autofunction:: highlight_node
@@ -253,6 +265,11 @@ you use the event's attributes.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: TopLayerElementsUpdated
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: PseudoElementRemoved
:members:
:undoc-members:
diff --git a/docs/api/dom_storage.rst b/docs/api/dom_storage.rst
index d0ab1a1..699dbfd 100644
--- a/docs/api/dom_storage.rst
+++ b/docs/api/dom_storage.rst
@@ -19,6 +19,11 @@ yourself. Instead, the API creates objects for you as return
values from commands, and then you can use those objects as
arguments to other commands.
+.. autoclass:: SerializedStorageKey
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: StorageId
:members:
:undoc-members:
diff --git a/docs/api/emulation.rst b/docs/api/emulation.rst
index 5abce21..019e50d 100644
--- a/docs/api/emulation.rst
+++ b/docs/api/emulation.rst
@@ -76,6 +76,8 @@ to. For more information, see
.. autofunction:: set_auto_dark_mode_override
+.. autofunction:: set_automation_override
+
.. autofunction:: set_cpu_throttling_rate
.. autofunction:: set_default_background_color_override
@@ -96,6 +98,8 @@ to. For more information, see
.. autofunction:: set_geolocation_override
+.. autofunction:: set_hardware_concurrency_override
+
.. autofunction:: set_idle_override
.. autofunction:: set_locale_override
diff --git a/docs/api/headless_experimental.rst b/docs/api/headless_experimental.rst
index a87a3b2..175051d 100644
--- a/docs/api/headless_experimental.rst
+++ b/docs/api/headless_experimental.rst
@@ -45,11 +45,4 @@ to. For more information, see
Events
------
-Generally, you do not need to instantiate CDP events
-yourself. Instead, the API creates events for you and then
-you use the event's attributes.
-
-.. autoclass:: NeedsBeginFramesChanged
- :members:
- :undoc-members:
- :exclude-members: from_json, to_json
+*There are no events in this module.*
diff --git a/docs/api/media.rst b/docs/api/media.rst
index a167f4e..30175fd 100644
--- a/docs/api/media.rst
+++ b/docs/api/media.rst
@@ -44,6 +44,11 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: PlayerErrorSourceLocation
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: PlayerError
:members:
:undoc-members:
diff --git a/docs/api/network.rst b/docs/api/network.rst
index 025d6a3..0347d18 100644
--- a/docs/api/network.rst
+++ b/docs/api/network.rst
@@ -143,6 +143,11 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: AlternateProtocolUsage
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: Response
:members:
:undoc-members:
diff --git a/docs/api/page.rst b/docs/api/page.rst
index 3a8cf5d..79d385d 100644
--- a/docs/api/page.rst
+++ b/docs/api/page.rst
@@ -37,6 +37,11 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: AdScriptId
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: SecureContextType
:members:
:undoc-members:
@@ -217,6 +222,11 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: AutoResponseMode
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: NavigationType
:members:
:undoc-members:
@@ -288,6 +298,8 @@ to. For more information, see
.. autofunction:: generate_test_report
+.. autofunction:: get_ad_script_id
+
.. autofunction:: get_app_id
.. autofunction:: get_app_manifest
@@ -356,6 +368,8 @@ to. For more information, see
.. autofunction:: set_lifecycle_events_enabled
+.. autofunction:: set_rph_registration_mode
+
.. autofunction:: set_spc_transaction_mode
.. autofunction:: set_touch_emulation_enabled
diff --git a/docs/api/profiler.rst b/docs/api/profiler.rst
index 02c25d6..6baeafa 100644
--- a/docs/api/profiler.rst
+++ b/docs/api/profiler.rst
@@ -45,21 +45,6 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
-.. autoclass:: TypeObject
- :members:
- :undoc-members:
- :exclude-members: from_json, to_json
-
-.. autoclass:: TypeProfileEntry
- :members:
- :undoc-members:
- :exclude-members: from_json, to_json
-
-.. autoclass:: ScriptTypeProfile
- :members:
- :undoc-members:
- :exclude-members: from_json, to_json
-
Commands
--------
@@ -84,18 +69,12 @@ to. For more information, see
.. autofunction:: start_precise_coverage
-.. autofunction:: start_type_profile
-
.. autofunction:: stop
.. autofunction:: stop_precise_coverage
-.. autofunction:: stop_type_profile
-
.. autofunction:: take_precise_coverage
-.. autofunction:: take_type_profile
-
Events
------
diff --git a/docs/api/runtime.rst b/docs/api/runtime.rst
index 056955e..a2db410 100644
--- a/docs/api/runtime.rst
+++ b/docs/api/runtime.rst
@@ -26,6 +26,11 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: WebDriverValue
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: RemoteObjectId
:members:
:undoc-members:
@@ -154,6 +159,8 @@ to. For more information, see
.. autofunction:: evaluate
+.. autofunction:: get_exception_details
+
.. autofunction:: get_heap_usage
.. autofunction:: get_isolate_id
diff --git a/docs/api/storage.rst b/docs/api/storage.rst
index 4989fb9..157a977 100644
--- a/docs/api/storage.rst
+++ b/docs/api/storage.rst
@@ -17,6 +17,11 @@ yourself. Instead, the API creates objects for you as return
values from commands, and then you can use those objects as
arguments to other commands.
+.. autoclass:: SerializedStorageKey
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: StorageType
:members:
:undoc-members:
@@ -47,6 +52,46 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: SharedStorageAccessType
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: SharedStorageEntry
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: SharedStorageMetadata
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: SharedStorageReportingMetadata
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: SharedStorageUrlWithMetadata
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: SharedStorageAccessParams
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: StorageBucketsDurability
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: StorageBucketInfo
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
Commands
--------
@@ -63,30 +108,60 @@ to. For more information, see
.. autofunction:: clear_data_for_origin
+.. autofunction:: clear_data_for_storage_key
+
+.. autofunction:: clear_shared_storage_entries
+
.. autofunction:: clear_trust_tokens
+.. autofunction:: delete_shared_storage_entry
+
+.. autofunction:: delete_storage_bucket
+
.. autofunction:: get_cookies
.. autofunction:: get_interest_group_details
+.. autofunction:: get_shared_storage_entries
+
+.. autofunction:: get_shared_storage_metadata
+
+.. autofunction:: get_storage_key_for_frame
+
.. autofunction:: get_trust_tokens
.. autofunction:: get_usage_and_quota
.. autofunction:: override_quota_for_origin
+.. autofunction:: reset_shared_storage_budget
+
.. autofunction:: set_cookies
.. autofunction:: set_interest_group_tracking
+.. autofunction:: set_shared_storage_entry
+
+.. autofunction:: set_shared_storage_tracking
+
+.. autofunction:: set_storage_bucket_tracking
+
.. autofunction:: track_cache_storage_for_origin
+.. autofunction:: track_cache_storage_for_storage_key
+
.. autofunction:: track_indexed_db_for_origin
+.. autofunction:: track_indexed_db_for_storage_key
+
.. autofunction:: untrack_cache_storage_for_origin
+.. autofunction:: untrack_cache_storage_for_storage_key
+
.. autofunction:: untrack_indexed_db_for_origin
+.. autofunction:: untrack_indexed_db_for_storage_key
+
Events
------
@@ -118,3 +193,18 @@ you use the event's attributes.
:members:
:undoc-members:
:exclude-members: from_json, to_json
+
+.. autoclass:: SharedStorageAccessed
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: StorageBucketCreatedOrUpdated
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: StorageBucketDeleted
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
diff --git a/docs/api/system_info.rst b/docs/api/system_info.rst
index 1b595c7..5dbbd7b 100644
--- a/docs/api/system_info.rst
+++ b/docs/api/system_info.rst
@@ -76,6 +76,8 @@ commands, and ``z`` is the return type you should pay attention
to. For more information, see
:ref:`Getting Started: Commands `.
+.. autofunction:: get_feature_state
+
.. autofunction:: get_info
.. autofunction:: get_process_info
diff --git a/docs/api/target.rst b/docs/api/target.rst
index 263ae8a..38edd3b 100644
--- a/docs/api/target.rst
+++ b/docs/api/target.rst
@@ -32,6 +32,16 @@ arguments to other commands.
:undoc-members:
:exclude-members: from_json, to_json
+.. autoclass:: FilterEntry
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: TargetFilter
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
.. autoclass:: RemoteLocation
:members:
:undoc-members:
diff --git a/docs/api/web_authn.rst b/docs/api/web_authn.rst
index 8a1a0b9..cc38dd3 100644
--- a/docs/api/web_authn.rst
+++ b/docs/api/web_authn.rst
@@ -82,9 +82,23 @@ to. For more information, see
.. autofunction:: set_automatic_presence_simulation
+.. autofunction:: set_response_override_bits
+
.. autofunction:: set_user_verified
Events
------
-*There are no events in this module.*
+Generally, you do not need to instantiate CDP events
+yourself. Instead, the API creates events for you and then
+you use the event's attributes.
+
+.. autoclass:: CredentialAdded
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
+
+.. autoclass:: CredentialAsserted
+ :members:
+ :undoc-members:
+ :exclude-members: from_json, to_json
diff --git a/pycdp/gen/generate.py b/pycdp/gen/generate.py
index 5544490..e7fc217 100644
--- a/pycdp/gen/generate.py
+++ b/pycdp/gen/generate.py
@@ -1021,7 +1021,7 @@ def selfgen():
module_file.write(domain.generate_code())
generate_init(output_path / '__init__.py', domains)
- generate_docs(here.parent / 'docs' / 'api', domains)
+ generate_docs(here.parent.parent / 'docs' / 'api', domains)
(output_path / 'README.md').write_text(GENERATED_PACKAGE_NOTICE)
(output_path / 'py.typed').touch()
From 735645db801cf31b67bd70e14dfc2406f311e2a3 Mon Sep 17 00:00:00 2001
From: TurboKach
Date: Sat, 22 Apr 2023 02:24:46 +0800
Subject: [PATCH 64/81] updated CDP protocol
---
poetry.lock | 1349 ++++++++++++++--------------
pycdp/cdp/__init__.py | 2 +-
pycdp/cdp/accessibility.py | 58 +-
pycdp/cdp/animation.py | 10 +-
pycdp/cdp/audits.py | 261 +++---
pycdp/cdp/background_service.py | 5 +
pycdp/cdp/browser.py | 28 +-
pycdp/cdp/cache_storage.py | 16 +-
pycdp/cdp/cast.py | 2 +-
pycdp/cdp/console.py | 6 +-
pycdp/cdp/css.py | 394 ++++++--
pycdp/cdp/database.py | 6 +-
pycdp/cdp/debugger.py | 221 +++--
pycdp/cdp/dom.py | 165 +++-
pycdp/cdp/dom_debugger.py | 6 +-
pycdp/cdp/dom_snapshot.py | 117 +--
pycdp/cdp/dom_storage.py | 29 +-
pycdp/cdp/emulation.py | 59 +-
pycdp/cdp/fetch.py | 30 +-
pycdp/cdp/headless_experimental.py | 39 +-
pycdp/cdp/heap_profiler.py | 28 +-
pycdp/cdp/indexed_db.py | 105 ++-
pycdp/cdp/input_.py | 24 +-
pycdp/cdp/io.py | 2 +-
pycdp/cdp/layer_tree.py | 24 +-
pycdp/cdp/log.py | 14 +-
pycdp/cdp/media.py | 59 +-
pycdp/cdp/network.py | 281 +++---
pycdp/cdp/overlay.py | 131 +--
pycdp/cdp/page.py | 242 +++--
pycdp/cdp/performance_timeline.py | 14 +-
pycdp/cdp/profiler.py | 130 +--
pycdp/cdp/runtime.py | 190 ++--
pycdp/cdp/security.py | 16 +-
pycdp/cdp/service_worker.py | 8 +-
pycdp/cdp/storage.py | 660 +++++++++++++-
pycdp/cdp/system_info.py | 27 +-
pycdp/cdp/target.py | 97 +-
pycdp/cdp/tracing.py | 39 +-
pycdp/cdp/web_audio.py | 14 +-
pycdp/cdp/web_authn.py | 103 ++-
pycdp/gen/generate.py | 5 +-
pyproject.toml | 2 +-
update-cdp.sh | 38 +-
44 files changed, 3398 insertions(+), 1658 deletions(-)
diff --git a/poetry.lock b/poetry.lock
index 3a2d76e..8fae0f2 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,3 +1,5 @@
+# This file is automatically @generated by Poetry and should not be changed by hand.
+
[[package]]
name = "aiohttp"
version = "3.8.1"
@@ -5,6 +7,80 @@ description = "Async http client/server framework (asyncio)"
category = "main"
optional = false
python-versions = ">=3.6"
+files = [
+ {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1ed0b6477896559f17b9eaeb6d38e07f7f9ffe40b9f0f9627ae8b9926ae260a8"},
+ {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7dadf3c307b31e0e61689cbf9e06be7a867c563d5a63ce9dca578f956609abf8"},
+ {file = "aiohttp-3.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a79004bb58748f31ae1cbe9fa891054baaa46fb106c2dc7af9f8e3304dc30316"},
+ {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12de6add4038df8f72fac606dff775791a60f113a725c960f2bab01d8b8e6b15"},
+ {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f0d5f33feb5f69ddd57a4a4bd3d56c719a141080b445cbf18f238973c5c9923"},
+ {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eaba923151d9deea315be1f3e2b31cc39a6d1d2f682f942905951f4e40200922"},
+ {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:099ebd2c37ac74cce10a3527d2b49af80243e2a4fa39e7bce41617fbc35fa3c1"},
+ {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e5d962cf7e1d426aa0e528a7e198658cdc8aa4fe87f781d039ad75dcd52c516"},
+ {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fa0ffcace9b3aa34d205d8130f7873fcfefcb6a4dd3dd705b0dab69af6712642"},
+ {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:61bfc23df345d8c9716d03717c2ed5e27374e0fe6f659ea64edcd27b4b044cf7"},
+ {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:31560d268ff62143e92423ef183680b9829b1b482c011713ae941997921eebc8"},
+ {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:01d7bdb774a9acc838e6b8f1d114f45303841b89b95984cbb7d80ea41172a9e3"},
+ {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97ef77eb6b044134c0b3a96e16abcb05ecce892965a2124c566af0fd60f717e2"},
+ {file = "aiohttp-3.8.1-cp310-cp310-win32.whl", hash = "sha256:c2aef4703f1f2ddc6df17519885dbfa3514929149d3ff900b73f45998f2532fa"},
+ {file = "aiohttp-3.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:713ac174a629d39b7c6a3aa757b337599798da4c1157114a314e4e391cd28e32"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:473d93d4450880fe278696549f2e7aed8cd23708c3c1997981464475f32137db"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b5eeae8e019e7aad8af8bb314fb908dd2e028b3cdaad87ec05095394cce632"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3af642b43ce56c24d063325dd2cf20ee012d2b9ba4c3c008755a301aaea720ad"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3630c3ef435c0a7c549ba170a0633a56e92629aeed0e707fec832dee313fb7a"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4a4a4e30bf1edcad13fb0804300557aedd07a92cabc74382fdd0ba6ca2661091"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6f8b01295e26c68b3a1b90efb7a89029110d3a4139270b24fda961893216c440"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a25fa703a527158aaf10dafd956f7d42ac6d30ec80e9a70846253dd13e2f067b"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5bfde62d1d2641a1f5173b8c8c2d96ceb4854f54a44c23102e2ccc7e02f003ec"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:51467000f3647d519272392f484126aa716f747859794ac9924a7aafa86cd411"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:03a6d5349c9ee8f79ab3ff3694d6ce1cfc3ced1c9d36200cb8f08ba06bd3b782"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:102e487eeb82afac440581e5d7f8f44560b36cf0bdd11abc51a46c1cd88914d4"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-win32.whl", hash = "sha256:4aed991a28ea3ce320dc8ce655875e1e00a11bdd29fe9444dd4f88c30d558602"},
+ {file = "aiohttp-3.8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b0e20cddbd676ab8a64c774fefa0ad787cc506afd844de95da56060348021e96"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:37951ad2f4a6df6506750a23f7cbabad24c73c65f23f72e95897bb2cecbae676"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c23b1ad869653bc818e972b7a3a79852d0e494e9ab7e1a701a3decc49c20d51"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15b09b06dae900777833fe7fc4b4aa426556ce95847a3e8d7548e2d19e34edb8"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:477c3ea0ba410b2b56b7efb072c36fa91b1e6fc331761798fa3f28bb224830dd"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2f2f69dca064926e79997f45b2f34e202b320fd3782f17a91941f7eb85502ee2"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ef9612483cb35171d51d9173647eed5d0069eaa2ee812793a75373447d487aa4"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6d69f36d445c45cda7b3b26afef2fc34ef5ac0cdc75584a87ef307ee3c8c6d00"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:55c3d1072704d27401c92339144d199d9de7b52627f724a949fc7d5fc56d8b93"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b9d00268fcb9f66fbcc7cd9fe423741d90c75ee029a1d15c09b22d23253c0a44"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:07b05cd3305e8a73112103c834e91cd27ce5b4bd07850c4b4dbd1877d3f45be7"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c34dc4958b232ef6188c4318cb7b2c2d80521c9a56c52449f8f93ab7bc2a8a1c"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-win32.whl", hash = "sha256:d2f9b69293c33aaa53d923032fe227feac867f81682f002ce33ffae978f0a9a9"},
+ {file = "aiohttp-3.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6ae828d3a003f03ae31915c31fa684b9890ea44c9c989056fea96e3d12a9fa17"},
+ {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0c7ebbbde809ff4e970824b2b6cb7e4222be6b95a296e46c03cf050878fc1785"},
+ {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b7ef7cbd4fec9a1e811a5de813311ed4f7ac7d93e0fda233c9b3e1428f7dd7b"},
+ {file = "aiohttp-3.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c3d6a4d0619e09dcd61021debf7059955c2004fa29f48788a3dfaf9c9901a7cd"},
+ {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:718626a174e7e467f0558954f94af117b7d4695d48eb980146016afa4b580b2e"},
+ {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:589c72667a5febd36f1315aa6e5f56dd4aa4862df295cb51c769d16142ddd7cd"},
+ {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ed076098b171573161eb146afcb9129b5ff63308960aeca4b676d9d3c35e700"},
+ {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:086f92daf51a032d062ec5f58af5ca6a44d082c35299c96376a41cbb33034675"},
+ {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:11691cf4dc5b94236ccc609b70fec991234e7ef8d4c02dd0c9668d1e486f5abf"},
+ {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:31d1e1c0dbf19ebccbfd62eff461518dcb1e307b195e93bba60c965a4dcf1ba0"},
+ {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:11a67c0d562e07067c4e86bffc1553f2cf5b664d6111c894671b2b8712f3aba5"},
+ {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:bb01ba6b0d3f6c68b89fce7305080145d4877ad3acaed424bae4d4ee75faa950"},
+ {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:44db35a9e15d6fe5c40d74952e803b1d96e964f683b5a78c3cc64eb177878155"},
+ {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:844a9b460871ee0a0b0b68a64890dae9c415e513db0f4a7e3cab41a0f2fedf33"},
+ {file = "aiohttp-3.8.1-cp38-cp38-win32.whl", hash = "sha256:7d08744e9bae2ca9c382581f7dce1273fe3c9bae94ff572c3626e8da5b193c6a"},
+ {file = "aiohttp-3.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:04d48b8ce6ab3cf2097b1855e1505181bdd05586ca275f2505514a6e274e8e75"},
+ {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f5315a2eb0239185af1bddb1abf472d877fede3cc8d143c6cddad37678293237"},
+ {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a996d01ca39b8dfe77440f3cd600825d05841088fd6bc0144cc6c2ec14cc5f74"},
+ {file = "aiohttp-3.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:13487abd2f761d4be7c8ff9080de2671e53fff69711d46de703c310c4c9317ca"},
+ {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea302f34477fda3f85560a06d9ebdc7fa41e82420e892fc50b577e35fc6a50b2"},
+ {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2f635ce61a89c5732537a7896b6319a8fcfa23ba09bec36e1b1ac0ab31270d2"},
+ {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e999f2d0e12eea01caeecb17b653f3713d758f6dcc770417cf29ef08d3931421"},
+ {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0770e2806a30e744b4e21c9d73b7bee18a1cfa3c47991ee2e5a65b887c49d5cf"},
+ {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d15367ce87c8e9e09b0f989bfd72dc641bcd04ba091c68cd305312d00962addd"},
+ {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6c7cefb4b0640703eb1069835c02486669312bf2f12b48a748e0a7756d0de33d"},
+ {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:71927042ed6365a09a98a6377501af5c9f0a4d38083652bcd2281a06a5976724"},
+ {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:28d490af82bc6b7ce53ff31337a18a10498303fe66f701ab65ef27e143c3b0ef"},
+ {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:b6613280ccedf24354406caf785db748bebbddcf31408b20c0b48cb86af76866"},
+ {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81e3d8c34c623ca4e36c46524a3530e99c0bc95ed068fd6e9b55cb721d408fb2"},
+ {file = "aiohttp-3.8.1-cp39-cp39-win32.whl", hash = "sha256:7187a76598bdb895af0adbd2fb7474d7f6025d170bc0a1130242da817ce9e7d1"},
+ {file = "aiohttp-3.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:1c182cb873bc91b411e184dab7a2b664d4fea2743df0e4d57402f7f3fa644bac"},
+ {file = "aiohttp-3.8.1.tar.gz", hash = "sha256:fc5471e1a54de15ef71c1bc6ebe80d4dc681ea600e68bfd1cbce40427f0b7578"},
+]
[package.dependencies]
aiosignal = ">=1.1.2"
@@ -20,22 +96,30 @@ speedups = ["Brotli", "aiodns", "cchardet"]
[[package]]
name = "aiosignal"
-version = "1.2.0"
+version = "1.3.1"
description = "aiosignal: a list of registered asynchronous callbacks"
category = "main"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
+files = [
+ {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"},
+ {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"},
+]
[package.dependencies]
frozenlist = ">=1.1.0"
[[package]]
name = "alabaster"
-version = "0.7.12"
+version = "0.7.13"
description = "A configurable sidebar-enabled Sphinx theme"
category = "dev"
optional = false
-python-versions = "*"
+python-versions = ">=3.6"
+files = [
+ {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"},
+ {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"},
+]
[[package]]
name = "async-timeout"
@@ -44,66 +128,94 @@ description = "Timeout context manager for asyncio programs"
category = "main"
optional = false
python-versions = ">=3.6"
+files = [
+ {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"},
+ {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"},
+]
[[package]]
name = "atomicwrites"
-version = "1.4.0"
+version = "1.4.1"
description = "Atomic file writes."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"},
+]
[[package]]
name = "attrs"
-version = "21.4.0"
+version = "23.1.0"
description = "Classes Without Boilerplate"
category = "main"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = ">=3.7"
+files = [
+ {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"},
+ {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"},
+]
[package.extras]
-dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"]
-docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
-tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"]
-tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"]
+cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
+dev = ["attrs[docs,tests]", "pre-commit"]
+docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
+tests = ["attrs[tests-no-zope]", "zope-interface"]
+tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
[[package]]
name = "babel"
-version = "2.9.1"
+version = "2.12.1"
description = "Internationalization utilities"
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=3.7"
+files = [
+ {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"},
+ {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"},
+]
[package.dependencies]
-pytz = ">=2015.7"
+pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""}
[[package]]
name = "certifi"
-version = "2021.10.8"
+version = "2022.12.7"
description = "Python package for providing Mozilla's CA Bundle."
category = "dev"
optional = false
-python-versions = "*"
+python-versions = ">=3.6"
+files = [
+ {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
+ {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
+]
[[package]]
name = "charset-normalizer"
-version = "2.0.12"
+version = "2.1.1"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "main"
optional = false
-python-versions = ">=3.5.0"
+python-versions = ">=3.6.0"
+files = [
+ {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"},
+ {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"},
+]
[package.extras]
unicode-backport = ["unicodedata2"]
[[package]]
name = "colorama"
-version = "0.4.4"
+version = "0.4.6"
description = "Cross-platform colored terminal text."
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
[[package]]
name = "deprecated"
@@ -112,6 +224,10 @@ description = "Python @deprecated decorator to deprecate old python classes, fun
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "Deprecated-1.2.9-py2.py3-none-any.whl", hash = "sha256:55b41a15bda04c6a2c0d27dd4c2b7b81ffa6348c9cad8f077ac1978c59927ab9"},
+ {file = "Deprecated-1.2.9.tar.gz", hash = "sha256:0cf37d293a96805c6afd8b5fc525cb40f23a2cac9b2d066ac3bd4b04e72ceccc"},
+]
[package.dependencies]
wrapt = ">=1.10,<2"
@@ -126,30 +242,118 @@ description = "Docutils -- Python Documentation Utilities"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+ {file = "docutils-0.16-py2.py3-none-any.whl", hash = "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af"},
+ {file = "docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"},
+]
[[package]]
name = "frozenlist"
-version = "1.3.0"
+version = "1.3.3"
description = "A list-like structure which implements collections.abc.MutableSequence"
category = "main"
optional = false
python-versions = ">=3.7"
+files = [
+ {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"},
+ {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"},
+ {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"},
+ {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"},
+ {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"},
+ {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"},
+ {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"},
+ {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"},
+ {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"},
+ {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"},
+ {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"},
+ {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"},
+ {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"},
+ {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"},
+ {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"},
+ {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"},
+ {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"},
+ {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"},
+ {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"},
+ {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"},
+ {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"},
+ {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"},
+ {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"},
+ {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"},
+ {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"},
+ {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"},
+ {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"},
+ {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"},
+ {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"},
+ {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"},
+ {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"},
+ {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"},
+ {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"},
+ {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"},
+ {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"},
+ {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"},
+ {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"},
+ {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"},
+ {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"},
+ {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"},
+ {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"},
+ {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"},
+ {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"},
+ {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"},
+ {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"},
+ {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"},
+ {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"},
+ {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"},
+ {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"},
+ {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"},
+ {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"},
+ {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"},
+ {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"},
+ {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"},
+ {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"},
+ {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"},
+ {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"},
+ {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"},
+ {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"},
+ {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"},
+ {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"},
+]
[[package]]
name = "idna"
-version = "3.3"
+version = "3.4"
description = "Internationalized Domain Names in Applications (IDNA)"
category = "main"
optional = false
python-versions = ">=3.5"
+files = [
+ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
+ {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
+]
[[package]]
name = "imagesize"
-version = "1.3.0"
+version = "1.4.1"
description = "Getting image size from png/jpeg/jpeg2000/gif file"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"},
+ {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"},
+]
[[package]]
name = "inflection"
@@ -158,14 +362,22 @@ description = "A port of Ruby on Rails inflector to Python"
category = "main"
optional = false
python-versions = ">=3.5"
+files = [
+ {file = "inflection-0.4.0-py2.py3-none-any.whl", hash = "sha256:9a15d3598f01220e93f2207c432cfede50daff53137ce660fb8be838ef1ca6cc"},
+ {file = "inflection-0.4.0.tar.gz", hash = "sha256:32a5c3341d9583ec319548b9015b7fbdf8c429cbcb575d326c33ae3a0e90d52c"},
+]
[[package]]
name = "jinja2"
-version = "3.1.1"
+version = "3.1.2"
description = "A very fast and expressive template engine."
category = "dev"
optional = false
python-versions = ">=3.7"
+files = [
+ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"},
+ {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"},
+]
[package.dependencies]
MarkupSafe = ">=2.0"
@@ -175,27 +387,159 @@ i18n = ["Babel (>=2.7)"]
[[package]]
name = "markupsafe"
-version = "2.1.1"
+version = "2.1.2"
description = "Safely add untrusted strings to HTML/XML markup."
category = "dev"
optional = false
python-versions = ">=3.7"
+files = [
+ {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"},
+ {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"},
+]
[[package]]
name = "more-itertools"
-version = "8.12.0"
+version = "9.1.0"
description = "More routines for operating on iterables, beyond itertools"
category = "dev"
optional = false
-python-versions = ">=3.5"
+python-versions = ">=3.7"
+files = [
+ {file = "more-itertools-9.1.0.tar.gz", hash = "sha256:cabaa341ad0389ea83c17a94566a53ae4c9d07349861ecb14dc6d0345cf9ac5d"},
+ {file = "more_itertools-9.1.0-py3-none-any.whl", hash = "sha256:d2bc7f02446e86a68911e58ded76d6561eea00cddfb2a91e7019bbb586c799f3"},
+]
[[package]]
name = "multidict"
-version = "6.0.2"
+version = "6.0.4"
description = "multidict implementation"
category = "main"
optional = false
python-versions = ">=3.7"
+files = [
+ {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"},
+ {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"},
+ {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"},
+ {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"},
+ {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"},
+ {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"},
+ {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"},
+ {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"},
+ {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"},
+ {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"},
+ {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"},
+ {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"},
+ {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"},
+ {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"},
+ {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"},
+ {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"},
+ {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"},
+ {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"},
+ {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"},
+ {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"},
+ {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"},
+ {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"},
+ {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"},
+ {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"},
+]
[[package]]
name = "mypy"
@@ -204,6 +548,22 @@ description = "Optional static typing for Python"
category = "dev"
optional = false
python-versions = ">=3.5"
+files = [
+ {file = "mypy-0.770-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:a34b577cdf6313bf24755f7a0e3f3c326d5c1f4fe7422d1d06498eb25ad0c600"},
+ {file = "mypy-0.770-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:86c857510a9b7c3104cf4cde1568f4921762c8f9842e987bc03ed4f160925754"},
+ {file = "mypy-0.770-cp35-cp35m-win_amd64.whl", hash = "sha256:a8ffcd53cb5dfc131850851cc09f1c44689c2812d0beb954d8138d4f5fc17f65"},
+ {file = "mypy-0.770-cp36-cp36m-macosx_10_6_x86_64.whl", hash = "sha256:7687f6455ec3ed7649d1ae574136835a4272b65b3ddcf01ab8704ac65616c5ce"},
+ {file = "mypy-0.770-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3beff56b453b6ef94ecb2996bea101a08f1f8a9771d3cbf4988a61e4d9973761"},
+ {file = "mypy-0.770-cp36-cp36m-win_amd64.whl", hash = "sha256:15b948e1302682e3682f11f50208b726a246ab4e6c1b39f9264a8796bb416aa2"},
+ {file = "mypy-0.770-cp37-cp37m-macosx_10_6_x86_64.whl", hash = "sha256:b90928f2d9eb2f33162405f32dde9f6dcead63a0971ca8a1b50eb4ca3e35ceb8"},
+ {file = "mypy-0.770-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:c56ffe22faa2e51054c5f7a3bc70a370939c2ed4de308c690e7949230c995913"},
+ {file = "mypy-0.770-cp37-cp37m-win_amd64.whl", hash = "sha256:8dfb69fbf9f3aeed18afffb15e319ca7f8da9642336348ddd6cab2713ddcf8f9"},
+ {file = "mypy-0.770-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:219a3116ecd015f8dca7b5d2c366c973509dfb9a8fc97ef044a36e3da66144a1"},
+ {file = "mypy-0.770-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7ec45a70d40ede1ec7ad7f95b3c94c9cf4c186a32f6bacb1795b60abd2f9ef27"},
+ {file = "mypy-0.770-cp38-cp38-win_amd64.whl", hash = "sha256:f91c7ae919bbc3f96cd5e5b2e786b2b108343d1d7972ea130f7de27fdd547cf3"},
+ {file = "mypy-0.770-py3-none-any.whl", hash = "sha256:3b1fc683fb204c6b4403a1ef23f0b1fac8e4477091585e0c8c54cbdf7d7bb164"},
+ {file = "mypy-0.770.tar.gz", hash = "sha256:8a627507ef9b307b46a1fea9513d5c98680ba09591253082b4c48697ba05a4ae"},
+]
[package.dependencies]
mypy-extensions = ">=0.4.3,<0.5.0"
@@ -215,22 +575,26 @@ dmypy = ["psutil (>=4.0)"]
[[package]]
name = "mypy-extensions"
-version = "0.4.3"
+version = "0.4.4"
description = "Experimental type system extensions for programs checked with the mypy typechecker."
category = "dev"
optional = false
-python-versions = "*"
+python-versions = ">=2.7"
+files = [
+ {file = "mypy_extensions-0.4.4.tar.gz", hash = "sha256:c8b707883a96efe9b4bb3aaf0dcc07e7e217d7d8368eec4db4049ee9e142f4fd"},
+]
[[package]]
name = "packaging"
-version = "21.3"
+version = "23.1"
description = "Core utilities for Python packages"
category = "dev"
optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
+python-versions = ">=3.7"
+files = [
+ {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"},
+ {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"},
+]
[[package]]
name = "pluggy"
@@ -239,6 +603,10 @@ description = "plugin and hook calling mechanisms for python"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"},
+ {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"},
+]
[package.extras]
dev = ["pre-commit", "tox"]
@@ -250,25 +618,25 @@ description = "library with cross-python path, ini-parsing, io, code, log facili
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
+ {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
+]
[[package]]
name = "pygments"
-version = "2.11.2"
+version = "2.15.1"
description = "Pygments is a syntax highlighting package written in Python."
category = "dev"
optional = false
-python-versions = ">=3.5"
-
-[[package]]
-name = "pyparsing"
-version = "3.0.7"
-description = "Python parsing module"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
+files = [
+ {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"},
+ {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"},
+]
[package.extras]
-diagrams = ["jinja2", "railroad-diagrams"]
+plugins = ["importlib-metadata"]
[[package]]
name = "pytest"
@@ -277,6 +645,10 @@ description = "pytest: simple powerful testing with Python"
category = "dev"
optional = false
python-versions = ">=3.5"
+files = [
+ {file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"},
+ {file = "pytest-5.4.3.tar.gz", hash = "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"},
+]
[package.dependencies]
atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
@@ -294,37 +666,49 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm
[[package]]
name = "pytz"
-version = "2022.1"
+version = "2023.3"
description = "World timezone definitions, modern and historical"
category = "dev"
optional = false
python-versions = "*"
+files = [
+ {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"},
+ {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"},
+]
[[package]]
name = "requests"
-version = "2.27.1"
+version = "2.28.2"
description = "Python HTTP for Humans."
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+python-versions = ">=3.7, <4"
+files = [
+ {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"},
+ {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"},
+]
[package.dependencies]
certifi = ">=2017.4.17"
-charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""}
-idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""}
+charset-normalizer = ">=2,<4"
+idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<1.27"
[package.extras]
-socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
-use-chardet-on-py3 = ["chardet (>=3.0.2,<5)"]
+socks = ["PySocks (>=1.5.6,!=1.5.7)"]
+use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "setuptools"
-version = "65.7.0"
+version = "67.7.1"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
category = "dev"
optional = false
python-versions = ">=3.7"
+files = [
+ {file = "setuptools-67.7.1-py3-none-any.whl", hash = "sha256:6f0839fbdb7e3cfef1fc38d7954f5c1c26bf4eebb155a55c9bf8faf997b9fb67"},
+ {file = "setuptools-67.7.1.tar.gz", hash = "sha256:bb16732e8eb928922eabaa022f881ae2b7cdcfaf9993ef1f5e841a96d32b8e0c"},
+]
[package.extras]
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
@@ -338,6 +722,10 @@ description = "This package provides 29 stemmers for 28 languages generated from
category = "dev"
optional = false
python-versions = "*"
+files = [
+ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
+ {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
+]
[[package]]
name = "sphinx"
@@ -346,6 +734,10 @@ description = "Python documentation generator"
category = "dev"
optional = false
python-versions = ">=3.5"
+files = [
+ {file = "Sphinx-3.5.4-py3-none-any.whl", hash = "sha256:2320d4e994a191f4b4be27da514e46b3d6b420f2ff895d064f52415d342461e8"},
+ {file = "Sphinx-3.5.4.tar.gz", hash = "sha256:19010b7b9fa0dc7756a6e105b2aacd3a80f798af3c25c273be64d7beeb482cb1"},
+]
[package.dependencies]
alabaster = ">=0.7,<0.8"
@@ -378,6 +770,10 @@ description = "Type hints (PEP 484) support for the Sphinx autodoc extension"
category = "dev"
optional = false
python-versions = ">=3.6"
+files = [
+ {file = "sphinx-autodoc-typehints-1.12.0.tar.gz", hash = "sha256:193617d9dbe0847281b1399d369e74e34cd959c82e02c7efde077fca908a9f52"},
+ {file = "sphinx_autodoc_typehints-1.12.0-py3-none-any.whl", hash = "sha256:5e81776ec422dd168d688ab60f034fccfafbcd94329e9537712c93003bddc04a"},
+]
[package.dependencies]
Sphinx = ">=3.0"
@@ -393,17 +789,25 @@ description = "Read the Docs theme for Sphinx"
category = "dev"
optional = false
python-versions = "*"
+files = [
+ {file = "sphinx_rtd_theme-0.4.3-py2.py3-none-any.whl", hash = "sha256:00cf895504a7895ee433807c62094cf1e95f065843bf3acd17037c3e9a2becd4"},
+ {file = "sphinx_rtd_theme-0.4.3.tar.gz", hash = "sha256:728607e34d60456d736cc7991fd236afb828b21b82f956c5ea75f94c8414040a"},
+]
[package.dependencies]
sphinx = "*"
[[package]]
name = "sphinxcontrib-applehelp"
-version = "1.0.2"
-description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books"
+version = "1.0.4"
+description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books"
category = "dev"
optional = false
-python-versions = ">=3.5"
+python-versions = ">=3.8"
+files = [
+ {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"},
+ {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"},
+]
[package.extras]
lint = ["docutils-stubs", "flake8", "mypy"]
@@ -416,6 +820,10 @@ description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp
category = "dev"
optional = false
python-versions = ">=3.5"
+files = [
+ {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"},
+ {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"},
+]
[package.extras]
lint = ["docutils-stubs", "flake8", "mypy"]
@@ -423,11 +831,15 @@ test = ["pytest"]
[[package]]
name = "sphinxcontrib-htmlhelp"
-version = "2.0.0"
+version = "2.0.1"
description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files"
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.8"
+files = [
+ {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"},
+ {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"},
+]
[package.extras]
lint = ["docutils-stubs", "flake8", "mypy"]
@@ -440,6 +852,10 @@ description = "A sphinx extension which renders display math in HTML via JavaScr
category = "dev"
optional = false
python-versions = ">=3.5"
+files = [
+ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"},
+ {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"},
+]
[package.extras]
test = ["flake8", "mypy", "pytest"]
@@ -451,6 +867,10 @@ description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp d
category = "dev"
optional = false
python-versions = ">=3.5"
+files = [
+ {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"},
+ {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"},
+]
[package.extras]
lint = ["docutils-stubs", "flake8", "mypy"]
@@ -463,6 +883,10 @@ description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs
category = "dev"
optional = false
python-versions = ">=3.5"
+files = [
+ {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"},
+ {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"},
+]
[package.extras]
lint = ["docutils-stubs", "flake8", "mypy"]
@@ -475,641 +899,254 @@ description = "a fork of Python 2 and 3 ast modules with type comment support"
category = "dev"
optional = false
python-versions = "*"
+files = [
+ {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"},
+ {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"},
+ {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"},
+ {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"},
+ {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"},
+ {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"},
+ {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"},
+ {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"},
+ {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"},
+ {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"},
+ {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"},
+ {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"},
+ {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"},
+ {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"},
+ {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"},
+ {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"},
+ {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"},
+ {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"},
+ {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"},
+ {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"},
+ {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"},
+ {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"},
+ {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"},
+ {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"},
+ {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"},
+ {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"},
+ {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"},
+ {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"},
+ {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"},
+ {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"},
+]
[[package]]
name = "typing-extensions"
-version = "4.1.1"
-description = "Backported and Experimental Type Hints for Python 3.6+"
+version = "4.5.0"
+description = "Backported and Experimental Type Hints for Python 3.7+"
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
+files = [
+ {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"},
+ {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"},
+]
[[package]]
name = "urllib3"
-version = "1.26.9"
+version = "1.26.15"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+files = [
+ {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"},
+ {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"},
+]
[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
-secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"]
+secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
name = "wcwidth"
-version = "0.2.5"
+version = "0.2.6"
description = "Measures the displayed width of unicode strings in a terminal"
category = "dev"
optional = false
python-versions = "*"
+files = [
+ {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"},
+ {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"},
+]
[[package]]
name = "wrapt"
-version = "1.14.0"
+version = "1.15.0"
description = "Module for decorators, wrappers and monkey patching."
category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+files = [
+ {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"},
+ {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"},
+ {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"},
+ {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"},
+ {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"},
+ {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"},
+ {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"},
+ {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"},
+ {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"},
+ {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"},
+ {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"},
+ {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"},
+ {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"},
+ {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"},
+ {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"},
+ {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"},
+ {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"},
+ {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"},
+ {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"},
+ {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"},
+ {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"},
+ {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"},
+ {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"},
+ {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"},
+ {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"},
+ {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"},
+ {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"},
+ {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"},
+ {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"},
+ {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"},
+ {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"},
+ {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"},
+ {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"},
+ {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"},
+ {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"},
+ {file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"},
+ {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"},
+ {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"},
+ {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"},
+ {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"},
+ {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"},
+ {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"},
+ {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"},
+ {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"},
+ {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"},
+ {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"},
+ {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"},
+ {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"},
+ {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"},
+ {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"},
+ {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"},
+ {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"},
+ {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"},
+ {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"},
+ {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"},
+ {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"},
+ {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"},
+ {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"},
+ {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"},
+ {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"},
+ {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"},
+ {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"},
+ {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"},
+ {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"},
+ {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"},
+ {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"},
+ {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"},
+ {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"},
+ {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"},
+ {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"},
+ {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"},
+ {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"},
+ {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"},
+ {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"},
+ {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"},
+]
[[package]]
name = "yarl"
-version = "1.7.2"
+version = "1.9.1"
description = "Yet another URL library"
category = "main"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
+files = [
+ {file = "yarl-1.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e124b283a04cc06d22443cae536f93d86cd55108fa369f22b8fe1f2288b2fe1c"},
+ {file = "yarl-1.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:56956b13ec275de31fe4fb991510b735c4fb3e1b01600528c952b9ac90464430"},
+ {file = "yarl-1.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ecaa5755a39f6f26079bf13f336c67af589c222d76b53cd3824d3b684b84d1f1"},
+ {file = "yarl-1.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92a101f6d5a9464e86092adc36cd40ef23d18a25bfb1eb32eaeb62edc22776bb"},
+ {file = "yarl-1.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92e37999e36f9f3ded78e9d839face6baa2abdf9344ea8ed2735f495736159de"},
+ {file = "yarl-1.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef7e2f6c47c41e234600a02e1356b799761485834fe35d4706b0094cb3a587ee"},
+ {file = "yarl-1.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d7a0075a55380b19aa43b9e8056e128b058460d71d75018a4f9d60ace01e78c"},
+ {file = "yarl-1.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f01351b7809182822b21061d2a4728b7b9e08f4585ba90ee4c5c4d3faa0812"},
+ {file = "yarl-1.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6cf47fe9df9b1ededc77e492581cdb6890a975ad96b4172e1834f1b8ba0fc3ba"},
+ {file = "yarl-1.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:098bdc06ffb4db39c73883325b8c738610199f5f12e85339afedf07e912a39af"},
+ {file = "yarl-1.9.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:6cdb47cbbacae8e1d7941b0d504d0235d686090eef5212ca2450525905e9cf02"},
+ {file = "yarl-1.9.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:73a4b46689f2d59c8ec6b71c9a0cdced4e7863dd6eb98a8c30ea610e191f9e1c"},
+ {file = "yarl-1.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:65d952e464df950eed32bb5dcbc1b4443c7c2de4d7abd7265b45b1b3b27f5fa2"},
+ {file = "yarl-1.9.1-cp310-cp310-win32.whl", hash = "sha256:39a7a9108e9fc633ae381562f8f0355bb4ba00355218b5fb19cf5263fcdbfa68"},
+ {file = "yarl-1.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:b63d41e0eecf3e3070d44f97456cf351fff7cb960e97ecb60a936b877ff0b4f6"},
+ {file = "yarl-1.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4295790981630c4dab9d6de7b0f555a4c8defe3ed7704a8e9e595a321e59a0f5"},
+ {file = "yarl-1.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b2b2382d59dec0f1fdca18ea429c4c4cee280d5e0dbc841180abb82e188cf6e9"},
+ {file = "yarl-1.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:575975d28795a61e82c85f114c02333ca54cbd325fd4e4b27598c9832aa732e7"},
+ {file = "yarl-1.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bb794882818fae20ff65348985fdf143ea6dfaf6413814db1848120db8be33e"},
+ {file = "yarl-1.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89da1fd6068553e3a333011cc17ad91c414b2100c32579ddb51517edc768b49c"},
+ {file = "yarl-1.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d817593d345fefda2fae877accc8a0d9f47ada57086da6125fa02a62f6d1a94"},
+ {file = "yarl-1.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85aa6fd779e194901386709e0eedd45710b68af2709f82a84839c44314b68c10"},
+ {file = "yarl-1.9.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eed9827033b7f67ad12cb70bd0cb59d36029144a7906694317c2dbf5c9eb5ddd"},
+ {file = "yarl-1.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:df747104ef27ab1aa9a1145064fa9ea26ad8cf24bfcbdba7db7abf0f8b3676b9"},
+ {file = "yarl-1.9.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:efec77851231410125cb5be04ec96fa4a075ca637f415a1f2d2c900b09032a8a"},
+ {file = "yarl-1.9.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d5c407e530cf2979ea383885516ae79cc4f3c3530623acf5e42daf521f5c2564"},
+ {file = "yarl-1.9.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f76edb386178a54ea7ceffa798cb830c3c22ab50ea10dfb25dc952b04848295f"},
+ {file = "yarl-1.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:75676110bce59944dd48fd18d0449bd37eaeb311b38a0c768f7670864b5f8b68"},
+ {file = "yarl-1.9.1-cp311-cp311-win32.whl", hash = "sha256:9ba5a18c4fbd408fe49dc5da85478a76bc75c1ce912d7fd7b43ed5297c4403e1"},
+ {file = "yarl-1.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:b20a5ddc4e243cbaa54886bfe9af6ffc4ba4ef58f17f1bb691e973eb65bba84d"},
+ {file = "yarl-1.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:791357d537a09a194f92b834f28c98d074e7297bac0a8f1d5b458a906cafa17c"},
+ {file = "yarl-1.9.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89099c887338608da935ba8bee027564a94f852ac40e472de15d8309517ad5fe"},
+ {file = "yarl-1.9.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:395ea180257a3742d09dcc5071739682a95f7874270ebe3982d6696caec75be0"},
+ {file = "yarl-1.9.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:90ebaf448b5f048352ec7c76cb8d452df30c27cb6b8627dfaa9cf742a14f141a"},
+ {file = "yarl-1.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f878a78ed2ccfbd973cab46dd0933ecd704787724db23979e5731674d76eb36f"},
+ {file = "yarl-1.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74390c2318d066962500045aa145f5412169bce842e734b8c3e6e3750ad5b817"},
+ {file = "yarl-1.9.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f8e73f526140c1c32f5fca4cd0bc3b511a1abcd948f45b2a38a95e4edb76ca72"},
+ {file = "yarl-1.9.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ac8e593df1fbea820da7676929f821a0c7c2cecb8477d010254ce8ed54328ea8"},
+ {file = "yarl-1.9.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:01cf88cb80411978a14aa49980968c1aeb7c18a90ac978c778250dd234d8e0ba"},
+ {file = "yarl-1.9.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:97d76a3128f48fa1c721ef8a50e2c2f549296b2402dc8a8cde12ff60ed922f53"},
+ {file = "yarl-1.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:01a073c9175481dfed6b40704a1b67af5a9435fc4a58a27d35fd6b303469b0c7"},
+ {file = "yarl-1.9.1-cp37-cp37m-win32.whl", hash = "sha256:ecad20c3ef57c513dce22f58256361d10550a89e8eaa81d5082f36f8af305375"},
+ {file = "yarl-1.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f5bcb80006efe9bf9f49ae89711253dd06df8053ff814622112a9219346566a7"},
+ {file = "yarl-1.9.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e7ddebeabf384099814353a2956ed3ab5dbaa6830cc7005f985fcb03b5338f05"},
+ {file = "yarl-1.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:13a1ad1f35839b3bb5226f59816b71e243d95d623f5b392efaf8820ddb2b3cd5"},
+ {file = "yarl-1.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f0cd87949d619157a0482c6c14e5011f8bf2bc0b91cb5087414d9331f4ef02dd"},
+ {file = "yarl-1.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d21887cbcf6a3cc5951662d8222bc9c04e1b1d98eebe3bb659c3a04ed49b0eec"},
+ {file = "yarl-1.9.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4764114e261fe49d5df9b316b3221493d177247825c735b2aae77bc2e340d800"},
+ {file = "yarl-1.9.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3abe37fd89a93ebe0010417ca671f422fa6fcffec54698f623b09f46b4d4a512"},
+ {file = "yarl-1.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fe3a1c073ab80a28a06f41d2b623723046709ed29faf2c56bea41848597d86"},
+ {file = "yarl-1.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3b5f8da07a21f2e57551f88a6709c2d340866146cf7351e5207623cfe8aad16"},
+ {file = "yarl-1.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f6413ff5edfb9609e2769e32ce87a62353e66e75d264bf0eaad26fb9daa8f2"},
+ {file = "yarl-1.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b5d5fb6c94b620a7066a3adb7c246c87970f453813979818e4707ac32ce4d7bd"},
+ {file = "yarl-1.9.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f206adb89424dca4a4d0b31981869700e44cd62742527e26d6b15a510dd410a2"},
+ {file = "yarl-1.9.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:44fa6158e6b4b8ccfa2872c3900a226b29e8ce543ce3e48aadc99816afa8874d"},
+ {file = "yarl-1.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:08c8599d6aa8a24425f8635f6c06fa8726afe3be01c8e53e236f519bcfa5db5b"},
+ {file = "yarl-1.9.1-cp38-cp38-win32.whl", hash = "sha256:6b09cce412386ea9b4dda965d8e78d04ac5b5792b2fa9cced3258ec69c7d1c16"},
+ {file = "yarl-1.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:09c56a32c26e24ef98d5757c5064e252836f621f9a8b42737773aa92936b8e08"},
+ {file = "yarl-1.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b86e98c3021b7e2740d8719bf074301361bf2f51221ca2765b7a58afbfbd9042"},
+ {file = "yarl-1.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5faf3ec98747318cb980aaf9addf769da68a66431fc203a373d95d7ee9c1fbb4"},
+ {file = "yarl-1.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a21789bdf28549d4eb1de6910cabc762c9f6ae3eef85efc1958197c1c6ef853b"},
+ {file = "yarl-1.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8b8d4b478a9862447daef4cafc89d87ea4ed958672f1d11db7732b77ead49cc"},
+ {file = "yarl-1.9.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:307a782736ebf994e7600dcaeea3b3113083584da567272f2075f1540919d6b3"},
+ {file = "yarl-1.9.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:46c4010de941e2e1365c07fb4418ddca10fcff56305a6067f5ae857f8c98f3a7"},
+ {file = "yarl-1.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bab67d041c78e305ff3eef5e549304d843bd9b603c8855b68484ee663374ce15"},
+ {file = "yarl-1.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1baf8cdaaab65d9ccedbf8748d626ad648b74b0a4d033e356a2f3024709fb82f"},
+ {file = "yarl-1.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:27efc2e324f72df02818cd72d7674b1f28b80ab49f33a94f37c6473c8166ce49"},
+ {file = "yarl-1.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ca14b84091700ae7c1fcd3a6000bd4ec1a3035009b8bcb94f246741ca840bb22"},
+ {file = "yarl-1.9.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c3ca8d71b23bdf164b36d06df2298ec8a5bd3de42b17bf3e0e8e6a7489195f2c"},
+ {file = "yarl-1.9.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:8c72a1dc7e2ea882cd3df0417c808ad3b69e559acdc43f3b096d67f2fb801ada"},
+ {file = "yarl-1.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d966cd59df9a4b218480562e8daab39e87e746b78a96add51a3ab01636fc4291"},
+ {file = "yarl-1.9.1-cp39-cp39-win32.whl", hash = "sha256:518a92a34c741836a315150460b5c1c71ae782d569eabd7acf53372e437709f7"},
+ {file = "yarl-1.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:78755ce43b6e827e65ec0c68be832f86d059fcf05d4b33562745ebcfa91b26b1"},
+ {file = "yarl-1.9.1.tar.gz", hash = "sha256:5ce0bcab7ec759062c818d73837644cde567ab8aa1e0d6c45db38dfb7c284441"},
+]
[package.dependencies]
idna = ">=2.0"
multidict = ">=4.0"
[metadata]
-lock-version = "1.1"
-python-versions = "^3.8"
-content-hash = "f01943eaad90b858f6366f55b6c2d4eab02e20e85fc4df4447611a3387a152a3"
-
-[metadata.files]
-aiohttp = [
- {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1ed0b6477896559f17b9eaeb6d38e07f7f9ffe40b9f0f9627ae8b9926ae260a8"},
- {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7dadf3c307b31e0e61689cbf9e06be7a867c563d5a63ce9dca578f956609abf8"},
- {file = "aiohttp-3.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a79004bb58748f31ae1cbe9fa891054baaa46fb106c2dc7af9f8e3304dc30316"},
- {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12de6add4038df8f72fac606dff775791a60f113a725c960f2bab01d8b8e6b15"},
- {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f0d5f33feb5f69ddd57a4a4bd3d56c719a141080b445cbf18f238973c5c9923"},
- {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eaba923151d9deea315be1f3e2b31cc39a6d1d2f682f942905951f4e40200922"},
- {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:099ebd2c37ac74cce10a3527d2b49af80243e2a4fa39e7bce41617fbc35fa3c1"},
- {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e5d962cf7e1d426aa0e528a7e198658cdc8aa4fe87f781d039ad75dcd52c516"},
- {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fa0ffcace9b3aa34d205d8130f7873fcfefcb6a4dd3dd705b0dab69af6712642"},
- {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:61bfc23df345d8c9716d03717c2ed5e27374e0fe6f659ea64edcd27b4b044cf7"},
- {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:31560d268ff62143e92423ef183680b9829b1b482c011713ae941997921eebc8"},
- {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:01d7bdb774a9acc838e6b8f1d114f45303841b89b95984cbb7d80ea41172a9e3"},
- {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97ef77eb6b044134c0b3a96e16abcb05ecce892965a2124c566af0fd60f717e2"},
- {file = "aiohttp-3.8.1-cp310-cp310-win32.whl", hash = "sha256:c2aef4703f1f2ddc6df17519885dbfa3514929149d3ff900b73f45998f2532fa"},
- {file = "aiohttp-3.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:713ac174a629d39b7c6a3aa757b337599798da4c1157114a314e4e391cd28e32"},
- {file = "aiohttp-3.8.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:473d93d4450880fe278696549f2e7aed8cd23708c3c1997981464475f32137db"},
- {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b5eeae8e019e7aad8af8bb314fb908dd2e028b3cdaad87ec05095394cce632"},
- {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3af642b43ce56c24d063325dd2cf20ee012d2b9ba4c3c008755a301aaea720ad"},
- {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3630c3ef435c0a7c549ba170a0633a56e92629aeed0e707fec832dee313fb7a"},
- {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4a4a4e30bf1edcad13fb0804300557aedd07a92cabc74382fdd0ba6ca2661091"},
- {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6f8b01295e26c68b3a1b90efb7a89029110d3a4139270b24fda961893216c440"},
- {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a25fa703a527158aaf10dafd956f7d42ac6d30ec80e9a70846253dd13e2f067b"},
- {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5bfde62d1d2641a1f5173b8c8c2d96ceb4854f54a44c23102e2ccc7e02f003ec"},
- {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:51467000f3647d519272392f484126aa716f747859794ac9924a7aafa86cd411"},
- {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:03a6d5349c9ee8f79ab3ff3694d6ce1cfc3ced1c9d36200cb8f08ba06bd3b782"},
- {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:102e487eeb82afac440581e5d7f8f44560b36cf0bdd11abc51a46c1cd88914d4"},
- {file = "aiohttp-3.8.1-cp36-cp36m-win32.whl", hash = "sha256:4aed991a28ea3ce320dc8ce655875e1e00a11bdd29fe9444dd4f88c30d558602"},
- {file = "aiohttp-3.8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b0e20cddbd676ab8a64c774fefa0ad787cc506afd844de95da56060348021e96"},
- {file = "aiohttp-3.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:37951ad2f4a6df6506750a23f7cbabad24c73c65f23f72e95897bb2cecbae676"},
- {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c23b1ad869653bc818e972b7a3a79852d0e494e9ab7e1a701a3decc49c20d51"},
- {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15b09b06dae900777833fe7fc4b4aa426556ce95847a3e8d7548e2d19e34edb8"},
- {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:477c3ea0ba410b2b56b7efb072c36fa91b1e6fc331761798fa3f28bb224830dd"},
- {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2f2f69dca064926e79997f45b2f34e202b320fd3782f17a91941f7eb85502ee2"},
- {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ef9612483cb35171d51d9173647eed5d0069eaa2ee812793a75373447d487aa4"},
- {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6d69f36d445c45cda7b3b26afef2fc34ef5ac0cdc75584a87ef307ee3c8c6d00"},
- {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:55c3d1072704d27401c92339144d199d9de7b52627f724a949fc7d5fc56d8b93"},
- {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b9d00268fcb9f66fbcc7cd9fe423741d90c75ee029a1d15c09b22d23253c0a44"},
- {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:07b05cd3305e8a73112103c834e91cd27ce5b4bd07850c4b4dbd1877d3f45be7"},
- {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c34dc4958b232ef6188c4318cb7b2c2d80521c9a56c52449f8f93ab7bc2a8a1c"},
- {file = "aiohttp-3.8.1-cp37-cp37m-win32.whl", hash = "sha256:d2f9b69293c33aaa53d923032fe227feac867f81682f002ce33ffae978f0a9a9"},
- {file = "aiohttp-3.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6ae828d3a003f03ae31915c31fa684b9890ea44c9c989056fea96e3d12a9fa17"},
- {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0c7ebbbde809ff4e970824b2b6cb7e4222be6b95a296e46c03cf050878fc1785"},
- {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b7ef7cbd4fec9a1e811a5de813311ed4f7ac7d93e0fda233c9b3e1428f7dd7b"},
- {file = "aiohttp-3.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c3d6a4d0619e09dcd61021debf7059955c2004fa29f48788a3dfaf9c9901a7cd"},
- {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:718626a174e7e467f0558954f94af117b7d4695d48eb980146016afa4b580b2e"},
- {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:589c72667a5febd36f1315aa6e5f56dd4aa4862df295cb51c769d16142ddd7cd"},
- {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ed076098b171573161eb146afcb9129b5ff63308960aeca4b676d9d3c35e700"},
- {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:086f92daf51a032d062ec5f58af5ca6a44d082c35299c96376a41cbb33034675"},
- {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:11691cf4dc5b94236ccc609b70fec991234e7ef8d4c02dd0c9668d1e486f5abf"},
- {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:31d1e1c0dbf19ebccbfd62eff461518dcb1e307b195e93bba60c965a4dcf1ba0"},
- {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:11a67c0d562e07067c4e86bffc1553f2cf5b664d6111c894671b2b8712f3aba5"},
- {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:bb01ba6b0d3f6c68b89fce7305080145d4877ad3acaed424bae4d4ee75faa950"},
- {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:44db35a9e15d6fe5c40d74952e803b1d96e964f683b5a78c3cc64eb177878155"},
- {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:844a9b460871ee0a0b0b68a64890dae9c415e513db0f4a7e3cab41a0f2fedf33"},
- {file = "aiohttp-3.8.1-cp38-cp38-win32.whl", hash = "sha256:7d08744e9bae2ca9c382581f7dce1273fe3c9bae94ff572c3626e8da5b193c6a"},
- {file = "aiohttp-3.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:04d48b8ce6ab3cf2097b1855e1505181bdd05586ca275f2505514a6e274e8e75"},
- {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f5315a2eb0239185af1bddb1abf472d877fede3cc8d143c6cddad37678293237"},
- {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a996d01ca39b8dfe77440f3cd600825d05841088fd6bc0144cc6c2ec14cc5f74"},
- {file = "aiohttp-3.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:13487abd2f761d4be7c8ff9080de2671e53fff69711d46de703c310c4c9317ca"},
- {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea302f34477fda3f85560a06d9ebdc7fa41e82420e892fc50b577e35fc6a50b2"},
- {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2f635ce61a89c5732537a7896b6319a8fcfa23ba09bec36e1b1ac0ab31270d2"},
- {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e999f2d0e12eea01caeecb17b653f3713d758f6dcc770417cf29ef08d3931421"},
- {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0770e2806a30e744b4e21c9d73b7bee18a1cfa3c47991ee2e5a65b887c49d5cf"},
- {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d15367ce87c8e9e09b0f989bfd72dc641bcd04ba091c68cd305312d00962addd"},
- {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6c7cefb4b0640703eb1069835c02486669312bf2f12b48a748e0a7756d0de33d"},
- {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:71927042ed6365a09a98a6377501af5c9f0a4d38083652bcd2281a06a5976724"},
- {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:28d490af82bc6b7ce53ff31337a18a10498303fe66f701ab65ef27e143c3b0ef"},
- {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:b6613280ccedf24354406caf785db748bebbddcf31408b20c0b48cb86af76866"},
- {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81e3d8c34c623ca4e36c46524a3530e99c0bc95ed068fd6e9b55cb721d408fb2"},
- {file = "aiohttp-3.8.1-cp39-cp39-win32.whl", hash = "sha256:7187a76598bdb895af0adbd2fb7474d7f6025d170bc0a1130242da817ce9e7d1"},
- {file = "aiohttp-3.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:1c182cb873bc91b411e184dab7a2b664d4fea2743df0e4d57402f7f3fa644bac"},
- {file = "aiohttp-3.8.1.tar.gz", hash = "sha256:fc5471e1a54de15ef71c1bc6ebe80d4dc681ea600e68bfd1cbce40427f0b7578"},
-]
-aiosignal = [
- {file = "aiosignal-1.2.0-py3-none-any.whl", hash = "sha256:26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a"},
- {file = "aiosignal-1.2.0.tar.gz", hash = "sha256:78ed67db6c7b7ced4f98e495e572106d5c432a93e1ddd1bf475e1dc05f5b7df2"},
-]
-alabaster = [
- {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"},
- {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"},
-]
-async-timeout = [
- {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"},
- {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"},
-]
-atomicwrites = [
- {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
- {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
-]
-attrs = [
- {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
- {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
-]
-babel = [
- {file = "Babel-2.9.1-py2.py3-none-any.whl", hash = "sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9"},
- {file = "Babel-2.9.1.tar.gz", hash = "sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0"},
-]
-certifi = [
- {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"},
- {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"},
-]
-charset-normalizer = [
- {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"},
- {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"},
-]
-colorama = [
- {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
- {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
-]
-deprecated = [
- {file = "Deprecated-1.2.9-py2.py3-none-any.whl", hash = "sha256:55b41a15bda04c6a2c0d27dd4c2b7b81ffa6348c9cad8f077ac1978c59927ab9"},
- {file = "Deprecated-1.2.9.tar.gz", hash = "sha256:0cf37d293a96805c6afd8b5fc525cb40f23a2cac9b2d066ac3bd4b04e72ceccc"},
-]
-docutils = [
- {file = "docutils-0.16-py2.py3-none-any.whl", hash = "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af"},
- {file = "docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"},
-]
-frozenlist = [
- {file = "frozenlist-1.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2257aaba9660f78c7b1d8fea963b68f3feffb1a9d5d05a18401ca9eb3e8d0a3"},
- {file = "frozenlist-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4a44ebbf601d7bac77976d429e9bdb5a4614f9f4027777f9e54fd765196e9d3b"},
- {file = "frozenlist-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:45334234ec30fc4ea677f43171b18a27505bfb2dba9aca4398a62692c0ea8868"},
- {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47be22dc27ed933d55ee55845d34a3e4e9f6fee93039e7f8ebadb0c2f60d403f"},
- {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03a7dd1bfce30216a3f51a84e6dd0e4a573d23ca50f0346634916ff105ba6e6b"},
- {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:691ddf6dc50480ce49f68441f1d16a4c3325887453837036e0fb94736eae1e58"},
- {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bde99812f237f79eaf3f04ebffd74f6718bbd216101b35ac7955c2d47c17da02"},
- {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a202458d1298ced3768f5a7d44301e7c86defac162ace0ab7434c2e961166e8"},
- {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9e3e9e365991f8cc5f5edc1fd65b58b41d0514a6a7ad95ef5c7f34eb49b3d3e"},
- {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:04cb491c4b1c051734d41ea2552fde292f5f3a9c911363f74f39c23659c4af78"},
- {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:436496321dad302b8b27ca955364a439ed1f0999311c393dccb243e451ff66aa"},
- {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:754728d65f1acc61e0f4df784456106e35afb7bf39cfe37227ab00436fb38676"},
- {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6eb275c6385dd72594758cbe96c07cdb9bd6becf84235f4a594bdf21e3596c9d"},
- {file = "frozenlist-1.3.0-cp310-cp310-win32.whl", hash = "sha256:e30b2f9683812eb30cf3f0a8e9f79f8d590a7999f731cf39f9105a7c4a39489d"},
- {file = "frozenlist-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f7353ba3367473d1d616ee727945f439e027f0bb16ac1a750219a8344d1d5d3c"},
- {file = "frozenlist-1.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88aafd445a233dbbf8a65a62bc3249a0acd0d81ab18f6feb461cc5a938610d24"},
- {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4406cfabef8f07b3b3af0f50f70938ec06d9f0fc26cbdeaab431cbc3ca3caeaa"},
- {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cf829bd2e2956066dd4de43fd8ec881d87842a06708c035b37ef632930505a2"},
- {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:603b9091bd70fae7be28bdb8aa5c9990f4241aa33abb673390a7f7329296695f"},
- {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25af28b560e0c76fa41f550eacb389905633e7ac02d6eb3c09017fa1c8cdfde1"},
- {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c7a8a9fc9383b52c410a2ec952521906d355d18fccc927fca52ab575ee8b93"},
- {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:65bc6e2fece04e2145ab6e3c47428d1bbc05aede61ae365b2c1bddd94906e478"},
- {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3f7c935c7b58b0d78c0beea0c7358e165f95f1fd8a7e98baa40d22a05b4a8141"},
- {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd89acd1b8bb4f31b47072615d72e7f53a948d302b7c1d1455e42622de180eae"},
- {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:6983a31698490825171be44ffbafeaa930ddf590d3f051e397143a5045513b01"},
- {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:adac9700675cf99e3615eb6a0eb5e9f5a4143c7d42c05cea2e7f71c27a3d0846"},
- {file = "frozenlist-1.3.0-cp37-cp37m-win32.whl", hash = "sha256:0c36e78b9509e97042ef869c0e1e6ef6429e55817c12d78245eb915e1cca7468"},
- {file = "frozenlist-1.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:57f4d3f03a18facacb2a6bcd21bccd011e3b75d463dc49f838fd699d074fabd1"},
- {file = "frozenlist-1.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8c905a5186d77111f02144fab5b849ab524f1e876a1e75205cd1386a9be4b00a"},
- {file = "frozenlist-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b5009062d78a8c6890d50b4e53b0ddda31841b3935c1937e2ed8c1bda1c7fb9d"},
- {file = "frozenlist-1.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2fdc3cd845e5a1f71a0c3518528bfdbfe2efaf9886d6f49eacc5ee4fd9a10953"},
- {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e650bd09b5dda929523b9f8e7f99b24deac61240ecc1a32aeba487afcd970f"},
- {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40dff8962b8eba91fd3848d857203f0bd704b5f1fa2b3fc9af64901a190bba08"},
- {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:768efd082074bb203c934e83a61654ed4931ef02412c2fbdecea0cff7ecd0274"},
- {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:006d3595e7d4108a12025ddf415ae0f6c9e736e726a5db0183326fd191b14c5e"},
- {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:871d42623ae15eb0b0e9df65baeee6976b2e161d0ba93155411d58ff27483ad8"},
- {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aff388be97ef2677ae185e72dc500d19ecaf31b698986800d3fc4f399a5e30a5"},
- {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9f892d6a94ec5c7b785e548e42722e6f3a52f5f32a8461e82ac3e67a3bd073f1"},
- {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:e982878792c971cbd60ee510c4ee5bf089a8246226dea1f2138aa0bb67aff148"},
- {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c6c321dd013e8fc20735b92cb4892c115f5cdb82c817b1e5b07f6b95d952b2f0"},
- {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:30530930410855c451bea83f7b272fb1c495ed9d5cc72895ac29e91279401db3"},
- {file = "frozenlist-1.3.0-cp38-cp38-win32.whl", hash = "sha256:40ec383bc194accba825fbb7d0ef3dda5736ceab2375462f1d8672d9f6b68d07"},
- {file = "frozenlist-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:f20baa05eaa2bcd5404c445ec51aed1c268d62600362dc6cfe04fae34a424bd9"},
- {file = "frozenlist-1.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0437fe763fb5d4adad1756050cbf855bbb2bf0d9385c7bb13d7a10b0dd550486"},
- {file = "frozenlist-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b684c68077b84522b5c7eafc1dc735bfa5b341fb011d5552ebe0968e22ed641c"},
- {file = "frozenlist-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93641a51f89473837333b2f8100f3f89795295b858cd4c7d4a1f18e299dc0a4f"},
- {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6d32ff213aef0fd0bcf803bffe15cfa2d4fde237d1d4838e62aec242a8362fa"},
- {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31977f84828b5bb856ca1eb07bf7e3a34f33a5cddce981d880240ba06639b94d"},
- {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c62964192a1c0c30b49f403495911298810bada64e4f03249ca35a33ca0417a"},
- {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4eda49bea3602812518765810af732229b4291d2695ed24a0a20e098c45a707b"},
- {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acb267b09a509c1df5a4ca04140da96016f40d2ed183cdc356d237286c971b51"},
- {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e1e26ac0a253a2907d654a37e390904426d5ae5483150ce3adedb35c8c06614a"},
- {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f96293d6f982c58ebebb428c50163d010c2f05de0cde99fd681bfdc18d4b2dc2"},
- {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e84cb61b0ac40a0c3e0e8b79c575161c5300d1d89e13c0e02f76193982f066ed"},
- {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:ff9310f05b9d9c5c4dd472983dc956901ee6cb2c3ec1ab116ecdde25f3ce4951"},
- {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d26b650b71fdc88065b7a21f8ace70175bcf3b5bdba5ea22df4bfd893e795a3b"},
- {file = "frozenlist-1.3.0-cp39-cp39-win32.whl", hash = "sha256:01a73627448b1f2145bddb6e6c2259988bb8aee0fb361776ff8604b99616cd08"},
- {file = "frozenlist-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:772965f773757a6026dea111a15e6e2678fbd6216180f82a48a40b27de1ee2ab"},
- {file = "frozenlist-1.3.0.tar.gz", hash = "sha256:ce6f2ba0edb7b0c1d8976565298ad2deba6f8064d2bebb6ffce2ca896eb35b0b"},
-]
-idna = [
- {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
- {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
-]
-imagesize = [
- {file = "imagesize-1.3.0-py2.py3-none-any.whl", hash = "sha256:1db2f82529e53c3e929e8926a1fa9235aa82d0bd0c580359c67ec31b2fddaa8c"},
- {file = "imagesize-1.3.0.tar.gz", hash = "sha256:cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d"},
-]
-inflection = [
- {file = "inflection-0.4.0-py2.py3-none-any.whl", hash = "sha256:9a15d3598f01220e93f2207c432cfede50daff53137ce660fb8be838ef1ca6cc"},
- {file = "inflection-0.4.0.tar.gz", hash = "sha256:32a5c3341d9583ec319548b9015b7fbdf8c429cbcb575d326c33ae3a0e90d52c"},
-]
-jinja2 = [
- {file = "Jinja2-3.1.1-py3-none-any.whl", hash = "sha256:539835f51a74a69f41b848a9645dbdc35b4f20a3b601e2d9a7e22947b15ff119"},
- {file = "Jinja2-3.1.1.tar.gz", hash = "sha256:640bed4bb501cbd17194b3cace1dc2126f5b619cf068a726b98192a0fde74ae9"},
-]
-markupsafe = [
- {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"},
- {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"},
- {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"},
- {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"},
- {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"},
- {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"},
-]
-more-itertools = [
- {file = "more-itertools-8.12.0.tar.gz", hash = "sha256:7dc6ad46f05f545f900dd59e8dfb4e84a4827b97b3cfecb175ea0c7d247f6064"},
- {file = "more_itertools-8.12.0-py3-none-any.whl", hash = "sha256:43e6dd9942dffd72661a2c4ef383ad7da1e6a3e968a927ad7a6083ab410a688b"},
-]
-multidict = [
- {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b9e95a740109c6047602f4db4da9949e6c5945cefbad34a1299775ddc9a62e2"},
- {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac0e27844758d7177989ce406acc6a83c16ed4524ebc363c1f748cba184d89d3"},
- {file = "multidict-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:041b81a5f6b38244b34dc18c7b6aba91f9cdaf854d9a39e5ff0b58e2b5773b9c"},
- {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fdda29a3c7e76a064f2477c9aab1ba96fd94e02e386f1e665bca1807fc5386f"},
- {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3368bf2398b0e0fcbf46d85795adc4c259299fec50c1416d0f77c0a843a3eed9"},
- {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4f052ee022928d34fe1f4d2bc743f32609fb79ed9c49a1710a5ad6b2198db20"},
- {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:225383a6603c086e6cef0f2f05564acb4f4d5f019a4e3e983f572b8530f70c88"},
- {file = "multidict-6.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50bd442726e288e884f7be9071016c15a8742eb689a593a0cac49ea093eef0a7"},
- {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:47e6a7e923e9cada7c139531feac59448f1f47727a79076c0b1ee80274cd8eee"},
- {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0556a1d4ea2d949efe5fd76a09b4a82e3a4a30700553a6725535098d8d9fb672"},
- {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:626fe10ac87851f4cffecee161fc6f8f9853f0f6f1035b59337a51d29ff3b4f9"},
- {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8064b7c6f0af936a741ea1efd18690bacfbae4078c0c385d7c3f611d11f0cf87"},
- {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2d36e929d7f6a16d4eb11b250719c39560dd70545356365b494249e2186bc389"},
- {file = "multidict-6.0.2-cp310-cp310-win32.whl", hash = "sha256:fcb91630817aa8b9bc4a74023e4198480587269c272c58b3279875ed7235c293"},
- {file = "multidict-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:8cbf0132f3de7cc6c6ce00147cc78e6439ea736cee6bca4f068bcf892b0fd658"},
- {file = "multidict-6.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:05f6949d6169878a03e607a21e3b862eaf8e356590e8bdae4227eedadacf6e51"},
- {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2c2e459f7050aeb7c1b1276763364884595d47000c1cddb51764c0d8976e608"},
- {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0509e469d48940147e1235d994cd849a8f8195e0bca65f8f5439c56e17872a3"},
- {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:514fe2b8d750d6cdb4712346a2c5084a80220821a3e91f3f71eec11cf8d28fd4"},
- {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19adcfc2a7197cdc3987044e3f415168fc5dc1f720c932eb1ef4f71a2067e08b"},
- {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9d153e7f1f9ba0b23ad1568b3b9e17301e23b042c23870f9ee0522dc5cc79e8"},
- {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:aef9cc3d9c7d63d924adac329c33835e0243b5052a6dfcbf7732a921c6e918ba"},
- {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4571f1beddff25f3e925eea34268422622963cd8dc395bb8778eb28418248e43"},
- {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:d48b8ee1d4068561ce8033d2c344cf5232cb29ee1a0206a7b828c79cbc5982b8"},
- {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:45183c96ddf61bf96d2684d9fbaf6f3564d86b34cb125761f9a0ef9e36c1d55b"},
- {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:75bdf08716edde767b09e76829db8c1e5ca9d8bb0a8d4bd94ae1eafe3dac5e15"},
- {file = "multidict-6.0.2-cp37-cp37m-win32.whl", hash = "sha256:a45e1135cb07086833ce969555df39149680e5471c04dfd6a915abd2fc3f6dbc"},
- {file = "multidict-6.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6f3cdef8a247d1eafa649085812f8a310e728bdf3900ff6c434eafb2d443b23a"},
- {file = "multidict-6.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0327292e745a880459ef71be14e709aaea2f783f3537588fb4ed09b6c01bca60"},
- {file = "multidict-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e875b6086e325bab7e680e4316d667fc0e5e174bb5611eb16b3ea121c8951b86"},
- {file = "multidict-6.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feea820722e69451743a3d56ad74948b68bf456984d63c1a92e8347b7b88452d"},
- {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc57c68cb9139c7cd6fc39f211b02198e69fb90ce4bc4a094cf5fe0d20fd8b0"},
- {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:497988d6b6ec6ed6f87030ec03280b696ca47dbf0648045e4e1d28b80346560d"},
- {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:89171b2c769e03a953d5969b2f272efa931426355b6c0cb508022976a17fd376"},
- {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684133b1e1fe91eda8fa7447f137c9490a064c6b7f392aa857bba83a28cfb693"},
- {file = "multidict-6.0.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd9fc9c4849a07f3635ccffa895d57abce554b467d611a5009ba4f39b78a8849"},
- {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e07c8e79d6e6fd37b42f3250dba122053fddb319e84b55dd3a8d6446e1a7ee49"},
- {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4070613ea2227da2bfb2c35a6041e4371b0af6b0be57f424fe2318b42a748516"},
- {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:47fbeedbf94bed6547d3aa632075d804867a352d86688c04e606971595460227"},
- {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5774d9218d77befa7b70d836004a768fb9aa4fdb53c97498f4d8d3f67bb9cfa9"},
- {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2957489cba47c2539a8eb7ab32ff49101439ccf78eab724c828c1a54ff3ff98d"},
- {file = "multidict-6.0.2-cp38-cp38-win32.whl", hash = "sha256:e5b20e9599ba74391ca0cfbd7b328fcc20976823ba19bc573983a25b32e92b57"},
- {file = "multidict-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:8004dca28e15b86d1b1372515f32eb6f814bdf6f00952699bdeb541691091f96"},
- {file = "multidict-6.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2e4a0785b84fb59e43c18a015ffc575ba93f7d1dbd272b4cdad9f5134b8a006c"},
- {file = "multidict-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6701bf8a5d03a43375909ac91b6980aea74b0f5402fbe9428fc3f6edf5d9677e"},
- {file = "multidict-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a007b1638e148c3cfb6bf0bdc4f82776cef0ac487191d093cdc316905e504071"},
- {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07a017cfa00c9890011628eab2503bee5872f27144936a52eaab449be5eaf032"},
- {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c207fff63adcdf5a485969131dc70e4b194327666b7e8a87a97fbc4fd80a53b2"},
- {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:373ba9d1d061c76462d74e7de1c0c8e267e9791ee8cfefcf6b0b2495762c370c"},
- {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfba7c6d5d7c9099ba21f84662b037a0ffd4a5e6b26ac07d19e423e6fdf965a9"},
- {file = "multidict-6.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19d9bad105dfb34eb539c97b132057a4e709919ec4dd883ece5838bcbf262b80"},
- {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:de989b195c3d636ba000ee4281cd03bb1234635b124bf4cd89eeee9ca8fcb09d"},
- {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7c40b7bbece294ae3a87c1bc2abff0ff9beef41d14188cda94ada7bcea99b0fb"},
- {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:d16cce709ebfadc91278a1c005e3c17dd5f71f5098bfae1035149785ea6e9c68"},
- {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:a2c34a93e1d2aa35fbf1485e5010337c72c6791407d03aa5f4eed920343dd360"},
- {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:feba80698173761cddd814fa22e88b0661e98cb810f9f986c54aa34d281e4937"},
- {file = "multidict-6.0.2-cp39-cp39-win32.whl", hash = "sha256:23b616fdc3c74c9fe01d76ce0d1ce872d2d396d8fa8e4899398ad64fb5aa214a"},
- {file = "multidict-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:4bae31803d708f6f15fd98be6a6ac0b6958fcf68fda3c77a048a4f9073704aae"},
- {file = "multidict-6.0.2.tar.gz", hash = "sha256:5ff3bd75f38e4c43f1f470f2df7a4d430b821c4ce22be384e1459cb57d6bb013"},
-]
-mypy = [
- {file = "mypy-0.770-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:a34b577cdf6313bf24755f7a0e3f3c326d5c1f4fe7422d1d06498eb25ad0c600"},
- {file = "mypy-0.770-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:86c857510a9b7c3104cf4cde1568f4921762c8f9842e987bc03ed4f160925754"},
- {file = "mypy-0.770-cp35-cp35m-win_amd64.whl", hash = "sha256:a8ffcd53cb5dfc131850851cc09f1c44689c2812d0beb954d8138d4f5fc17f65"},
- {file = "mypy-0.770-cp36-cp36m-macosx_10_6_x86_64.whl", hash = "sha256:7687f6455ec3ed7649d1ae574136835a4272b65b3ddcf01ab8704ac65616c5ce"},
- {file = "mypy-0.770-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3beff56b453b6ef94ecb2996bea101a08f1f8a9771d3cbf4988a61e4d9973761"},
- {file = "mypy-0.770-cp36-cp36m-win_amd64.whl", hash = "sha256:15b948e1302682e3682f11f50208b726a246ab4e6c1b39f9264a8796bb416aa2"},
- {file = "mypy-0.770-cp37-cp37m-macosx_10_6_x86_64.whl", hash = "sha256:b90928f2d9eb2f33162405f32dde9f6dcead63a0971ca8a1b50eb4ca3e35ceb8"},
- {file = "mypy-0.770-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:c56ffe22faa2e51054c5f7a3bc70a370939c2ed4de308c690e7949230c995913"},
- {file = "mypy-0.770-cp37-cp37m-win_amd64.whl", hash = "sha256:8dfb69fbf9f3aeed18afffb15e319ca7f8da9642336348ddd6cab2713ddcf8f9"},
- {file = "mypy-0.770-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:219a3116ecd015f8dca7b5d2c366c973509dfb9a8fc97ef044a36e3da66144a1"},
- {file = "mypy-0.770-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7ec45a70d40ede1ec7ad7f95b3c94c9cf4c186a32f6bacb1795b60abd2f9ef27"},
- {file = "mypy-0.770-cp38-cp38-win_amd64.whl", hash = "sha256:f91c7ae919bbc3f96cd5e5b2e786b2b108343d1d7972ea130f7de27fdd547cf3"},
- {file = "mypy-0.770-py3-none-any.whl", hash = "sha256:3b1fc683fb204c6b4403a1ef23f0b1fac8e4477091585e0c8c54cbdf7d7bb164"},
- {file = "mypy-0.770.tar.gz", hash = "sha256:8a627507ef9b307b46a1fea9513d5c98680ba09591253082b4c48697ba05a4ae"},
-]
-mypy-extensions = [
- {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
- {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
-]
-packaging = [
- {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
- {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
-]
-pluggy = [
- {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"},
- {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"},
-]
-py = [
- {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
- {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
-]
-pygments = [
- {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"},
- {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"},
-]
-pyparsing = [
- {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"},
- {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"},
-]
-pytest = [
- {file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"},
- {file = "pytest-5.4.3.tar.gz", hash = "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"},
-]
-pytz = [
- {file = "pytz-2022.1-py2.py3-none-any.whl", hash = "sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c"},
- {file = "pytz-2022.1.tar.gz", hash = "sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7"},
-]
-requests = [
- {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"},
- {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"},
-]
-setuptools = [
- {file = "setuptools-65.7.0-py3-none-any.whl", hash = "sha256:8ab4f1dbf2b4a65f7eec5ad0c620e84c34111a68d3349833494b9088212214dd"},
- {file = "setuptools-65.7.0.tar.gz", hash = "sha256:4d3c92fac8f1118bb77a22181355e29c239cabfe2b9effdaa665c66b711136d7"},
-]
-snowballstemmer = [
- {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
- {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
-]
-sphinx = [
- {file = "Sphinx-3.5.4-py3-none-any.whl", hash = "sha256:2320d4e994a191f4b4be27da514e46b3d6b420f2ff895d064f52415d342461e8"},
- {file = "Sphinx-3.5.4.tar.gz", hash = "sha256:19010b7b9fa0dc7756a6e105b2aacd3a80f798af3c25c273be64d7beeb482cb1"},
-]
-sphinx-autodoc-typehints = [
- {file = "sphinx-autodoc-typehints-1.12.0.tar.gz", hash = "sha256:193617d9dbe0847281b1399d369e74e34cd959c82e02c7efde077fca908a9f52"},
- {file = "sphinx_autodoc_typehints-1.12.0-py3-none-any.whl", hash = "sha256:5e81776ec422dd168d688ab60f034fccfafbcd94329e9537712c93003bddc04a"},
-]
-sphinx-rtd-theme = [
- {file = "sphinx_rtd_theme-0.4.3-py2.py3-none-any.whl", hash = "sha256:00cf895504a7895ee433807c62094cf1e95f065843bf3acd17037c3e9a2becd4"},
- {file = "sphinx_rtd_theme-0.4.3.tar.gz", hash = "sha256:728607e34d60456d736cc7991fd236afb828b21b82f956c5ea75f94c8414040a"},
-]
-sphinxcontrib-applehelp = [
- {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"},
- {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"},
-]
-sphinxcontrib-devhelp = [
- {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"},
- {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"},
-]
-sphinxcontrib-htmlhelp = [
- {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"},
- {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"},
-]
-sphinxcontrib-jsmath = [
- {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"},
- {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"},
-]
-sphinxcontrib-qthelp = [
- {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"},
- {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"},
-]
-sphinxcontrib-serializinghtml = [
- {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"},
- {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"},
-]
-typed-ast = [
- {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"},
- {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"},
- {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"},
- {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"},
- {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"},
- {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"},
- {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"},
- {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"},
- {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"},
- {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"},
- {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"},
- {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"},
- {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"},
- {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"},
- {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"},
- {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"},
- {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"},
- {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"},
- {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"},
- {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"},
- {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"},
- {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"},
- {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"},
- {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"},
- {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"},
- {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"},
- {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"},
- {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"},
- {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"},
- {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"},
-]
-typing-extensions = [
- {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"},
- {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"},
-]
-urllib3 = [
- {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"},
- {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"},
-]
-wcwidth = [
- {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"},
- {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"},
-]
-wrapt = [
- {file = "wrapt-1.14.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:5a9a1889cc01ed2ed5f34574c90745fab1dd06ec2eee663e8ebeefe363e8efd7"},
- {file = "wrapt-1.14.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:9a3ff5fb015f6feb78340143584d9f8a0b91b6293d6b5cf4295b3e95d179b88c"},
- {file = "wrapt-1.14.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:4b847029e2d5e11fd536c9ac3136ddc3f54bc9488a75ef7d040a3900406a91eb"},
- {file = "wrapt-1.14.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:9a5a544861b21e0e7575b6023adebe7a8c6321127bb1d238eb40d99803a0e8bd"},
- {file = "wrapt-1.14.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:88236b90dda77f0394f878324cfbae05ae6fde8a84d548cfe73a75278d760291"},
- {file = "wrapt-1.14.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f0408e2dbad9e82b4c960274214af533f856a199c9274bd4aff55d4634dedc33"},
- {file = "wrapt-1.14.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:9d8c68c4145041b4eeae96239802cfdfd9ef927754a5be3f50505f09f309d8c6"},
- {file = "wrapt-1.14.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:22626dca56fd7f55a0733e604f1027277eb0f4f3d95ff28f15d27ac25a45f71b"},
- {file = "wrapt-1.14.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:65bf3eb34721bf18b5a021a1ad7aa05947a1767d1aa272b725728014475ea7d5"},
- {file = "wrapt-1.14.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09d16ae7a13cff43660155383a2372b4aa09109c7127aa3f24c3cf99b891c330"},
- {file = "wrapt-1.14.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:debaf04f813ada978d7d16c7dfa16f3c9c2ec9adf4656efdc4defdf841fc2f0c"},
- {file = "wrapt-1.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:748df39ed634851350efa87690c2237a678ed794fe9ede3f0d79f071ee042561"},
- {file = "wrapt-1.14.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1807054aa7b61ad8d8103b3b30c9764de2e9d0c0978e9d3fc337e4e74bf25faa"},
- {file = "wrapt-1.14.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:763a73ab377390e2af26042f685a26787c402390f682443727b847e9496e4a2a"},
- {file = "wrapt-1.14.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8529b07b49b2d89d6917cfa157d3ea1dfb4d319d51e23030664a827fe5fd2131"},
- {file = "wrapt-1.14.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:68aeefac31c1f73949662ba8affaf9950b9938b712fb9d428fa2a07e40ee57f8"},
- {file = "wrapt-1.14.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59d7d92cee84a547d91267f0fea381c363121d70fe90b12cd88241bd9b0e1763"},
- {file = "wrapt-1.14.0-cp310-cp310-win32.whl", hash = "sha256:3a88254881e8a8c4784ecc9cb2249ff757fd94b911d5df9a5984961b96113fff"},
- {file = "wrapt-1.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:9a242871b3d8eecc56d350e5e03ea1854de47b17f040446da0e47dc3e0b9ad4d"},
- {file = "wrapt-1.14.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:a65bffd24409454b889af33b6c49d0d9bcd1a219b972fba975ac935f17bdf627"},
- {file = "wrapt-1.14.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9d9fcd06c952efa4b6b95f3d788a819b7f33d11bea377be6b8980c95e7d10775"},
- {file = "wrapt-1.14.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:db6a0ddc1282ceb9032e41853e659c9b638789be38e5b8ad7498caac00231c23"},
- {file = "wrapt-1.14.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:14e7e2c5f5fca67e9a6d5f753d21f138398cad2b1159913ec9e9a67745f09ba3"},
- {file = "wrapt-1.14.0-cp35-cp35m-win32.whl", hash = "sha256:6d9810d4f697d58fd66039ab959e6d37e63ab377008ef1d63904df25956c7db0"},
- {file = "wrapt-1.14.0-cp35-cp35m-win_amd64.whl", hash = "sha256:d808a5a5411982a09fef6b49aac62986274ab050e9d3e9817ad65b2791ed1425"},
- {file = "wrapt-1.14.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b77159d9862374da213f741af0c361720200ab7ad21b9f12556e0eb95912cd48"},
- {file = "wrapt-1.14.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36a76a7527df8583112b24adc01748cd51a2d14e905b337a6fefa8b96fc708fb"},
- {file = "wrapt-1.14.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0057b5435a65b933cbf5d859cd4956624df37b8bf0917c71756e4b3d9958b9e"},
- {file = "wrapt-1.14.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a0a4ca02752ced5f37498827e49c414d694ad7cf451ee850e3ff160f2bee9d3"},
- {file = "wrapt-1.14.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8c6be72eac3c14baa473620e04f74186c5d8f45d80f8f2b4eda6e1d18af808e8"},
- {file = "wrapt-1.14.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:21b1106bff6ece8cb203ef45b4f5778d7226c941c83aaaa1e1f0f4f32cc148cd"},
- {file = "wrapt-1.14.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:493da1f8b1bb8a623c16552fb4a1e164c0200447eb83d3f68b44315ead3f9036"},
- {file = "wrapt-1.14.0-cp36-cp36m-win32.whl", hash = "sha256:89ba3d548ee1e6291a20f3c7380c92f71e358ce8b9e48161401e087e0bc740f8"},
- {file = "wrapt-1.14.0-cp36-cp36m-win_amd64.whl", hash = "sha256:729d5e96566f44fccac6c4447ec2332636b4fe273f03da128fff8d5559782b06"},
- {file = "wrapt-1.14.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:891c353e95bb11abb548ca95c8b98050f3620a7378332eb90d6acdef35b401d4"},
- {file = "wrapt-1.14.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23f96134a3aa24cc50614920cc087e22f87439053d886e474638c68c8d15dc80"},
- {file = "wrapt-1.14.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6807bcee549a8cb2f38f73f469703a1d8d5d990815c3004f21ddb68a567385ce"},
- {file = "wrapt-1.14.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6915682f9a9bc4cf2908e83caf5895a685da1fbd20b6d485dafb8e218a338279"},
- {file = "wrapt-1.14.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f2f3bc7cd9c9fcd39143f11342eb5963317bd54ecc98e3650ca22704b69d9653"},
- {file = "wrapt-1.14.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3a71dbd792cc7a3d772ef8cd08d3048593f13d6f40a11f3427c000cf0a5b36a0"},
- {file = "wrapt-1.14.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5a0898a640559dec00f3614ffb11d97a2666ee9a2a6bad1259c9facd01a1d4d9"},
- {file = "wrapt-1.14.0-cp37-cp37m-win32.whl", hash = "sha256:167e4793dc987f77fd476862d32fa404d42b71f6a85d3b38cbce711dba5e6b68"},
- {file = "wrapt-1.14.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d066ffc5ed0be00cd0352c95800a519cf9e4b5dd34a028d301bdc7177c72daf3"},
- {file = "wrapt-1.14.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d9bdfa74d369256e4218000a629978590fd7cb6cf6893251dad13d051090436d"},
- {file = "wrapt-1.14.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2498762814dd7dd2a1d0248eda2afbc3dd9c11537bc8200a4b21789b6df6cd38"},
- {file = "wrapt-1.14.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f24ca7953f2643d59a9c87d6e272d8adddd4a53bb62b9208f36db408d7aafc7"},
- {file = "wrapt-1.14.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b835b86bd5a1bdbe257d610eecab07bf685b1af2a7563093e0e69180c1d4af1"},
- {file = "wrapt-1.14.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b21650fa6907e523869e0396c5bd591cc326e5c1dd594dcdccac089561cacfb8"},
- {file = "wrapt-1.14.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:354d9fc6b1e44750e2a67b4b108841f5f5ea08853453ecbf44c81fdc2e0d50bd"},
- {file = "wrapt-1.14.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f83e9c21cd5275991076b2ba1cd35418af3504667affb4745b48937e214bafe"},
- {file = "wrapt-1.14.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:61e1a064906ccba038aa3c4a5a82f6199749efbbb3cef0804ae5c37f550eded0"},
- {file = "wrapt-1.14.0-cp38-cp38-win32.whl", hash = "sha256:28c659878f684365d53cf59dc9a1929ea2eecd7ac65da762be8b1ba193f7e84f"},
- {file = "wrapt-1.14.0-cp38-cp38-win_amd64.whl", hash = "sha256:b0ed6ad6c9640671689c2dbe6244680fe8b897c08fd1fab2228429b66c518e5e"},
- {file = "wrapt-1.14.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b3f7e671fb19734c872566e57ce7fc235fa953d7c181bb4ef138e17d607dc8a1"},
- {file = "wrapt-1.14.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87fa943e8bbe40c8c1ba4086971a6fefbf75e9991217c55ed1bcb2f1985bd3d4"},
- {file = "wrapt-1.14.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4775a574e9d84e0212f5b18886cace049a42e13e12009bb0491562a48bb2b758"},
- {file = "wrapt-1.14.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d57677238a0c5411c76097b8b93bdebb02eb845814c90f0b01727527a179e4d"},
- {file = "wrapt-1.14.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00108411e0f34c52ce16f81f1d308a571df7784932cc7491d1e94be2ee93374b"},
- {file = "wrapt-1.14.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d332eecf307fca852d02b63f35a7872de32d5ba8b4ec32da82f45df986b39ff6"},
- {file = "wrapt-1.14.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:01f799def9b96a8ec1ef6b9c1bbaf2bbc859b87545efbecc4a78faea13d0e3a0"},
- {file = "wrapt-1.14.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47045ed35481e857918ae78b54891fac0c1d197f22c95778e66302668309336c"},
- {file = "wrapt-1.14.0-cp39-cp39-win32.whl", hash = "sha256:2eca15d6b947cfff51ed76b2d60fd172c6ecd418ddab1c5126032d27f74bc350"},
- {file = "wrapt-1.14.0-cp39-cp39-win_amd64.whl", hash = "sha256:bb36fbb48b22985d13a6b496ea5fb9bb2a076fea943831643836c9f6febbcfdc"},
- {file = "wrapt-1.14.0.tar.gz", hash = "sha256:8323a43bd9c91f62bb7d4be74cc9ff10090e7ef820e27bfe8815c57e68261311"},
-]
-yarl = [
- {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2a8508f7350512434e41065684076f640ecce176d262a7d54f0da41d99c5a95"},
- {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da6df107b9ccfe52d3a48165e48d72db0eca3e3029b5b8cb4fe6ee3cb870ba8b"},
- {file = "yarl-1.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1d0894f238763717bdcfea74558c94e3bc34aeacd3351d769460c1a586a8b05"},
- {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4b95b7e00c6635a72e2d00b478e8a28bfb122dc76349a06e20792eb53a523"},
- {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c145ab54702334c42237a6c6c4cc08703b6aa9b94e2f227ceb3d477d20c36c63"},
- {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ca56f002eaf7998b5fcf73b2421790da9d2586331805f38acd9997743114e98"},
- {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1d3d5ad8ea96bd6d643d80c7b8d5977b4e2fb1bab6c9da7322616fd26203d125"},
- {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:167ab7f64e409e9bdd99333fe8c67b5574a1f0495dcfd905bc7454e766729b9e"},
- {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:95a1873b6c0dd1c437fb3bb4a4aaa699a48c218ac7ca1e74b0bee0ab16c7d60d"},
- {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6152224d0a1eb254f97df3997d79dadd8bb2c1a02ef283dbb34b97d4f8492d23"},
- {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bb7d54b8f61ba6eee541fba4b83d22b8a046b4ef4d8eb7f15a7e35db2e1e245"},
- {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:9c1f083e7e71b2dd01f7cd7434a5f88c15213194df38bc29b388ccdf1492b739"},
- {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f44477ae29025d8ea87ec308539f95963ffdc31a82f42ca9deecf2d505242e72"},
- {file = "yarl-1.7.2-cp310-cp310-win32.whl", hash = "sha256:cff3ba513db55cc6a35076f32c4cdc27032bd075c9faef31fec749e64b45d26c"},
- {file = "yarl-1.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:c9c6d927e098c2d360695f2e9d38870b2e92e0919be07dbe339aefa32a090265"},
- {file = "yarl-1.7.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9b4c77d92d56a4c5027572752aa35082e40c561eec776048330d2907aead891d"},
- {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c01a89a44bb672c38f42b49cdb0ad667b116d731b3f4c896f72302ff77d71656"},
- {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c19324a1c5399b602f3b6e7db9478e5b1adf5cf58901996fc973fe4fccd73eed"},
- {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3abddf0b8e41445426d29f955b24aeecc83fa1072be1be4e0d194134a7d9baee"},
- {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6a1a9fe17621af43e9b9fcea8bd088ba682c8192d744b386ee3c47b56eaabb2c"},
- {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8b0915ee85150963a9504c10de4e4729ae700af11df0dc5550e6587ed7891e92"},
- {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:29e0656d5497733dcddc21797da5a2ab990c0cb9719f1f969e58a4abac66234d"},
- {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:bf19725fec28452474d9887a128e98dd67eee7b7d52e932e6949c532d820dc3b"},
- {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d6f3d62e16c10e88d2168ba2d065aa374e3c538998ed04996cd373ff2036d64c"},
- {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ac10bbac36cd89eac19f4e51c032ba6b412b3892b685076f4acd2de18ca990aa"},
- {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aa32aaa97d8b2ed4e54dc65d241a0da1c627454950f7d7b1f95b13985afd6c5d"},
- {file = "yarl-1.7.2-cp36-cp36m-win32.whl", hash = "sha256:87f6e082bce21464857ba58b569370e7b547d239ca22248be68ea5d6b51464a1"},
- {file = "yarl-1.7.2-cp36-cp36m-win_amd64.whl", hash = "sha256:ac35ccde589ab6a1870a484ed136d49a26bcd06b6a1c6397b1967ca13ceb3913"},
- {file = "yarl-1.7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a467a431a0817a292121c13cbe637348b546e6ef47ca14a790aa2fa8cc93df63"},
- {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ab0c3274d0a846840bf6c27d2c60ba771a12e4d7586bf550eefc2df0b56b3b4"},
- {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d260d4dc495c05d6600264a197d9d6f7fc9347f21d2594926202fd08cf89a8ba"},
- {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc4dd8b01a8112809e6b636b00f487846956402834a7fd59d46d4f4267181c41"},
- {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c1164a2eac148d85bbdd23e07dfcc930f2e633220f3eb3c3e2a25f6148c2819e"},
- {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:67e94028817defe5e705079b10a8438b8cb56e7115fa01640e9c0bb3edf67332"},
- {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:89ccbf58e6a0ab89d487c92a490cb5660d06c3a47ca08872859672f9c511fc52"},
- {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8cce6f9fa3df25f55521fbb5c7e4a736683148bcc0c75b21863789e5185f9185"},
- {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:211fcd65c58bf250fb994b53bc45a442ddc9f441f6fec53e65de8cba48ded986"},
- {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c10ea1e80a697cf7d80d1ed414b5cb8f1eec07d618f54637067ae3c0334133c4"},
- {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:52690eb521d690ab041c3919666bea13ab9fbff80d615ec16fa81a297131276b"},
- {file = "yarl-1.7.2-cp37-cp37m-win32.whl", hash = "sha256:695ba021a9e04418507fa930d5f0704edbce47076bdcfeeaba1c83683e5649d1"},
- {file = "yarl-1.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c17965ff3706beedafd458c452bf15bac693ecd146a60a06a214614dc097a271"},
- {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fce78593346c014d0d986b7ebc80d782b7f5e19843ca798ed62f8e3ba8728576"},
- {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c2a1ac41a6aa980db03d098a5531f13985edcb451bcd9d00670b03129922cd0d"},
- {file = "yarl-1.7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:39d5493c5ecd75c8093fa7700a2fb5c94fe28c839c8e40144b7ab7ccba6938c8"},
- {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eb6480ef366d75b54c68164094a6a560c247370a68c02dddb11f20c4c6d3c9d"},
- {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ba63585a89c9885f18331a55d25fe81dc2d82b71311ff8bd378fc8004202ff6"},
- {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e39378894ee6ae9f555ae2de332d513a5763276a9265f8e7cbaeb1b1ee74623a"},
- {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c0910c6b6c31359d2f6184828888c983d54d09d581a4a23547a35f1d0b9484b1"},
- {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6feca8b6bfb9eef6ee057628e71e1734caf520a907b6ec0d62839e8293e945c0"},
- {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8300401dc88cad23f5b4e4c1226f44a5aa696436a4026e456fe0e5d2f7f486e6"},
- {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:788713c2896f426a4e166b11f4ec538b5736294ebf7d5f654ae445fd44270832"},
- {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fd547ec596d90c8676e369dd8a581a21227fe9b4ad37d0dc7feb4ccf544c2d59"},
- {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:737e401cd0c493f7e3dd4db72aca11cfe069531c9761b8ea474926936b3c57c8"},
- {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf81561f2972fb895e7844882898bda1eef4b07b5b385bcd308d2098f1a767b"},
- {file = "yarl-1.7.2-cp38-cp38-win32.whl", hash = "sha256:ede3b46cdb719c794427dcce9d8beb4abe8b9aa1e97526cc20de9bd6583ad1ef"},
- {file = "yarl-1.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:cc8b7a7254c0fc3187d43d6cb54b5032d2365efd1df0cd1749c0c4df5f0ad45f"},
- {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:580c1f15500e137a8c37053e4cbf6058944d4c114701fa59944607505c2fe3a0"},
- {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ec1d9a0d7780416e657f1e405ba35ec1ba453a4f1511eb8b9fbab81cb8b3ce1"},
- {file = "yarl-1.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3bf8cfe8856708ede6a73907bf0501f2dc4e104085e070a41f5d88e7faf237f3"},
- {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1be4bbb3d27a4e9aa5f3df2ab61e3701ce8fcbd3e9846dbce7c033a7e8136746"},
- {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:534b047277a9a19d858cde163aba93f3e1677d5acd92f7d10ace419d478540de"},
- {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6ddcd80d79c96eb19c354d9dca95291589c5954099836b7c8d29278a7ec0bda"},
- {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9bfcd43c65fbb339dc7086b5315750efa42a34eefad0256ba114cd8ad3896f4b"},
- {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f64394bd7ceef1237cc604b5a89bf748c95982a84bcd3c4bbeb40f685c810794"},
- {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044daf3012e43d4b3538562da94a88fb12a6490652dbc29fb19adfa02cf72eac"},
- {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:368bcf400247318382cc150aaa632582d0780b28ee6053cd80268c7e72796dec"},
- {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:bab827163113177aee910adb1f48ff7af31ee0289f434f7e22d10baf624a6dfe"},
- {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0cba38120db72123db7c58322fa69e3c0efa933040ffb586c3a87c063ec7cae8"},
- {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:59218fef177296451b23214c91ea3aba7858b4ae3306dde120224cfe0f7a6ee8"},
- {file = "yarl-1.7.2-cp39-cp39-win32.whl", hash = "sha256:1edc172dcca3f11b38a9d5c7505c83c1913c0addc99cd28e993efeaafdfaa18d"},
- {file = "yarl-1.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:797c2c412b04403d2da075fb93c123df35239cd7b4cc4e0cd9e5839b73f52c58"},
- {file = "yarl-1.7.2.tar.gz", hash = "sha256:45399b46d60c253327a460e99856752009fcee5f5d3c80b2f7c0cae1c38d56dd"},
-]
+lock-version = "2.0"
+python-versions = "^3.8, <3.11"
+content-hash = "6411dd12806f87ff9b5989dec3c75be90fb91482635b9a0af5ea60331b5fb3d0"
diff --git a/pycdp/cdp/__init__.py b/pycdp/cdp/__init__.py
index bfead84..97c6897 100644
--- a/pycdp/cdp/__init__.py
+++ b/pycdp/cdp/__init__.py
@@ -3,4 +3,4 @@
# This file is generated from the CDP specification. If you need to make
# changes, edit the generator and regenerate all of the modules.
-from . import (accessibility, animation, audits, background_service, browser, css, cache_storage, cast, console, dom, dom_debugger, dom_snapshot, dom_storage, database, debugger, device_orientation, emulation, event_breakpoints, fetch, headless_experimental, heap_profiler, io, indexed_db, input_, inspector, layer_tree, log, media, memory, network, overlay, page, performance, performance_timeline, profiler, runtime, schema, security, service_worker, storage, system_info, target, tethering, tracing, web_audio, web_authn)
\ No newline at end of file
+from . import (accessibility, animation, audits, background_service, browser, css, cache_storage, cast, console, dom, dom_debugger, dom_snapshot, dom_storage, database, debugger, device_access, device_orientation, emulation, event_breakpoints, fed_cm, fetch, headless_experimental, heap_profiler, io, indexed_db, input_, inspector, layer_tree, log, media, memory, network, overlay, page, performance, performance_timeline, preload, profiler, runtime, schema, security, service_worker, storage, system_info, target, tethering, tracing, web_audio, web_authn)
\ No newline at end of file
diff --git a/pycdp/cdp/accessibility.py b/pycdp/cdp/accessibility.py
index 43cfa7f..3f3c5d4 100644
--- a/pycdp/cdp/accessibility.py
+++ b/pycdp/cdp/accessibility.py
@@ -160,14 +160,14 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> AXValueSource:
return cls(
type_=AXValueSourceType.from_json(json['type']),
- value=AXValue.from_json(json['value']) if 'value' in json else None,
- attribute=str(json['attribute']) if 'attribute' in json else None,
- attribute_value=AXValue.from_json(json['attributeValue']) if 'attributeValue' in json else None,
- superseded=bool(json['superseded']) if 'superseded' in json else None,
- native_source=AXValueNativeSourceType.from_json(json['nativeSource']) if 'nativeSource' in json else None,
- native_source_value=AXValue.from_json(json['nativeSourceValue']) if 'nativeSourceValue' in json else None,
- invalid=bool(json['invalid']) if 'invalid' in json else None,
- invalid_reason=str(json['invalidReason']) if 'invalidReason' in json else None,
+ value=AXValue.from_json(json['value']) if json.get('value', None) is not None else None,
+ attribute=str(json['attribute']) if json.get('attribute', None) is not None else None,
+ attribute_value=AXValue.from_json(json['attributeValue']) if json.get('attributeValue', None) is not None else None,
+ superseded=bool(json['superseded']) if json.get('superseded', None) is not None else None,
+ native_source=AXValueNativeSourceType.from_json(json['nativeSource']) if json.get('nativeSource', None) is not None else None,
+ native_source_value=AXValue.from_json(json['nativeSourceValue']) if json.get('nativeSourceValue', None) is not None else None,
+ invalid=bool(json['invalid']) if json.get('invalid', None) is not None else None,
+ invalid_reason=str(json['invalidReason']) if json.get('invalidReason', None) is not None else None,
)
@@ -195,8 +195,8 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> AXRelatedNode:
return cls(
backend_dom_node_id=dom.BackendNodeId.from_json(json['backendDOMNodeId']),
- idref=str(json['idref']) if 'idref' in json else None,
- text=str(json['text']) if 'text' in json else None,
+ idref=str(json['idref']) if json.get('idref', None) is not None else None,
+ text=str(json['text']) if json.get('text', None) is not None else None,
)
@@ -254,9 +254,9 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> AXValue:
return cls(
type_=AXValueType.from_json(json['type']),
- value=json['value'] if 'value' in json else None,
- related_nodes=[AXRelatedNode.from_json(i) for i in json['relatedNodes']] if 'relatedNodes' in json else None,
- sources=[AXValueSource.from_json(i) for i in json['sources']] if 'sources' in json else None,
+ value=json['value'] if json.get('value', None) is not None else None,
+ related_nodes=[AXRelatedNode.from_json(i) for i in json['relatedNodes']] if json.get('relatedNodes', None) is not None else None,
+ sources=[AXValueSource.from_json(i) for i in json['sources']] if json.get('sources', None) is not None else None,
)
@@ -334,6 +334,9 @@ class AXNode:
#: This ``Node``'s role, whether explicit or implicit.
role: typing.Optional[AXValue] = None
+ #: This ``Node``'s Chrome raw role.
+ chrome_role: typing.Optional[AXValue] = None
+
#: The accessible name for this ``Node``.
name: typing.Optional[AXValue] = None
@@ -366,6 +369,8 @@ def to_json(self) -> T_JSON_DICT:
json['ignoredReasons'] = [i.to_json() for i in self.ignored_reasons]
if self.role is not None:
json['role'] = self.role.to_json()
+ if self.chrome_role is not None:
+ json['chromeRole'] = self.chrome_role.to_json()
if self.name is not None:
json['name'] = self.name.to_json()
if self.description is not None:
@@ -389,16 +394,17 @@ def from_json(cls, json: T_JSON_DICT) -> AXNode:
return cls(
node_id=AXNodeId.from_json(json['nodeId']),
ignored=bool(json['ignored']),
- ignored_reasons=[AXProperty.from_json(i) for i in json['ignoredReasons']] if 'ignoredReasons' in json else None,
- role=AXValue.from_json(json['role']) if 'role' in json else None,
- name=AXValue.from_json(json['name']) if 'name' in json else None,
- description=AXValue.from_json(json['description']) if 'description' in json else None,
- value=AXValue.from_json(json['value']) if 'value' in json else None,
- properties=[AXProperty.from_json(i) for i in json['properties']] if 'properties' in json else None,
- parent_id=AXNodeId.from_json(json['parentId']) if 'parentId' in json else None,
- child_ids=[AXNodeId.from_json(i) for i in json['childIds']] if 'childIds' in json else None,
- backend_dom_node_id=dom.BackendNodeId.from_json(json['backendDOMNodeId']) if 'backendDOMNodeId' in json else None,
- frame_id=page.FrameId.from_json(json['frameId']) if 'frameId' in json else None,
+ ignored_reasons=[AXProperty.from_json(i) for i in json['ignoredReasons']] if json.get('ignoredReasons', None) is not None else None,
+ role=AXValue.from_json(json['role']) if json.get('role', None) is not None else None,
+ chrome_role=AXValue.from_json(json['chromeRole']) if json.get('chromeRole', None) is not None else None,
+ name=AXValue.from_json(json['name']) if json.get('name', None) is not None else None,
+ description=AXValue.from_json(json['description']) if json.get('description', None) is not None else None,
+ value=AXValue.from_json(json['value']) if json.get('value', None) is not None else None,
+ properties=[AXProperty.from_json(i) for i in json['properties']] if json.get('properties', None) is not None else None,
+ parent_id=AXNodeId.from_json(json['parentId']) if json.get('parentId', None) is not None else None,
+ child_ids=[AXNodeId.from_json(i) for i in json['childIds']] if json.get('childIds', None) is not None else None,
+ backend_dom_node_id=dom.BackendNodeId.from_json(json['backendDOMNodeId']) if json.get('backendDOMNodeId', None) is not None else None,
+ frame_id=page.FrameId.from_json(json['frameId']) if json.get('frameId', None) is not None else None,
)
@@ -437,7 +443,7 @@ def get_partial_ax_tree(
:param node_id: *(Optional)* Identifier of the node to get the partial accessibility tree for.
:param backend_node_id: *(Optional)* Identifier of the backend node to get the partial accessibility tree for.
:param object_id: *(Optional)* JavaScript object id of the node wrapper to get the partial accessibility tree for.
- :param fetch_relatives: *(Optional)* Whether to fetch this nodes ancestors, siblings and children. Defaults to true.
+ :param fetch_relatives: *(Optional)* Whether to fetch this node's ancestors, siblings and children. Defaults to true.
:returns: The ``Accessibility.AXNode`` for this DOM node, if it exists, plus its ancestors, siblings and children, if requested.
'''
params: T_JSON_DICT = dict()
@@ -459,7 +465,6 @@ def get_partial_ax_tree(
def get_full_ax_tree(
depth: typing.Optional[int] = None,
- max_depth: typing.Optional[int] = None,
frame_id: typing.Optional[page.FrameId] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[AXNode]]:
'''
@@ -468,15 +473,12 @@ def get_full_ax_tree(
**EXPERIMENTAL**
:param depth: *(Optional)* The maximum depth at which descendants of the root node should be retrieved. If omitted, the full tree is returned.
- :param max_depth: **(DEPRECATED)** *(Optional)* Deprecated. This parameter has been renamed to ```depth```. If depth is not provided, max_depth will be used.
:param frame_id: *(Optional)* The frame for whose document the AX tree should be retrieved. If omited, the root frame is used.
:returns:
'''
params: T_JSON_DICT = dict()
if depth is not None:
params['depth'] = depth
- if max_depth is not None:
- params['max_depth'] = max_depth
if frame_id is not None:
params['frameId'] = frame_id.to_json()
cmd_dict: T_JSON_DICT = {
diff --git a/pycdp/cdp/animation.py b/pycdp/cdp/animation.py
index d4c625d..a967832 100644
--- a/pycdp/cdp/animation.py
+++ b/pycdp/cdp/animation.py
@@ -78,8 +78,8 @@ def from_json(cls, json: T_JSON_DICT) -> Animation:
start_time=float(json['startTime']),
current_time=float(json['currentTime']),
type_=str(json['type']),
- source=AnimationEffect.from_json(json['source']) if 'source' in json else None,
- css_id=str(json['cssId']) if 'cssId' in json else None,
+ source=AnimationEffect.from_json(json['source']) if json.get('source', None) is not None else None,
+ css_id=str(json['cssId']) if json.get('cssId', None) is not None else None,
)
@@ -145,8 +145,8 @@ def from_json(cls, json: T_JSON_DICT) -> AnimationEffect:
direction=str(json['direction']),
fill=str(json['fill']),
easing=str(json['easing']),
- backend_node_id=dom.BackendNodeId.from_json(json['backendNodeId']) if 'backendNodeId' in json else None,
- keyframes_rule=KeyframesRule.from_json(json['keyframesRule']) if 'keyframesRule' in json else None,
+ backend_node_id=dom.BackendNodeId.from_json(json['backendNodeId']) if json.get('backendNodeId', None) is not None else None,
+ keyframes_rule=KeyframesRule.from_json(json['keyframesRule']) if json.get('keyframesRule', None) is not None else None,
)
@@ -172,7 +172,7 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> KeyframesRule:
return cls(
keyframes=[KeyframeStyle.from_json(i) for i in json['keyframes']],
- name=str(json['name']) if 'name' in json else None,
+ name=str(json['name']) if json.get('name', None) is not None else None,
)
diff --git a/pycdp/cdp/audits.py b/pycdp/cdp/audits.py
index 0443193..2666e89 100644
--- a/pycdp/cdp/audits.py
+++ b/pycdp/cdp/audits.py
@@ -66,7 +66,7 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> AffectedRequest:
return cls(
request_id=network.RequestId.from_json(json['requestId']),
- url=str(json['url']) if 'url' in json else None,
+ url=str(json['url']) if json.get('url', None) is not None else None,
)
@@ -89,23 +89,25 @@ def from_json(cls, json: T_JSON_DICT) -> AffectedFrame:
)
-class SameSiteCookieExclusionReason(enum.Enum):
+class CookieExclusionReason(enum.Enum):
EXCLUDE_SAME_SITE_UNSPECIFIED_TREATED_AS_LAX = "ExcludeSameSiteUnspecifiedTreatedAsLax"
EXCLUDE_SAME_SITE_NONE_INSECURE = "ExcludeSameSiteNoneInsecure"
EXCLUDE_SAME_SITE_LAX = "ExcludeSameSiteLax"
EXCLUDE_SAME_SITE_STRICT = "ExcludeSameSiteStrict"
EXCLUDE_INVALID_SAME_PARTY = "ExcludeInvalidSameParty"
EXCLUDE_SAME_PARTY_CROSS_PARTY_CONTEXT = "ExcludeSamePartyCrossPartyContext"
+ EXCLUDE_DOMAIN_NON_ASCII = "ExcludeDomainNonASCII"
+ EXCLUDE_THIRD_PARTY_COOKIE_BLOCKED_IN_FIRST_PARTY_SET = "ExcludeThirdPartyCookieBlockedInFirstPartySet"
def to_json(self) -> str:
return self.value
@classmethod
- def from_json(cls, json: str) -> SameSiteCookieExclusionReason:
+ def from_json(cls, json: str) -> CookieExclusionReason:
return cls(json)
-class SameSiteCookieWarningReason(enum.Enum):
+class CookieWarningReason(enum.Enum):
WARN_SAME_SITE_UNSPECIFIED_CROSS_SITE_CONTEXT = "WarnSameSiteUnspecifiedCrossSiteContext"
WARN_SAME_SITE_NONE_INSECURE = "WarnSameSiteNoneInsecure"
WARN_SAME_SITE_UNSPECIFIED_LAX_ALLOW_UNSAFE = "WarnSameSiteUnspecifiedLaxAllowUnsafe"
@@ -114,16 +116,18 @@ class SameSiteCookieWarningReason(enum.Enum):
WARN_SAME_SITE_STRICT_CROSS_DOWNGRADE_LAX = "WarnSameSiteStrictCrossDowngradeLax"
WARN_SAME_SITE_LAX_CROSS_DOWNGRADE_STRICT = "WarnSameSiteLaxCrossDowngradeStrict"
WARN_SAME_SITE_LAX_CROSS_DOWNGRADE_LAX = "WarnSameSiteLaxCrossDowngradeLax"
+ WARN_ATTRIBUTE_VALUE_EXCEEDS_MAX_SIZE = "WarnAttributeValueExceedsMaxSize"
+ WARN_DOMAIN_NON_ASCII = "WarnDomainNonASCII"
def to_json(self) -> str:
return self.value
@classmethod
- def from_json(cls, json: str) -> SameSiteCookieWarningReason:
+ def from_json(cls, json: str) -> CookieWarningReason:
return cls(json)
-class SameSiteCookieOperation(enum.Enum):
+class CookieOperation(enum.Enum):
SET_COOKIE = "SetCookie"
READ_COOKIE = "ReadCookie"
@@ -131,24 +135,24 @@ def to_json(self) -> str:
return self.value
@classmethod
- def from_json(cls, json: str) -> SameSiteCookieOperation:
+ def from_json(cls, json: str) -> CookieOperation:
return cls(json)
@dataclass
-class SameSiteCookieIssueDetails:
+class CookieIssueDetails:
'''
This information is currently necessary, as the front-end has a difficult
time finding a specific cookie. With this, we can convey specific error
information without the cookie.
'''
- cookie_warning_reasons: typing.List[SameSiteCookieWarningReason]
+ cookie_warning_reasons: typing.List[CookieWarningReason]
- cookie_exclusion_reasons: typing.List[SameSiteCookieExclusionReason]
+ cookie_exclusion_reasons: typing.List[CookieExclusionReason]
#: Optionally identifies the site-for-cookies and the cookie url, which
#: may be used by the front-end as additional context.
- operation: SameSiteCookieOperation
+ operation: CookieOperation
#: If AffectedCookie is not set then rawCookieLine contains the raw
#: Set-Cookie header string. This hints at a problem where the
@@ -182,16 +186,16 @@ def to_json(self) -> T_JSON_DICT:
return json
@classmethod
- def from_json(cls, json: T_JSON_DICT) -> SameSiteCookieIssueDetails:
+ def from_json(cls, json: T_JSON_DICT) -> CookieIssueDetails:
return cls(
- cookie_warning_reasons=[SameSiteCookieWarningReason.from_json(i) for i in json['cookieWarningReasons']],
- cookie_exclusion_reasons=[SameSiteCookieExclusionReason.from_json(i) for i in json['cookieExclusionReasons']],
- operation=SameSiteCookieOperation.from_json(json['operation']),
- cookie=AffectedCookie.from_json(json['cookie']) if 'cookie' in json else None,
- raw_cookie_line=str(json['rawCookieLine']) if 'rawCookieLine' in json else None,
- site_for_cookies=str(json['siteForCookies']) if 'siteForCookies' in json else None,
- cookie_url=str(json['cookieUrl']) if 'cookieUrl' in json else None,
- request=AffectedRequest.from_json(json['request']) if 'request' in json else None,
+ cookie_warning_reasons=[CookieWarningReason.from_json(i) for i in json['cookieWarningReasons']],
+ cookie_exclusion_reasons=[CookieExclusionReason.from_json(i) for i in json['cookieExclusionReasons']],
+ operation=CookieOperation.from_json(json['operation']),
+ cookie=AffectedCookie.from_json(json['cookie']) if json.get('cookie', None) is not None else None,
+ raw_cookie_line=str(json['rawCookieLine']) if json.get('rawCookieLine', None) is not None else None,
+ site_for_cookies=str(json['siteForCookies']) if json.get('siteForCookies', None) is not None else None,
+ cookie_url=str(json['cookieUrl']) if json.get('cookieUrl', None) is not None else None,
+ request=AffectedRequest.from_json(json['request']) if json.get('request', None) is not None else None,
)
@@ -209,6 +213,7 @@ def from_json(cls, json: str) -> MixedContentResolutionStatus:
class MixedContentResourceType(enum.Enum):
+ ATTRIBUTION_SRC = "AttributionSrc"
AUDIO = "Audio"
BEACON = "Beacon"
CSP_REPORT = "CSPReport"
@@ -287,9 +292,9 @@ def from_json(cls, json: T_JSON_DICT) -> MixedContentIssueDetails:
resolution_status=MixedContentResolutionStatus.from_json(json['resolutionStatus']),
insecure_url=str(json['insecureURL']),
main_resource_url=str(json['mainResourceURL']),
- resource_type=MixedContentResourceType.from_json(json['resourceType']) if 'resourceType' in json else None,
- request=AffectedRequest.from_json(json['request']) if 'request' in json else None,
- frame=AffectedFrame.from_json(json['frame']) if 'frame' in json else None,
+ resource_type=MixedContentResourceType.from_json(json['resourceType']) if json.get('resourceType', None) is not None else None,
+ request=AffectedRequest.from_json(json['request']) if json.get('request', None) is not None else None,
+ frame=AffectedFrame.from_json(json['frame']) if json.get('frame', None) is not None else None,
)
@@ -342,8 +347,8 @@ def from_json(cls, json: T_JSON_DICT) -> BlockedByResponseIssueDetails:
return cls(
request=AffectedRequest.from_json(json['request']),
reason=BlockedByResponseReason.from_json(json['reason']),
- parent_frame=AffectedFrame.from_json(json['parentFrame']) if 'parentFrame' in json else None,
- blocked_frame=AffectedFrame.from_json(json['blockedFrame']) if 'blockedFrame' in json else None,
+ parent_frame=AffectedFrame.from_json(json['parentFrame']) if json.get('parentFrame', None) is not None else None,
+ blocked_frame=AffectedFrame.from_json(json['blockedFrame']) if json.get('blockedFrame', None) is not None else None,
)
@@ -440,7 +445,7 @@ def from_json(cls, json: T_JSON_DICT) -> SourceCodeLocation:
url=str(json['url']),
line_number=int(json['lineNumber']),
column_number=int(json['columnNumber']),
- script_id=runtime.ScriptId.from_json(json['scriptId']) if 'scriptId' in json else None,
+ script_id=runtime.ScriptId.from_json(json['scriptId']) if json.get('scriptId', None) is not None else None,
)
@@ -483,10 +488,10 @@ def from_json(cls, json: T_JSON_DICT) -> ContentSecurityPolicyIssueDetails:
violated_directive=str(json['violatedDirective']),
is_report_only=bool(json['isReportOnly']),
content_security_policy_violation_type=ContentSecurityPolicyViolationType.from_json(json['contentSecurityPolicyViolationType']),
- blocked_url=str(json['blockedURL']) if 'blockedURL' in json else None,
- frame_ancestor=AffectedFrame.from_json(json['frameAncestor']) if 'frameAncestor' in json else None,
- source_code_location=SourceCodeLocation.from_json(json['sourceCodeLocation']) if 'sourceCodeLocation' in json else None,
- violating_node_id=dom.BackendNodeId.from_json(json['violatingNodeId']) if 'violatingNodeId' in json else None,
+ blocked_url=str(json['blockedURL']) if json.get('blockedURL', None) is not None else None,
+ frame_ancestor=AffectedFrame.from_json(json['frameAncestor']) if json.get('frameAncestor', None) is not None else None,
+ source_code_location=SourceCodeLocation.from_json(json['sourceCodeLocation']) if json.get('sourceCodeLocation', None) is not None else None,
+ violating_node_id=dom.BackendNodeId.from_json(json['violatingNodeId']) if json.get('violatingNodeId', None) is not None else None,
)
@@ -577,9 +582,9 @@ def from_json(cls, json: T_JSON_DICT) -> TrustedWebActivityIssueDetails:
return cls(
url=str(json['url']),
violation_type=TwaQualityEnforcementViolationType.from_json(json['violationType']),
- http_status_code=int(json['httpStatusCode']) if 'httpStatusCode' in json else None,
- package_name=str(json['packageName']) if 'packageName' in json else None,
- signature=str(json['signature']) if 'signature' in json else None,
+ http_status_code=int(json['httpStatusCode']) if json.get('httpStatusCode', None) is not None else None,
+ package_name=str(json['packageName']) if json.get('packageName', None) is not None else None,
+ signature=str(json['signature']) if json.get('signature', None) is not None else None,
)
@@ -664,26 +669,28 @@ def from_json(cls, json: T_JSON_DICT) -> CorsIssueDetails:
cors_error_status=network.CorsErrorStatus.from_json(json['corsErrorStatus']),
is_warning=bool(json['isWarning']),
request=AffectedRequest.from_json(json['request']),
- location=SourceCodeLocation.from_json(json['location']) if 'location' in json else None,
- initiator_origin=str(json['initiatorOrigin']) if 'initiatorOrigin' in json else None,
- resource_ip_address_space=network.IPAddressSpace.from_json(json['resourceIPAddressSpace']) if 'resourceIPAddressSpace' in json else None,
- client_security_state=network.ClientSecurityState.from_json(json['clientSecurityState']) if 'clientSecurityState' in json else None,
+ location=SourceCodeLocation.from_json(json['location']) if json.get('location', None) is not None else None,
+ initiator_origin=str(json['initiatorOrigin']) if json.get('initiatorOrigin', None) is not None else None,
+ resource_ip_address_space=network.IPAddressSpace.from_json(json['resourceIPAddressSpace']) if json.get('resourceIPAddressSpace', None) is not None else None,
+ client_security_state=network.ClientSecurityState.from_json(json['clientSecurityState']) if json.get('clientSecurityState', None) is not None else None,
)
class AttributionReportingIssueType(enum.Enum):
PERMISSION_POLICY_DISABLED = "PermissionPolicyDisabled"
- INVALID_ATTRIBUTION_SOURCE_EVENT_ID = "InvalidAttributionSourceEventId"
- INVALID_ATTRIBUTION_DATA = "InvalidAttributionData"
- ATTRIBUTION_SOURCE_UNTRUSTWORTHY_ORIGIN = "AttributionSourceUntrustworthyOrigin"
- ATTRIBUTION_UNTRUSTWORTHY_ORIGIN = "AttributionUntrustworthyOrigin"
- ATTRIBUTION_TRIGGER_DATA_TOO_LARGE = "AttributionTriggerDataTooLarge"
- ATTRIBUTION_EVENT_SOURCE_TRIGGER_DATA_TOO_LARGE = "AttributionEventSourceTriggerDataTooLarge"
- INVALID_ATTRIBUTION_SOURCE_EXPIRY = "InvalidAttributionSourceExpiry"
- INVALID_ATTRIBUTION_SOURCE_PRIORITY = "InvalidAttributionSourcePriority"
- INVALID_EVENT_SOURCE_TRIGGER_DATA = "InvalidEventSourceTriggerData"
- INVALID_TRIGGER_PRIORITY = "InvalidTriggerPriority"
- INVALID_TRIGGER_DEDUP_KEY = "InvalidTriggerDedupKey"
+ UNTRUSTWORTHY_REPORTING_ORIGIN = "UntrustworthyReportingOrigin"
+ INSECURE_CONTEXT = "InsecureContext"
+ INVALID_HEADER = "InvalidHeader"
+ INVALID_REGISTER_TRIGGER_HEADER = "InvalidRegisterTriggerHeader"
+ INVALID_ELIGIBLE_HEADER = "InvalidEligibleHeader"
+ SOURCE_AND_TRIGGER_HEADERS = "SourceAndTriggerHeaders"
+ SOURCE_IGNORED = "SourceIgnored"
+ TRIGGER_IGNORED = "TriggerIgnored"
+ OS_SOURCE_IGNORED = "OsSourceIgnored"
+ OS_TRIGGER_IGNORED = "OsTriggerIgnored"
+ INVALID_REGISTER_OS_SOURCE_HEADER = "InvalidRegisterOsSourceHeader"
+ INVALID_REGISTER_OS_TRIGGER_HEADER = "InvalidRegisterOsTriggerHeader"
+ WEB_AND_OS_HEADERS = "WebAndOsHeaders"
def to_json(self) -> str:
return self.value
@@ -697,12 +704,10 @@ def from_json(cls, json: str) -> AttributionReportingIssueType:
class AttributionReportingIssueDetails:
'''
Details for issues around "Attribution Reporting API" usage.
- Explainer: https://github.com/WICG/conversion-measurement-api
+ Explainer: https://github.com/WICG/attribution-reporting-api
'''
violation_type: AttributionReportingIssueType
- frame: typing.Optional[AffectedFrame] = None
-
request: typing.Optional[AffectedRequest] = None
violating_node_id: typing.Optional[dom.BackendNodeId] = None
@@ -712,8 +717,6 @@ class AttributionReportingIssueDetails:
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['violationType'] = self.violation_type.to_json()
- if self.frame is not None:
- json['frame'] = self.frame.to_json()
if self.request is not None:
json['request'] = self.request.to_json()
if self.violating_node_id is not None:
@@ -726,10 +729,9 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> AttributionReportingIssueDetails:
return cls(
violation_type=AttributionReportingIssueType.from_json(json['violationType']),
- frame=AffectedFrame.from_json(json['frame']) if 'frame' in json else None,
- request=AffectedRequest.from_json(json['request']) if 'request' in json else None,
- violating_node_id=dom.BackendNodeId.from_json(json['violatingNodeId']) if 'violatingNodeId' in json else None,
- invalid_parameter=str(json['invalidParameter']) if 'invalidParameter' in json else None,
+ request=AffectedRequest.from_json(json['request']) if json.get('request', None) is not None else None,
+ violating_node_id=dom.BackendNodeId.from_json(json['violatingNodeId']) if json.get('violatingNodeId', None) is not None else None,
+ invalid_parameter=str(json['invalidParameter']) if json.get('invalidParameter', None) is not None else None,
)
@@ -788,12 +790,22 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> NavigatorUserAgentIssueDetails:
return cls(
url=str(json['url']),
- location=SourceCodeLocation.from_json(json['location']) if 'location' in json else None,
+ location=SourceCodeLocation.from_json(json['location']) if json.get('location', None) is not None else None,
)
class GenericIssueErrorType(enum.Enum):
CROSS_ORIGIN_PORTAL_POST_MESSAGE_ERROR = "CrossOriginPortalPostMessageError"
+ FORM_LABEL_FOR_NAME_ERROR = "FormLabelForNameError"
+ FORM_DUPLICATE_ID_FOR_INPUT_ERROR = "FormDuplicateIdForInputError"
+ FORM_INPUT_WITH_NO_LABEL_ERROR = "FormInputWithNoLabelError"
+ FORM_AUTOCOMPLETE_ATTRIBUTE_EMPTY_ERROR = "FormAutocompleteAttributeEmptyError"
+ FORM_EMPTY_ID_AND_NAME_ATTRIBUTES_FOR_INPUT_ERROR = "FormEmptyIdAndNameAttributesForInputError"
+ FORM_ARIA_LABELLED_BY_TO_NON_EXISTING_ID = "FormAriaLabelledByToNonExistingId"
+ FORM_INPUT_ASSIGNED_AUTOCOMPLETE_VALUE_TO_ID_OR_NAME_ATTRIBUTE_ERROR = "FormInputAssignedAutocompleteValueToIdOrNameAttributeError"
+ FORM_LABEL_HAS_NEITHER_FOR_NOR_NESTED_INPUT = "FormLabelHasNeitherForNorNestedInput"
+ FORM_LABEL_FOR_MATCHES_NON_EXISTING_ID_ERROR = "FormLabelForMatchesNonExistingIdError"
+ FORM_INPUT_HAS_WRONG_BUT_WELL_INTENDED_AUTOCOMPLETE_VALUE_ERROR = "FormInputHasWrongButWellIntendedAutocompleteValueError"
def to_json(self) -> str:
return self.value
@@ -813,18 +825,28 @@ class GenericIssueDetails:
frame_id: typing.Optional[page.FrameId] = None
+ violating_node_id: typing.Optional[dom.BackendNodeId] = None
+
+ violating_node_attribute: typing.Optional[str] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['errorType'] = self.error_type.to_json()
if self.frame_id is not None:
json['frameId'] = self.frame_id.to_json()
+ if self.violating_node_id is not None:
+ json['violatingNodeId'] = self.violating_node_id.to_json()
+ if self.violating_node_attribute is not None:
+ json['violatingNodeAttribute'] = self.violating_node_attribute
return json
@classmethod
def from_json(cls, json: T_JSON_DICT) -> GenericIssueDetails:
return cls(
error_type=GenericIssueErrorType.from_json(json['errorType']),
- frame_id=page.FrameId.from_json(json['frameId']) if 'frameId' in json else None,
+ frame_id=page.FrameId.from_json(json['frameId']) if json.get('frameId', None) is not None else None,
+ violating_node_id=dom.BackendNodeId.from_json(json['violatingNodeId']) if json.get('violatingNodeId', None) is not None else None,
+ violating_node_attribute=str(json['violatingNodeAttribute']) if json.get('violatingNodeAttribute', None) is not None else None,
)
@@ -832,41 +854,52 @@ def from_json(cls, json: T_JSON_DICT) -> GenericIssueDetails:
class DeprecationIssueDetails:
'''
This issue tracks information needed to print a deprecation message.
- The formatting is inherited from the old console.log version, see more at:
- https://source.chromium.org/chromium/chromium/src/+/main:third_party/blink/renderer/core/frame/deprecation.cc
- TODO(crbug.com/1264960): Re-work format to add i18n support per:
- https://source.chromium.org/chromium/chromium/src/+/main:third_party/blink/public/devtools_protocol/README.md
+ https://source.chromium.org/chromium/chromium/src/+/main:third_party/blink/renderer/core/frame/third_party/blink/renderer/core/frame/deprecation/README.md
'''
source_code_location: SourceCodeLocation
- deprecation_type: str
+ #: One of the deprecation names from third_party/blink/renderer/core/frame/deprecation/deprecation.json5
+ type_: str
affected_frame: typing.Optional[AffectedFrame] = None
- #: The content of the deprecation issue (this won't be translated),
- #: e.g. "window.inefficientLegacyStorageMethod will be removed in M97,
- #: around January 2022. Please use Web Storage or Indexed Database
- #: instead. This standard was abandoned in January, 1970. See
- #: https://www.chromestatus.com/feature/5684870116278272 for more details."
- message: typing.Optional[str] = None
-
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['sourceCodeLocation'] = self.source_code_location.to_json()
- json['deprecationType'] = self.deprecation_type
+ json['type'] = self.type_
if self.affected_frame is not None:
json['affectedFrame'] = self.affected_frame.to_json()
- if self.message is not None:
- json['message'] = self.message
return json
@classmethod
def from_json(cls, json: T_JSON_DICT) -> DeprecationIssueDetails:
return cls(
source_code_location=SourceCodeLocation.from_json(json['sourceCodeLocation']),
- deprecation_type=str(json['deprecationType']),
- affected_frame=AffectedFrame.from_json(json['affectedFrame']) if 'affectedFrame' in json else None,
- message=str(json['message']) if 'message' in json else None,
+ type_=str(json['type']),
+ affected_frame=AffectedFrame.from_json(json['affectedFrame']) if json.get('affectedFrame', None) is not None else None,
+ )
+
+
+@dataclass
+class BounceTrackingIssueDetails:
+ '''
+ This issue warns about sites in the redirect chain of a finished navigation
+ that may be flagged as trackers and have their state cleared if they don't
+ receive a user interaction. Note that in this context 'site' means eTLD+1.
+ For example, if the URL ``https://example.test:80/bounce`` was in the
+ redirect chain, the site reported would be ``example.test``.
+ '''
+ tracking_sites: typing.List[str]
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['trackingSites'] = [i for i in self.tracking_sites]
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> BounceTrackingIssueDetails:
+ return cls(
+ tracking_sites=[str(i) for i in json['trackingSites']],
)
@@ -902,28 +935,42 @@ class FederatedAuthRequestIssueReason(enum.Enum):
'''
Represents the failure reason when a federated authentication reason fails.
Should be updated alongside RequestIdTokenStatus in
- third_party/blink/public/mojom/webid/federated_auth_request.mojom to include
+ third_party/blink/public/mojom/devtools/inspector_issue.mojom to include
all cases except for success.
'''
- APPROVAL_DECLINED = "ApprovalDeclined"
+ SHOULD_EMBARGO = "ShouldEmbargo"
TOO_MANY_REQUESTS = "TooManyRequests"
- MANIFEST_HTTP_NOT_FOUND = "ManifestHttpNotFound"
- MANIFEST_NO_RESPONSE = "ManifestNoResponse"
- MANIFEST_INVALID_RESPONSE = "ManifestInvalidResponse"
+ WELL_KNOWN_HTTP_NOT_FOUND = "WellKnownHttpNotFound"
+ WELL_KNOWN_NO_RESPONSE = "WellKnownNoResponse"
+ WELL_KNOWN_INVALID_RESPONSE = "WellKnownInvalidResponse"
+ WELL_KNOWN_LIST_EMPTY = "WellKnownListEmpty"
+ WELL_KNOWN_INVALID_CONTENT_TYPE = "WellKnownInvalidContentType"
+ CONFIG_NOT_IN_WELL_KNOWN = "ConfigNotInWellKnown"
+ WELL_KNOWN_TOO_BIG = "WellKnownTooBig"
+ CONFIG_HTTP_NOT_FOUND = "ConfigHttpNotFound"
+ CONFIG_NO_RESPONSE = "ConfigNoResponse"
+ CONFIG_INVALID_RESPONSE = "ConfigInvalidResponse"
+ CONFIG_INVALID_CONTENT_TYPE = "ConfigInvalidContentType"
CLIENT_METADATA_HTTP_NOT_FOUND = "ClientMetadataHttpNotFound"
CLIENT_METADATA_NO_RESPONSE = "ClientMetadataNoResponse"
CLIENT_METADATA_INVALID_RESPONSE = "ClientMetadataInvalidResponse"
+ CLIENT_METADATA_INVALID_CONTENT_TYPE = "ClientMetadataInvalidContentType"
+ DISABLED_IN_SETTINGS = "DisabledInSettings"
ERROR_FETCHING_SIGNIN = "ErrorFetchingSignin"
INVALID_SIGNIN_RESPONSE = "InvalidSigninResponse"
ACCOUNTS_HTTP_NOT_FOUND = "AccountsHttpNotFound"
ACCOUNTS_NO_RESPONSE = "AccountsNoResponse"
ACCOUNTS_INVALID_RESPONSE = "AccountsInvalidResponse"
+ ACCOUNTS_LIST_EMPTY = "AccountsListEmpty"
+ ACCOUNTS_INVALID_CONTENT_TYPE = "AccountsInvalidContentType"
ID_TOKEN_HTTP_NOT_FOUND = "IdTokenHttpNotFound"
ID_TOKEN_NO_RESPONSE = "IdTokenNoResponse"
ID_TOKEN_INVALID_RESPONSE = "IdTokenInvalidResponse"
ID_TOKEN_INVALID_REQUEST = "IdTokenInvalidRequest"
+ ID_TOKEN_INVALID_CONTENT_TYPE = "IdTokenInvalidContentType"
ERROR_ID_TOKEN = "ErrorIdToken"
CANCELED = "Canceled"
+ RP_PAGE_NOT_VISIBLE = "RpPageNotVisible"
def to_json(self) -> str:
return self.value
@@ -963,7 +1010,7 @@ class InspectorIssueCode(enum.Enum):
optional fields in InspectorIssueDetails to convey more specific
information about the kind of issue.
'''
- SAME_SITE_COOKIE_ISSUE = "SameSiteCookieIssue"
+ COOKIE_ISSUE = "CookieIssue"
MIXED_CONTENT_ISSUE = "MixedContentIssue"
BLOCKED_BY_RESPONSE_ISSUE = "BlockedByResponseIssue"
HEAVY_AD_ISSUE = "HeavyAdIssue"
@@ -979,6 +1026,7 @@ class InspectorIssueCode(enum.Enum):
DEPRECATION_ISSUE = "DeprecationIssue"
CLIENT_HINT_ISSUE = "ClientHintIssue"
FEDERATED_AUTH_REQUEST_ISSUE = "FederatedAuthRequestIssue"
+ BOUNCE_TRACKING_ISSUE = "BounceTrackingIssue"
def to_json(self) -> str:
return self.value
@@ -995,7 +1043,7 @@ class InspectorIssueDetails:
specific to the kind of issue. When adding a new issue code, please also
add a new optional field to this type.
'''
- same_site_cookie_issue_details: typing.Optional[SameSiteCookieIssueDetails] = None
+ cookie_issue_details: typing.Optional[CookieIssueDetails] = None
mixed_content_issue_details: typing.Optional[MixedContentIssueDetails] = None
@@ -1027,10 +1075,12 @@ class InspectorIssueDetails:
federated_auth_request_issue_details: typing.Optional[FederatedAuthRequestIssueDetails] = None
+ bounce_tracking_issue_details: typing.Optional[BounceTrackingIssueDetails] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
- if self.same_site_cookie_issue_details is not None:
- json['sameSiteCookieIssueDetails'] = self.same_site_cookie_issue_details.to_json()
+ if self.cookie_issue_details is not None:
+ json['cookieIssueDetails'] = self.cookie_issue_details.to_json()
if self.mixed_content_issue_details is not None:
json['mixedContentIssueDetails'] = self.mixed_content_issue_details.to_json()
if self.blocked_by_response_issue_details is not None:
@@ -1061,27 +1111,30 @@ def to_json(self) -> T_JSON_DICT:
json['clientHintIssueDetails'] = self.client_hint_issue_details.to_json()
if self.federated_auth_request_issue_details is not None:
json['federatedAuthRequestIssueDetails'] = self.federated_auth_request_issue_details.to_json()
+ if self.bounce_tracking_issue_details is not None:
+ json['bounceTrackingIssueDetails'] = self.bounce_tracking_issue_details.to_json()
return json
@classmethod
def from_json(cls, json: T_JSON_DICT) -> InspectorIssueDetails:
return cls(
- same_site_cookie_issue_details=SameSiteCookieIssueDetails.from_json(json['sameSiteCookieIssueDetails']) if 'sameSiteCookieIssueDetails' in json else None,
- mixed_content_issue_details=MixedContentIssueDetails.from_json(json['mixedContentIssueDetails']) if 'mixedContentIssueDetails' in json else None,
- blocked_by_response_issue_details=BlockedByResponseIssueDetails.from_json(json['blockedByResponseIssueDetails']) if 'blockedByResponseIssueDetails' in json else None,
- heavy_ad_issue_details=HeavyAdIssueDetails.from_json(json['heavyAdIssueDetails']) if 'heavyAdIssueDetails' in json else None,
- content_security_policy_issue_details=ContentSecurityPolicyIssueDetails.from_json(json['contentSecurityPolicyIssueDetails']) if 'contentSecurityPolicyIssueDetails' in json else None,
- shared_array_buffer_issue_details=SharedArrayBufferIssueDetails.from_json(json['sharedArrayBufferIssueDetails']) if 'sharedArrayBufferIssueDetails' in json else None,
- twa_quality_enforcement_details=TrustedWebActivityIssueDetails.from_json(json['twaQualityEnforcementDetails']) if 'twaQualityEnforcementDetails' in json else None,
- low_text_contrast_issue_details=LowTextContrastIssueDetails.from_json(json['lowTextContrastIssueDetails']) if 'lowTextContrastIssueDetails' in json else None,
- cors_issue_details=CorsIssueDetails.from_json(json['corsIssueDetails']) if 'corsIssueDetails' in json else None,
- attribution_reporting_issue_details=AttributionReportingIssueDetails.from_json(json['attributionReportingIssueDetails']) if 'attributionReportingIssueDetails' in json else None,
- quirks_mode_issue_details=QuirksModeIssueDetails.from_json(json['quirksModeIssueDetails']) if 'quirksModeIssueDetails' in json else None,
- navigator_user_agent_issue_details=NavigatorUserAgentIssueDetails.from_json(json['navigatorUserAgentIssueDetails']) if 'navigatorUserAgentIssueDetails' in json else None,
- generic_issue_details=GenericIssueDetails.from_json(json['genericIssueDetails']) if 'genericIssueDetails' in json else None,
- deprecation_issue_details=DeprecationIssueDetails.from_json(json['deprecationIssueDetails']) if 'deprecationIssueDetails' in json else None,
- client_hint_issue_details=ClientHintIssueDetails.from_json(json['clientHintIssueDetails']) if 'clientHintIssueDetails' in json else None,
- federated_auth_request_issue_details=FederatedAuthRequestIssueDetails.from_json(json['federatedAuthRequestIssueDetails']) if 'federatedAuthRequestIssueDetails' in json else None,
+ cookie_issue_details=CookieIssueDetails.from_json(json['cookieIssueDetails']) if json.get('cookieIssueDetails', None) is not None else None,
+ mixed_content_issue_details=MixedContentIssueDetails.from_json(json['mixedContentIssueDetails']) if json.get('mixedContentIssueDetails', None) is not None else None,
+ blocked_by_response_issue_details=BlockedByResponseIssueDetails.from_json(json['blockedByResponseIssueDetails']) if json.get('blockedByResponseIssueDetails', None) is not None else None,
+ heavy_ad_issue_details=HeavyAdIssueDetails.from_json(json['heavyAdIssueDetails']) if json.get('heavyAdIssueDetails', None) is not None else None,
+ content_security_policy_issue_details=ContentSecurityPolicyIssueDetails.from_json(json['contentSecurityPolicyIssueDetails']) if json.get('contentSecurityPolicyIssueDetails', None) is not None else None,
+ shared_array_buffer_issue_details=SharedArrayBufferIssueDetails.from_json(json['sharedArrayBufferIssueDetails']) if json.get('sharedArrayBufferIssueDetails', None) is not None else None,
+ twa_quality_enforcement_details=TrustedWebActivityIssueDetails.from_json(json['twaQualityEnforcementDetails']) if json.get('twaQualityEnforcementDetails', None) is not None else None,
+ low_text_contrast_issue_details=LowTextContrastIssueDetails.from_json(json['lowTextContrastIssueDetails']) if json.get('lowTextContrastIssueDetails', None) is not None else None,
+ cors_issue_details=CorsIssueDetails.from_json(json['corsIssueDetails']) if json.get('corsIssueDetails', None) is not None else None,
+ attribution_reporting_issue_details=AttributionReportingIssueDetails.from_json(json['attributionReportingIssueDetails']) if json.get('attributionReportingIssueDetails', None) is not None else None,
+ quirks_mode_issue_details=QuirksModeIssueDetails.from_json(json['quirksModeIssueDetails']) if json.get('quirksModeIssueDetails', None) is not None else None,
+ navigator_user_agent_issue_details=NavigatorUserAgentIssueDetails.from_json(json['navigatorUserAgentIssueDetails']) if json.get('navigatorUserAgentIssueDetails', None) is not None else None,
+ generic_issue_details=GenericIssueDetails.from_json(json['genericIssueDetails']) if json.get('genericIssueDetails', None) is not None else None,
+ deprecation_issue_details=DeprecationIssueDetails.from_json(json['deprecationIssueDetails']) if json.get('deprecationIssueDetails', None) is not None else None,
+ client_hint_issue_details=ClientHintIssueDetails.from_json(json['clientHintIssueDetails']) if json.get('clientHintIssueDetails', None) is not None else None,
+ federated_auth_request_issue_details=FederatedAuthRequestIssueDetails.from_json(json['federatedAuthRequestIssueDetails']) if json.get('federatedAuthRequestIssueDetails', None) is not None else None,
+ bounce_tracking_issue_details=BounceTrackingIssueDetails.from_json(json['bounceTrackingIssueDetails']) if json.get('bounceTrackingIssueDetails', None) is not None else None,
)
@@ -1127,7 +1180,7 @@ def from_json(cls, json: T_JSON_DICT) -> InspectorIssue:
return cls(
code=InspectorIssueCode.from_json(json['code']),
details=InspectorIssueDetails.from_json(json['details']),
- issue_id=IssueId.from_json(json['issueId']) if 'issueId' in json else None,
+ issue_id=IssueId.from_json(json['issueId']) if json.get('issueId', None) is not None else None,
)
@@ -1164,7 +1217,7 @@ def get_encoded_response(
}
json = yield cmd_dict
return (
- str(json['body']) if 'body' in json else None,
+ str(json['body']) if json.get('body', None) is not None else None,
int(json['originalSize']),
int(json['encodedSize'])
)
diff --git a/pycdp/cdp/background_service.py b/pycdp/cdp/background_service.py
index f73331f..8fa4e98 100644
--- a/pycdp/cdp/background_service.py
+++ b/pycdp/cdp/background_service.py
@@ -82,6 +82,9 @@ class BackgroundServiceEvent:
#: A list of event-specific information.
event_metadata: typing.List[EventMetadata]
+ #: Storage key this event belongs to.
+ storage_key: str
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['timestamp'] = self.timestamp.to_json()
@@ -91,6 +94,7 @@ def to_json(self) -> T_JSON_DICT:
json['eventName'] = self.event_name
json['instanceId'] = self.instance_id
json['eventMetadata'] = [i.to_json() for i in self.event_metadata]
+ json['storageKey'] = self.storage_key
return json
@classmethod
@@ -103,6 +107,7 @@ def from_json(cls, json: T_JSON_DICT) -> BackgroundServiceEvent:
event_name=str(json['eventName']),
instance_id=str(json['instanceId']),
event_metadata=[EventMetadata.from_json(i) for i in json['eventMetadata']],
+ storage_key=str(json['storageKey']),
)
diff --git a/pycdp/cdp/browser.py b/pycdp/cdp/browser.py
index 1646a1d..ee45c7d 100644
--- a/pycdp/cdp/browser.py
+++ b/pycdp/cdp/browser.py
@@ -93,11 +93,11 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> Bounds:
return cls(
- left=int(json['left']) if 'left' in json else None,
- top=int(json['top']) if 'top' in json else None,
- width=int(json['width']) if 'width' in json else None,
- height=int(json['height']) if 'height' in json else None,
- window_state=WindowState.from_json(json['windowState']) if 'windowState' in json else None,
+ left=int(json['left']) if json.get('left', None) is not None else None,
+ top=int(json['top']) if json.get('top', None) is not None else None,
+ width=int(json['width']) if json.get('width', None) is not None else None,
+ height=int(json['height']) if json.get('height', None) is not None else None,
+ window_state=WindowState.from_json(json['windowState']) if json.get('windowState', None) is not None else None,
)
@@ -112,6 +112,8 @@ class PermissionType(enum.Enum):
DURABLE_STORAGE = "durableStorage"
FLASH = "flash"
GEOLOCATION = "geolocation"
+ IDLE_DETECTION = "idleDetection"
+ LOCAL_FONTS = "localFonts"
MIDI = "midi"
MIDI_SYSEX = "midiSysex"
NFC = "nfc"
@@ -120,11 +122,13 @@ class PermissionType(enum.Enum):
PERIODIC_BACKGROUND_SYNC = "periodicBackgroundSync"
PROTECTED_MEDIA_IDENTIFIER = "protectedMediaIdentifier"
SENSORS = "sensors"
+ STORAGE_ACCESS = "storageAccess"
+ TOP_LEVEL_STORAGE_ACCESS = "topLevelStorageAccess"
VIDEO_CAPTURE = "videoCapture"
VIDEO_CAPTURE_PAN_TILT_ZOOM = "videoCapturePanTiltZoom"
- IDLE_DETECTION = "idleDetection"
WAKE_LOCK_SCREEN = "wakeLockScreen"
WAKE_LOCK_SYSTEM = "wakeLockSystem"
+ WINDOW_MANAGEMENT = "windowManagement"
def to_json(self) -> str:
return self.value
@@ -187,10 +191,10 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> PermissionDescriptor:
return cls(
name=str(json['name']),
- sysex=bool(json['sysex']) if 'sysex' in json else None,
- user_visible_only=bool(json['userVisibleOnly']) if 'userVisibleOnly' in json else None,
- allow_without_sanitization=bool(json['allowWithoutSanitization']) if 'allowWithoutSanitization' in json else None,
- pan_tilt_zoom=bool(json['panTiltZoom']) if 'panTiltZoom' in json else None,
+ sysex=bool(json['sysex']) if json.get('sysex', None) is not None else None,
+ user_visible_only=bool(json['userVisibleOnly']) if json.get('userVisibleOnly', None) is not None else None,
+ allow_without_sanitization=bool(json['allowWithoutSanitization']) if json.get('allowWithoutSanitization', None) is not None else None,
+ pan_tilt_zoom=bool(json['panTiltZoom']) if json.get('panTiltZoom', None) is not None else None,
)
@@ -490,7 +494,7 @@ def get_histograms(
**EXPERIMENTAL**
:param query: *(Optional)* Requested substring in name. Only histograms which have query as a substring in their name are extracted. An empty or absent query returns all histograms.
- :param delta: *(Optional)* If true, retrieve delta since last call.
+ :param delta: *(Optional)* If true, retrieve delta since last delta call.
:returns: Histograms.
'''
params: T_JSON_DICT = dict()
@@ -516,7 +520,7 @@ def get_histogram(
**EXPERIMENTAL**
:param name: Requested histogram name.
- :param delta: *(Optional)* If true, retrieve delta since last call.
+ :param delta: *(Optional)* If true, retrieve delta since last delta call.
:returns: Histogram.
'''
params: T_JSON_DICT = dict()
diff --git a/pycdp/cdp/cache_storage.py b/pycdp/cdp/cache_storage.py
index 0110ac7..9f7e533 100644
--- a/pycdp/cdp/cache_storage.py
+++ b/pycdp/cdp/cache_storage.py
@@ -112,6 +112,9 @@ class Cache:
#: Security origin of the cache.
security_origin: str
+ #: Storage key of the cache.
+ storage_key: str
+
#: The name of the cache.
cache_name: str
@@ -119,6 +122,7 @@ def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['cacheId'] = self.cache_id.to_json()
json['securityOrigin'] = self.security_origin
+ json['storageKey'] = self.storage_key
json['cacheName'] = self.cache_name
return json
@@ -127,6 +131,7 @@ def from_json(cls, json: T_JSON_DICT) -> Cache:
return cls(
cache_id=CacheId.from_json(json['cacheId']),
security_origin=str(json['securityOrigin']),
+ storage_key=str(json['storageKey']),
cache_name=str(json['cacheName']),
)
@@ -209,16 +214,21 @@ def delete_entry(
def request_cache_names(
- security_origin: str
+ security_origin: typing.Optional[str] = None,
+ storage_key: typing.Optional[str] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[Cache]]:
'''
Requests cache names.
- :param security_origin: Security origin.
+ :param security_origin: *(Optional)* At least and at most one of securityOrigin, storageKey must be specified. Security origin.
+ :param storage_key: *(Optional)* Storage key.
:returns: Caches for the security origin.
'''
params: T_JSON_DICT = dict()
- params['securityOrigin'] = security_origin
+ if security_origin is not None:
+ params['securityOrigin'] = security_origin
+ if storage_key is not None:
+ params['storageKey'] = storage_key
cmd_dict: T_JSON_DICT = {
'method': 'CacheStorage.requestCacheNames',
'params': params,
diff --git a/pycdp/cdp/cast.py b/pycdp/cdp/cast.py
index a334b30..9ff2019 100644
--- a/pycdp/cdp/cast.py
+++ b/pycdp/cdp/cast.py
@@ -35,7 +35,7 @@ def from_json(cls, json: T_JSON_DICT) -> Sink:
return cls(
name=str(json['name']),
id_=str(json['id']),
- session=str(json['session']) if 'session' in json else None,
+ session=str(json['session']) if json.get('session', None) is not None else None,
)
diff --git a/pycdp/cdp/console.py b/pycdp/cdp/console.py
index ba67f39..3c4addb 100644
--- a/pycdp/cdp/console.py
+++ b/pycdp/cdp/console.py
@@ -54,9 +54,9 @@ def from_json(cls, json: T_JSON_DICT) -> ConsoleMessage:
source=str(json['source']),
level=str(json['level']),
text=str(json['text']),
- url=str(json['url']) if 'url' in json else None,
- line=int(json['line']) if 'line' in json else None,
- column=int(json['column']) if 'column' in json else None,
+ url=str(json['url']) if json.get('url', None) is not None else None,
+ line=int(json['line']) if json.get('line', None) is not None else None,
+ column=int(json['column']) if json.get('column', None) is not None else None,
)
diff --git a/pycdp/cdp/css.py b/pycdp/cdp/css.py
index 3c4181d..d4b8ed9 100644
--- a/pycdp/cdp/css.py
+++ b/pycdp/cdp/css.py
@@ -57,10 +57,15 @@ class PseudoElementMatches:
#: Matches of CSS rules applicable to the pseudo style.
matches: typing.List[RuleMatch]
+ #: Pseudo element custom ident.
+ pseudo_identifier: typing.Optional[str] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['pseudoType'] = self.pseudo_type.to_json()
json['matches'] = [i.to_json() for i in self.matches]
+ if self.pseudo_identifier is not None:
+ json['pseudoIdentifier'] = self.pseudo_identifier
return json
@classmethod
@@ -68,6 +73,7 @@ def from_json(cls, json: T_JSON_DICT) -> PseudoElementMatches:
return cls(
pseudo_type=dom.PseudoType.from_json(json['pseudoType']),
matches=[RuleMatch.from_json(i) for i in json['matches']],
+ pseudo_identifier=str(json['pseudoIdentifier']) if json.get('pseudoIdentifier', None) is not None else None,
)
@@ -93,7 +99,27 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> InheritedStyleEntry:
return cls(
matched_css_rules=[RuleMatch.from_json(i) for i in json['matchedCSSRules']],
- inline_style=CSSStyle.from_json(json['inlineStyle']) if 'inlineStyle' in json else None,
+ inline_style=CSSStyle.from_json(json['inlineStyle']) if json.get('inlineStyle', None) is not None else None,
+ )
+
+
+@dataclass
+class InheritedPseudoElementMatches:
+ '''
+ Inherited pseudo element matches from pseudos of an ancestor node.
+ '''
+ #: Matches of pseudo styles from the pseudos of an ancestor node.
+ pseudo_elements: typing.List[PseudoElementMatches]
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['pseudoElements'] = [i.to_json() for i in self.pseudo_elements]
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> InheritedPseudoElementMatches:
+ return cls(
+ pseudo_elements=[PseudoElementMatches.from_json(i) for i in json['pseudoElements']],
)
@@ -144,7 +170,7 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> Value:
return cls(
text=str(json['text']),
- range_=SourceRange.from_json(json['range']) if 'range' in json else None,
+ range_=SourceRange.from_json(json['range']) if json.get('range', None) is not None else None,
)
@@ -236,6 +262,9 @@ class CSSStyleSheetHeader:
#: Whether the sourceURL field value comes from the sourceURL comment.
has_source_url: typing.Optional[bool] = None
+ #: If the style sheet was loaded from a network resource, this indicates when the resource failed to load
+ loading_failed: typing.Optional[bool] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['styleSheetId'] = self.style_sheet_id.to_json()
@@ -258,6 +287,8 @@ def to_json(self) -> T_JSON_DICT:
json['ownerNode'] = self.owner_node.to_json()
if self.has_source_url is not None:
json['hasSourceURL'] = self.has_source_url
+ if self.loading_failed is not None:
+ json['loadingFailed'] = self.loading_failed
return json
@classmethod
@@ -277,9 +308,10 @@ def from_json(cls, json: T_JSON_DICT) -> CSSStyleSheetHeader:
length=float(json['length']),
end_line=float(json['endLine']),
end_column=float(json['endColumn']),
- source_map_url=str(json['sourceMapURL']) if 'sourceMapURL' in json else None,
- owner_node=dom.BackendNodeId.from_json(json['ownerNode']) if 'ownerNode' in json else None,
- has_source_url=bool(json['hasSourceURL']) if 'hasSourceURL' in json else None,
+ source_map_url=str(json['sourceMapURL']) if json.get('sourceMapURL', None) is not None else None,
+ owner_node=dom.BackendNodeId.from_json(json['ownerNode']) if json.get('ownerNode', None) is not None else None,
+ has_source_url=bool(json['hasSourceURL']) if json.get('hasSourceURL', None) is not None else None,
+ loading_failed=bool(json['loadingFailed']) if json.get('loadingFailed', None) is not None else None,
)
@@ -301,6 +333,9 @@ class CSSRule:
#: stylesheet rules) this rule came from.
style_sheet_id: typing.Optional[StyleSheetId] = None
+ #: Array of selectors from ancestor style rules, sorted by distance from the current rule.
+ nesting_selectors: typing.Optional[typing.List[str]] = None
+
#: Media list array (for rules involving media queries). The array enumerates media queries
#: starting with the innermost one, going outwards.
media: typing.Optional[typing.List[CSSMedia]] = None
@@ -313,6 +348,14 @@ class CSSRule:
#: The array enumerates @supports at-rules starting with the innermost one, going outwards.
supports: typing.Optional[typing.List[CSSSupports]] = None
+ #: Cascade layer array. Contains the layer hierarchy that this rule belongs to starting
+ #: with the innermost layer and going outwards.
+ layers: typing.Optional[typing.List[CSSLayer]] = None
+
+ #: @scope CSS at-rule array.
+ #: The array enumerates @scope at-rules starting with the innermost one, going outwards.
+ scopes: typing.Optional[typing.List[CSSScope]] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['selectorList'] = self.selector_list.to_json()
@@ -320,12 +363,18 @@ def to_json(self) -> T_JSON_DICT:
json['style'] = self.style.to_json()
if self.style_sheet_id is not None:
json['styleSheetId'] = self.style_sheet_id.to_json()
+ if self.nesting_selectors is not None:
+ json['nestingSelectors'] = [i for i in self.nesting_selectors]
if self.media is not None:
json['media'] = [i.to_json() for i in self.media]
if self.container_queries is not None:
json['containerQueries'] = [i.to_json() for i in self.container_queries]
if self.supports is not None:
json['supports'] = [i.to_json() for i in self.supports]
+ if self.layers is not None:
+ json['layers'] = [i.to_json() for i in self.layers]
+ if self.scopes is not None:
+ json['scopes'] = [i.to_json() for i in self.scopes]
return json
@classmethod
@@ -334,10 +383,13 @@ def from_json(cls, json: T_JSON_DICT) -> CSSRule:
selector_list=SelectorList.from_json(json['selectorList']),
origin=StyleSheetOrigin.from_json(json['origin']),
style=CSSStyle.from_json(json['style']),
- style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if 'styleSheetId' in json else None,
- media=[CSSMedia.from_json(i) for i in json['media']] if 'media' in json else None,
- container_queries=[CSSContainerQuery.from_json(i) for i in json['containerQueries']] if 'containerQueries' in json else None,
- supports=[CSSSupports.from_json(i) for i in json['supports']] if 'supports' in json else None,
+ style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if json.get('styleSheetId', None) is not None else None,
+ nesting_selectors=[str(i) for i in json['nestingSelectors']] if json.get('nestingSelectors', None) is not None else None,
+ media=[CSSMedia.from_json(i) for i in json['media']] if json.get('media', None) is not None else None,
+ container_queries=[CSSContainerQuery.from_json(i) for i in json['containerQueries']] if json.get('containerQueries', None) is not None else None,
+ supports=[CSSSupports.from_json(i) for i in json['supports']] if json.get('supports', None) is not None else None,
+ layers=[CSSLayer.from_json(i) for i in json['layers']] if json.get('layers', None) is not None else None,
+ scopes=[CSSScope.from_json(i) for i in json['scopes']] if json.get('scopes', None) is not None else None,
)
@@ -436,7 +488,7 @@ def from_json(cls, json: T_JSON_DICT) -> ShorthandEntry:
return cls(
name=str(json['name']),
value=str(json['value']),
- important=bool(json['important']) if 'important' in json else None,
+ important=bool(json['important']) if json.get('important', None) is not None else None,
)
@@ -500,9 +552,9 @@ def from_json(cls, json: T_JSON_DICT) -> CSSStyle:
return cls(
css_properties=[CSSProperty.from_json(i) for i in json['cssProperties']],
shorthand_entries=[ShorthandEntry.from_json(i) for i in json['shorthandEntries']],
- style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if 'styleSheetId' in json else None,
- css_text=str(json['cssText']) if 'cssText' in json else None,
- range_=SourceRange.from_json(json['range']) if 'range' in json else None,
+ style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if json.get('styleSheetId', None) is not None else None,
+ css_text=str(json['cssText']) if json.get('cssText', None) is not None else None,
+ range_=SourceRange.from_json(json['range']) if json.get('range', None) is not None else None,
)
@@ -535,6 +587,10 @@ class CSSProperty:
#: The entire property range in the enclosing style declaration (if available).
range_: typing.Optional[SourceRange] = None
+ #: Parsed longhand components of this property if it is a shorthand.
+ #: This field will be empty if the given property is not a shorthand.
+ longhand_properties: typing.Optional[typing.List[CSSProperty]] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['name'] = self.name
@@ -551,6 +607,8 @@ def to_json(self) -> T_JSON_DICT:
json['disabled'] = self.disabled
if self.range_ is not None:
json['range'] = self.range_.to_json()
+ if self.longhand_properties is not None:
+ json['longhandProperties'] = [i.to_json() for i in self.longhand_properties]
return json
@classmethod
@@ -558,12 +616,13 @@ def from_json(cls, json: T_JSON_DICT) -> CSSProperty:
return cls(
name=str(json['name']),
value=str(json['value']),
- important=bool(json['important']) if 'important' in json else None,
- implicit=bool(json['implicit']) if 'implicit' in json else None,
- text=str(json['text']) if 'text' in json else None,
- parsed_ok=bool(json['parsedOk']) if 'parsedOk' in json else None,
- disabled=bool(json['disabled']) if 'disabled' in json else None,
- range_=SourceRange.from_json(json['range']) if 'range' in json else None,
+ important=bool(json['important']) if json.get('important', None) is not None else None,
+ implicit=bool(json['implicit']) if json.get('implicit', None) is not None else None,
+ text=str(json['text']) if json.get('text', None) is not None else None,
+ parsed_ok=bool(json['parsedOk']) if json.get('parsedOk', None) is not None else None,
+ disabled=bool(json['disabled']) if json.get('disabled', None) is not None else None,
+ range_=SourceRange.from_json(json['range']) if json.get('range', None) is not None else None,
+ longhand_properties=[CSSProperty.from_json(i) for i in json['longhandProperties']] if json.get('longhandProperties', None) is not None else None,
)
@@ -613,10 +672,10 @@ def from_json(cls, json: T_JSON_DICT) -> CSSMedia:
return cls(
text=str(json['text']),
source=str(json['source']),
- source_url=str(json['sourceURL']) if 'sourceURL' in json else None,
- range_=SourceRange.from_json(json['range']) if 'range' in json else None,
- style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if 'styleSheetId' in json else None,
- media_list=[MediaQuery.from_json(i) for i in json['mediaList']] if 'mediaList' in json else None,
+ source_url=str(json['sourceURL']) if json.get('sourceURL', None) is not None else None,
+ range_=SourceRange.from_json(json['range']) if json.get('range', None) is not None else None,
+ style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if json.get('styleSheetId', None) is not None else None,
+ media_list=[MediaQuery.from_json(i) for i in json['mediaList']] if json.get('mediaList', None) is not None else None,
)
@@ -682,8 +741,8 @@ def from_json(cls, json: T_JSON_DICT) -> MediaQueryExpression:
value=float(json['value']),
unit=str(json['unit']),
feature=str(json['feature']),
- value_range=SourceRange.from_json(json['valueRange']) if 'valueRange' in json else None,
- computed_length=float(json['computedLength']) if 'computedLength' in json else None,
+ value_range=SourceRange.from_json(json['valueRange']) if json.get('valueRange', None) is not None else None,
+ computed_length=float(json['computedLength']) if json.get('computedLength', None) is not None else None,
)
@@ -705,6 +764,12 @@ class CSSContainerQuery:
#: Optional name for the container.
name: typing.Optional[str] = None
+ #: Optional physical axes queried for the container.
+ physical_axes: typing.Optional[dom.PhysicalAxes] = None
+
+ #: Optional logical axes queried for the container.
+ logical_axes: typing.Optional[dom.LogicalAxes] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['text'] = self.text
@@ -714,15 +779,21 @@ def to_json(self) -> T_JSON_DICT:
json['styleSheetId'] = self.style_sheet_id.to_json()
if self.name is not None:
json['name'] = self.name
+ if self.physical_axes is not None:
+ json['physicalAxes'] = self.physical_axes.to_json()
+ if self.logical_axes is not None:
+ json['logicalAxes'] = self.logical_axes.to_json()
return json
@classmethod
def from_json(cls, json: T_JSON_DICT) -> CSSContainerQuery:
return cls(
text=str(json['text']),
- range_=SourceRange.from_json(json['range']) if 'range' in json else None,
- style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if 'styleSheetId' in json else None,
- name=str(json['name']) if 'name' in json else None,
+ range_=SourceRange.from_json(json['range']) if json.get('range', None) is not None else None,
+ style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if json.get('styleSheetId', None) is not None else None,
+ name=str(json['name']) if json.get('name', None) is not None else None,
+ physical_axes=dom.PhysicalAxes.from_json(json['physicalAxes']) if json.get('physicalAxes', None) is not None else None,
+ logical_axes=dom.LogicalAxes.from_json(json['logicalAxes']) if json.get('logicalAxes', None) is not None else None,
)
@@ -734,6 +805,9 @@ class CSSSupports:
#: Supports rule text.
text: str
+ #: Whether the supports condition is satisfied.
+ active: bool
+
#: The associated rule header range in the enclosing stylesheet (if
#: available).
range_: typing.Optional[SourceRange] = None
@@ -744,6 +818,7 @@ class CSSSupports:
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['text'] = self.text
+ json['active'] = self.active
if self.range_ is not None:
json['range'] = self.range_.to_json()
if self.style_sheet_id is not None:
@@ -754,8 +829,107 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> CSSSupports:
return cls(
text=str(json['text']),
- range_=SourceRange.from_json(json['range']) if 'range' in json else None,
- style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if 'styleSheetId' in json else None,
+ active=bool(json['active']),
+ range_=SourceRange.from_json(json['range']) if json.get('range', None) is not None else None,
+ style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if json.get('styleSheetId', None) is not None else None,
+ )
+
+
+@dataclass
+class CSSScope:
+ '''
+ CSS Scope at-rule descriptor.
+ '''
+ #: Scope rule text.
+ text: str
+
+ #: The associated rule header range in the enclosing stylesheet (if
+ #: available).
+ range_: typing.Optional[SourceRange] = None
+
+ #: Identifier of the stylesheet containing this object (if exists).
+ style_sheet_id: typing.Optional[StyleSheetId] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['text'] = self.text
+ if self.range_ is not None:
+ json['range'] = self.range_.to_json()
+ if self.style_sheet_id is not None:
+ json['styleSheetId'] = self.style_sheet_id.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> CSSScope:
+ return cls(
+ text=str(json['text']),
+ range_=SourceRange.from_json(json['range']) if json.get('range', None) is not None else None,
+ style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if json.get('styleSheetId', None) is not None else None,
+ )
+
+
+@dataclass
+class CSSLayer:
+ '''
+ CSS Layer at-rule descriptor.
+ '''
+ #: Layer name.
+ text: str
+
+ #: The associated rule header range in the enclosing stylesheet (if
+ #: available).
+ range_: typing.Optional[SourceRange] = None
+
+ #: Identifier of the stylesheet containing this object (if exists).
+ style_sheet_id: typing.Optional[StyleSheetId] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['text'] = self.text
+ if self.range_ is not None:
+ json['range'] = self.range_.to_json()
+ if self.style_sheet_id is not None:
+ json['styleSheetId'] = self.style_sheet_id.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> CSSLayer:
+ return cls(
+ text=str(json['text']),
+ range_=SourceRange.from_json(json['range']) if json.get('range', None) is not None else None,
+ style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if json.get('styleSheetId', None) is not None else None,
+ )
+
+
+@dataclass
+class CSSLayerData:
+ '''
+ CSS Layer data.
+ '''
+ #: Layer name.
+ name: str
+
+ #: Layer order. The order determines the order of the layer in the cascade order.
+ #: A higher number has higher priority in the cascade order.
+ order: float
+
+ #: Direct sub-layers
+ sub_layers: typing.Optional[typing.List[CSSLayerData]] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['name'] = self.name
+ json['order'] = self.order
+ if self.sub_layers is not None:
+ json['subLayers'] = [i.to_json() for i in self.sub_layers]
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> CSSLayerData:
+ return cls(
+ name=str(json['name']),
+ order=float(json['order']),
+ sub_layers=[CSSLayerData.from_json(i) for i in json['subLayers']] if json.get('subLayers', None) is not None else None,
)
@@ -850,6 +1024,9 @@ class FontFace:
#: The font-stretch.
font_stretch: str
+ #: The font-display.
+ font_display: str
+
#: The unicode-range.
unicode_range: str
@@ -869,6 +1046,7 @@ def to_json(self) -> T_JSON_DICT:
json['fontVariant'] = self.font_variant
json['fontWeight'] = self.font_weight
json['fontStretch'] = self.font_stretch
+ json['fontDisplay'] = self.font_display
json['unicodeRange'] = self.unicode_range
json['src'] = self.src
json['platformFontFamily'] = self.platform_font_family
@@ -884,10 +1062,67 @@ def from_json(cls, json: T_JSON_DICT) -> FontFace:
font_variant=str(json['fontVariant']),
font_weight=str(json['fontWeight']),
font_stretch=str(json['fontStretch']),
+ font_display=str(json['fontDisplay']),
unicode_range=str(json['unicodeRange']),
src=str(json['src']),
platform_font_family=str(json['platformFontFamily']),
- font_variation_axes=[FontVariationAxis.from_json(i) for i in json['fontVariationAxes']] if 'fontVariationAxes' in json else None,
+ font_variation_axes=[FontVariationAxis.from_json(i) for i in json['fontVariationAxes']] if json.get('fontVariationAxes', None) is not None else None,
+ )
+
+
+@dataclass
+class CSSTryRule:
+ '''
+ CSS try rule representation.
+ '''
+ #: Parent stylesheet's origin.
+ origin: StyleSheetOrigin
+
+ #: Associated style declaration.
+ style: CSSStyle
+
+ #: The css style sheet identifier (absent for user agent stylesheet and user-specified
+ #: stylesheet rules) this rule came from.
+ style_sheet_id: typing.Optional[StyleSheetId] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['origin'] = self.origin.to_json()
+ json['style'] = self.style.to_json()
+ if self.style_sheet_id is not None:
+ json['styleSheetId'] = self.style_sheet_id.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> CSSTryRule:
+ return cls(
+ origin=StyleSheetOrigin.from_json(json['origin']),
+ style=CSSStyle.from_json(json['style']),
+ style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if json.get('styleSheetId', None) is not None else None,
+ )
+
+
+@dataclass
+class CSSPositionFallbackRule:
+ '''
+ CSS position-fallback rule representation.
+ '''
+ name: Value
+
+ #: List of keyframes.
+ try_rules: typing.List[CSSTryRule]
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['name'] = self.name.to_json()
+ json['tryRules'] = [i.to_json() for i in self.try_rules]
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> CSSPositionFallbackRule:
+ return cls(
+ name=Value.from_json(json['name']),
+ try_rules=[CSSTryRule.from_json(i) for i in json['tryRules']],
)
@@ -949,7 +1184,7 @@ def from_json(cls, json: T_JSON_DICT) -> CSSKeyframeRule:
origin=StyleSheetOrigin.from_json(json['origin']),
key_text=Value.from_json(json['keyText']),
style=CSSStyle.from_json(json['style']),
- style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if 'styleSheetId' in json else None,
+ style_sheet_id=StyleSheetId.from_json(json['styleSheetId']) if json.get('styleSheetId', None) is not None else None,
)
@@ -1108,9 +1343,9 @@ def get_background_colors(
}
json = yield cmd_dict
return (
- [str(i) for i in json['backgroundColors']] if 'backgroundColors' in json else None,
- str(json['computedFontSize']) if 'computedFontSize' in json else None,
- str(json['computedFontWeight']) if 'computedFontWeight' in json else None
+ [str(i) for i in json['backgroundColors']] if json.get('backgroundColors', None) is not None else None,
+ str(json['computedFontSize']) if json.get('computedFontSize', None) is not None else None,
+ str(json['computedFontWeight']) if json.get('computedFontWeight', None) is not None else None
)
@@ -1154,14 +1389,14 @@ def get_inline_styles_for_node(
}
json = yield cmd_dict
return (
- CSSStyle.from_json(json['inlineStyle']) if 'inlineStyle' in json else None,
- CSSStyle.from_json(json['attributesStyle']) if 'attributesStyle' in json else None
+ CSSStyle.from_json(json['inlineStyle']) if json.get('inlineStyle', None) is not None else None,
+ CSSStyle.from_json(json['attributesStyle']) if json.get('attributesStyle', None) is not None else None
)
def get_matched_styles_for_node(
node_id: dom.NodeId
- ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.Optional[CSSStyle], typing.Optional[CSSStyle], typing.Optional[typing.List[RuleMatch]], typing.Optional[typing.List[PseudoElementMatches]], typing.Optional[typing.List[InheritedStyleEntry]], typing.Optional[typing.List[CSSKeyframesRule]]]]:
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.Optional[CSSStyle], typing.Optional[CSSStyle], typing.Optional[typing.List[RuleMatch]], typing.Optional[typing.List[PseudoElementMatches]], typing.Optional[typing.List[InheritedStyleEntry]], typing.Optional[typing.List[InheritedPseudoElementMatches]], typing.Optional[typing.List[CSSKeyframesRule]], typing.Optional[typing.List[CSSPositionFallbackRule]], typing.Optional[dom.NodeId]]]:
'''
Returns requested styles for a DOM node identified by ``nodeId``.
@@ -1173,7 +1408,10 @@ def get_matched_styles_for_node(
2. **matchedCSSRules** - *(Optional)* CSS rules matching this node, from all applicable stylesheets.
3. **pseudoElements** - *(Optional)* Pseudo style matches for this node.
4. **inherited** - *(Optional)* A chain of inherited styles (from the immediate node parent up to the DOM tree root).
- 5. **cssKeyframesRules** - *(Optional)* A list of CSS keyframed animations matching this node.
+ 5. **inheritedPseudoElements** - *(Optional)* A chain of inherited pseudo element styles (from the immediate node parent up to the DOM tree root).
+ 6. **cssKeyframesRules** - *(Optional)* A list of CSS keyframed animations matching this node.
+ 7. **cssPositionFallbackRules** - *(Optional)* A list of CSS position fallbacks matching this node.
+ 8. **parentLayoutNodeId** - *(Optional)* Id of the first parent element that does not have display: contents.
'''
params: T_JSON_DICT = dict()
params['nodeId'] = node_id.to_json()
@@ -1183,12 +1421,15 @@ def get_matched_styles_for_node(
}
json = yield cmd_dict
return (
- CSSStyle.from_json(json['inlineStyle']) if 'inlineStyle' in json else None,
- CSSStyle.from_json(json['attributesStyle']) if 'attributesStyle' in json else None,
- [RuleMatch.from_json(i) for i in json['matchedCSSRules']] if 'matchedCSSRules' in json else None,
- [PseudoElementMatches.from_json(i) for i in json['pseudoElements']] if 'pseudoElements' in json else None,
- [InheritedStyleEntry.from_json(i) for i in json['inherited']] if 'inherited' in json else None,
- [CSSKeyframesRule.from_json(i) for i in json['cssKeyframesRules']] if 'cssKeyframesRules' in json else None
+ CSSStyle.from_json(json['inlineStyle']) if json.get('inlineStyle', None) is not None else None,
+ CSSStyle.from_json(json['attributesStyle']) if json.get('attributesStyle', None) is not None else None,
+ [RuleMatch.from_json(i) for i in json['matchedCSSRules']] if json.get('matchedCSSRules', None) is not None else None,
+ [PseudoElementMatches.from_json(i) for i in json['pseudoElements']] if json.get('pseudoElements', None) is not None else None,
+ [InheritedStyleEntry.from_json(i) for i in json['inherited']] if json.get('inherited', None) is not None else None,
+ [InheritedPseudoElementMatches.from_json(i) for i in json['inheritedPseudoElements']] if json.get('inheritedPseudoElements', None) is not None else None,
+ [CSSKeyframesRule.from_json(i) for i in json['cssKeyframesRules']] if json.get('cssKeyframesRules', None) is not None else None,
+ [CSSPositionFallbackRule.from_json(i) for i in json['cssPositionFallbackRules']] if json.get('cssPositionFallbackRules', None) is not None else None,
+ dom.NodeId.from_json(json['parentLayoutNodeId']) if json.get('parentLayoutNodeId', None) is not None else None
)
@@ -1244,6 +1485,30 @@ def get_style_sheet_text(
return str(json['text'])
+def get_layers_for_node(
+ node_id: dom.NodeId
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,CSSLayerData]:
+ '''
+ Returns all layers parsed by the rendering engine for the tree scope of a node.
+ Given a DOM element identified by nodeId, getLayersForNode returns the root
+ layer for the nearest ancestor document or shadow root. The layer root contains
+ the full layer tree for the tree scope and their ordering.
+
+ **EXPERIMENTAL**
+
+ :param node_id:
+ :returns:
+ '''
+ params: T_JSON_DICT = dict()
+ params['nodeId'] = node_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'CSS.getLayersForNode',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return CSSLayerData.from_json(json['rootLayer'])
+
+
def track_computed_style_updates(
properties_to_track: typing.List[CSSComputedStyleProperty]
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
@@ -1274,7 +1539,7 @@ def take_computed_style_updates() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,ty
**EXPERIMENTAL**
- :returns: The list of node Ids that have their tracked computed styles updated
+ :returns: The list of node Ids that have their tracked computed styles updated.
'''
cmd_dict: T_JSON_DICT = {
'method': 'CSS.takeComputedStyleUpdates',
@@ -1411,6 +1676,33 @@ def set_supports_text(
return CSSSupports.from_json(json['supports'])
+def set_scope_text(
+ style_sheet_id: StyleSheetId,
+ range_: SourceRange,
+ text: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,CSSScope]:
+ '''
+ Modifies the expression of a scope at-rule.
+
+ **EXPERIMENTAL**
+
+ :param style_sheet_id:
+ :param range_:
+ :param text:
+ :returns: The resulting CSS Scope rule after modification.
+ '''
+ params: T_JSON_DICT = dict()
+ params['styleSheetId'] = style_sheet_id.to_json()
+ params['range'] = range_.to_json()
+ params['text'] = text
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'CSS.setScopeText',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return CSSScope.from_json(json['scope'])
+
+
def set_rule_selector(
style_sheet_id: StyleSheetId,
range_: SourceRange,
@@ -1455,7 +1747,7 @@ def set_style_sheet_text(
'params': params,
}
json = yield cmd_dict
- return str(json['sourceMapURL']) if 'sourceMapURL' in json else None
+ return str(json['sourceMapURL']) if json.get('sourceMapURL', None) is not None else None
def set_style_texts(
@@ -1490,7 +1782,7 @@ def start_rule_usage_tracking() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None
def stop_rule_usage_tracking() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[RuleUsage]]:
'''
Stop tracking rule usage and return the list of rules that were used since last call to
- ``takeCoverageDelta`` (or since start of coverage instrumentation)
+ ``takeCoverageDelta`` (or since start of coverage instrumentation).
:returns:
'''
@@ -1504,7 +1796,7 @@ def stop_rule_usage_tracking() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typin
def take_coverage_delta() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.List[RuleUsage], float]]:
'''
Obtain list of rules that became used since last call to this method (or since start of coverage
- instrumentation)
+ instrumentation).
:returns: A tuple with the following items:
@@ -1545,7 +1837,7 @@ def set_local_fonts_enabled(
class FontsUpdated:
'''
Fires whenever a web font is updated. A non-empty font parameter indicates a successfully loaded
- web font
+ web font.
'''
#: The web font that has loaded.
font: typing.Optional[FontFace]
@@ -1553,7 +1845,7 @@ class FontsUpdated:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> FontsUpdated:
return cls(
- font=FontFace.from_json(json['font']) if 'font' in json else None
+ font=FontFace.from_json(json['font']) if json.get('font', None) is not None else None
)
diff --git a/pycdp/cdp/database.py b/pycdp/cdp/database.py
index b8052e4..d61d4c4 100644
--- a/pycdp/cdp/database.py
+++ b/pycdp/cdp/database.py
@@ -129,9 +129,9 @@ def execute_sql(
}
json = yield cmd_dict
return (
- [str(i) for i in json['columnNames']] if 'columnNames' in json else None,
- [i for i in json['values']] if 'values' in json else None,
- Error.from_json(json['sqlError']) if 'sqlError' in json else None
+ [str(i) for i in json['columnNames']] if json.get('columnNames', None) is not None else None,
+ [i for i in json['values']] if json.get('values', None) is not None else None,
+ Error.from_json(json['sqlError']) if json.get('sqlError', None) is not None else None
)
diff --git a/pycdp/cdp/debugger.py b/pycdp/cdp/debugger.py
index ad67882..148b57f 100644
--- a/pycdp/cdp/debugger.py
+++ b/pycdp/cdp/debugger.py
@@ -72,7 +72,7 @@ def from_json(cls, json: T_JSON_DICT) -> Location:
return cls(
script_id=runtime.ScriptId.from_json(json['scriptId']),
line_number=int(json['lineNumber']),
- column_number=int(json['columnNumber']) if 'columnNumber' in json else None,
+ column_number=int(json['columnNumber']) if json.get('columnNumber', None) is not None else None,
)
@@ -141,6 +141,8 @@ class CallFrame:
location: Location
#: JavaScript script name or url.
+ #: Deprecated in favor of using the ``location.scriptId`` to resolve the URL via a previously
+ #: sent ``Debugger.scriptParsed`` event.
url: str
#: Scope chain for this call frame.
@@ -155,6 +157,12 @@ class CallFrame:
#: The value being returned, if the function is at return point.
return_value: typing.Optional[runtime.RemoteObject] = None
+ #: Valid only while the VM is paused and indicates whether this frame
+ #: can be restarted or not. Note that a ``true`` value here does not
+ #: guarantee that Debugger#restartFrame with this CallFrameId will be
+ #: successful, but it is very likely.
+ can_be_restarted: typing.Optional[bool] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['callFrameId'] = self.call_frame_id.to_json()
@@ -167,6 +175,8 @@ def to_json(self) -> T_JSON_DICT:
json['functionLocation'] = self.function_location.to_json()
if self.return_value is not None:
json['returnValue'] = self.return_value.to_json()
+ if self.can_be_restarted is not None:
+ json['canBeRestarted'] = self.can_be_restarted
return json
@classmethod
@@ -178,8 +188,9 @@ def from_json(cls, json: T_JSON_DICT) -> CallFrame:
url=str(json['url']),
scope_chain=[Scope.from_json(i) for i in json['scopeChain']],
this=runtime.RemoteObject.from_json(json['this']),
- function_location=Location.from_json(json['functionLocation']) if 'functionLocation' in json else None,
- return_value=runtime.RemoteObject.from_json(json['returnValue']) if 'returnValue' in json else None,
+ function_location=Location.from_json(json['functionLocation']) if json.get('functionLocation', None) is not None else None,
+ return_value=runtime.RemoteObject.from_json(json['returnValue']) if json.get('returnValue', None) is not None else None,
+ can_be_restarted=bool(json['canBeRestarted']) if json.get('canBeRestarted', None) is not None else None,
)
@@ -221,9 +232,9 @@ def from_json(cls, json: T_JSON_DICT) -> Scope:
return cls(
type_=str(json['type']),
object_=runtime.RemoteObject.from_json(json['object']),
- name=str(json['name']) if 'name' in json else None,
- start_location=Location.from_json(json['startLocation']) if 'startLocation' in json else None,
- end_location=Location.from_json(json['endLocation']) if 'endLocation' in json else None,
+ name=str(json['name']) if json.get('name', None) is not None else None,
+ start_location=Location.from_json(json['startLocation']) if json.get('startLocation', None) is not None else None,
+ end_location=Location.from_json(json['endLocation']) if json.get('endLocation', None) is not None else None,
)
@@ -280,8 +291,30 @@ def from_json(cls, json: T_JSON_DICT) -> BreakLocation:
return cls(
script_id=runtime.ScriptId.from_json(json['scriptId']),
line_number=int(json['lineNumber']),
- column_number=int(json['columnNumber']) if 'columnNumber' in json else None,
- type_=str(json['type']) if 'type' in json else None,
+ column_number=int(json['columnNumber']) if json.get('columnNumber', None) is not None else None,
+ type_=str(json['type']) if json.get('type', None) is not None else None,
+ )
+
+
+@dataclass
+class WasmDisassemblyChunk:
+ #: The next chunk of disassembled lines.
+ lines: typing.List[str]
+
+ #: The bytecode offsets describing the start of each line.
+ bytecode_offsets: typing.List[int]
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['lines'] = [i for i in self.lines]
+ json['bytecodeOffsets'] = [i for i in self.bytecode_offsets]
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> WasmDisassemblyChunk:
+ return cls(
+ lines=[str(i) for i in json['lines']],
+ bytecode_offsets=[int(i) for i in json['bytecodeOffsets']],
)
@@ -322,7 +355,7 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> DebugSymbols:
return cls(
type_=str(json['type']),
- external_url=str(json['externalURL']) if 'externalURL' in json else None,
+ external_url=str(json['externalURL']) if json.get('externalURL', None) is not None else None,
)
@@ -430,7 +463,7 @@ def evaluate_on_call_frame(
json = yield cmd_dict
return (
runtime.RemoteObject.from_json(json['result']),
- runtime.ExceptionDetails.from_json(json['exceptionDetails']) if 'exceptionDetails' in json else None
+ runtime.ExceptionDetails.from_json(json['exceptionDetails']) if json.get('exceptionDetails', None) is not None else None
)
@@ -483,10 +516,65 @@ def get_script_source(
json = yield cmd_dict
return (
str(json['scriptSource']),
- str(json['bytecode']) if 'bytecode' in json else None
+ str(json['bytecode']) if json.get('bytecode', None) is not None else None
+ )
+
+
+def disassemble_wasm_module(
+ script_id: runtime.ScriptId
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.Optional[str], int, typing.List[int], WasmDisassemblyChunk]]:
+ '''
+
+
+ **EXPERIMENTAL**
+
+ :param script_id: Id of the script to disassemble
+ :returns: A tuple with the following items:
+
+ 0. **streamId** - *(Optional)* For large modules, return a stream from which additional chunks of disassembly can be read successively.
+ 1. **totalNumberOfLines** - The total number of lines in the disassembly text.
+ 2. **functionBodyOffsets** - The offsets of all function bodies, in the format [start1, end1, start2, end2, ...] where all ends are exclusive.
+ 3. **chunk** - The first chunk of disassembly.
+ '''
+ params: T_JSON_DICT = dict()
+ params['scriptId'] = script_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Debugger.disassembleWasmModule',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return (
+ str(json['streamId']) if json.get('streamId', None) is not None else None,
+ int(json['totalNumberOfLines']),
+ [int(i) for i in json['functionBodyOffsets']],
+ WasmDisassemblyChunk.from_json(json['chunk'])
)
+def next_wasm_disassembly_chunk(
+ stream_id: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,WasmDisassemblyChunk]:
+ '''
+ Disassemble the next chunk of lines for the module corresponding to the
+ stream. If disassembly is complete, this API will invalidate the streamId
+ and return an empty chunk. Any subsequent calls for the now invalid stream
+ will return errors.
+
+ **EXPERIMENTAL**
+
+ :param stream_id:
+ :returns: The next chunk of disassembly.
+ '''
+ params: T_JSON_DICT = dict()
+ params['streamId'] = stream_id
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Debugger.nextWasmDisassemblyChunk',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return WasmDisassemblyChunk.from_json(json['chunk'])
+
+
@deprecated(version="1.3")
def get_wasm_bytecode(
script_id: runtime.ScriptId
@@ -579,16 +667,27 @@ def remove_breakpoint(
json = yield cmd_dict
-@deprecated(version="1.3")
def restart_frame(
- call_frame_id: CallFrameId
+ call_frame_id: CallFrameId,
+ mode: typing.Optional[str] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.List[CallFrame], typing.Optional[runtime.StackTrace], typing.Optional[runtime.StackTraceId]]]:
'''
- Restarts particular call frame from the beginning.
+ Restarts particular call frame from the beginning. The old, deprecated
+ behavior of ``restartFrame`` is to stay paused and allow further CDP commands
+ after a restart was scheduled. This can cause problems with restarting, so
+ we now continue execution immediately after it has been scheduled until we
+ reach the beginning of the restarted frame.
- .. deprecated:: 1.3
+ To stay backwards compatible, ``restartFrame`` now expects a ``mode``
+ parameter to be present. If the ``mode`` parameter is missing, ``restartFrame``
+ errors out.
+
+ The various return values are deprecated and ``callFrames`` is always empty.
+ Use the call frames from the ``Debugger#paused`` events instead, that fires
+ once V8 pauses at the beginning of the restarted function.
:param call_frame_id: Call frame identifier to evaluate on.
+ :param mode: **(EXPERIMENTAL)** *(Optional)* The ``mode`` parameter must be present and set to 'StepInto', otherwise ``restartFrame`` will error out.
:returns: A tuple with the following items:
0. **callFrames** - New stack trace.
@@ -597,6 +696,8 @@ def restart_frame(
'''
params: T_JSON_DICT = dict()
params['callFrameId'] = call_frame_id.to_json()
+ if mode is not None:
+ params['mode'] = mode
cmd_dict: T_JSON_DICT = {
'method': 'Debugger.restartFrame',
'params': params,
@@ -604,8 +705,8 @@ def restart_frame(
json = yield cmd_dict
return (
[CallFrame.from_json(i) for i in json['callFrames']],
- runtime.StackTrace.from_json(json['asyncStackTrace']) if 'asyncStackTrace' in json else None,
- runtime.StackTraceId.from_json(json['asyncStackTraceId']) if 'asyncStackTraceId' in json else None
+ runtime.StackTrace.from_json(json['asyncStackTrace']) if json.get('asyncStackTrace', None) is not None else None,
+ runtime.StackTraceId.from_json(json['asyncStackTraceId']) if json.get('asyncStackTraceId', None) is not None else None
)
@@ -864,8 +965,8 @@ def set_pause_on_exceptions(
state: str
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
- Defines pause on exceptions state. Can be set to stop on all exceptions, uncaught exceptions or
- no exceptions. Initial pause on exceptions state is ``none``.
+ Defines pause on exceptions state. Can be set to stop on all exceptions, uncaught exceptions,
+ caught exceptions, or no exceptions. Initial pause on exceptions state is ``none``.
:param state: Pause on exceptions mode.
'''
@@ -900,38 +1001,50 @@ def set_return_value(
def set_script_source(
script_id: runtime.ScriptId,
script_source: str,
- dry_run: typing.Optional[bool] = None
- ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.Optional[typing.List[CallFrame]], typing.Optional[bool], typing.Optional[runtime.StackTrace], typing.Optional[runtime.StackTraceId], typing.Optional[runtime.ExceptionDetails]]]:
+ dry_run: typing.Optional[bool] = None,
+ allow_top_frame_editing: typing.Optional[bool] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.Optional[typing.List[CallFrame]], typing.Optional[bool], typing.Optional[runtime.StackTrace], typing.Optional[runtime.StackTraceId], str, typing.Optional[runtime.ExceptionDetails]]]:
'''
Edits JavaScript source live.
+ In general, functions that are currently on the stack can not be edited with
+ a single exception: If the edited function is the top-most stack frame and
+ that is the only activation of that function on the stack. In this case
+ the live edit will be successful and a ``Debugger.restartFrame`` for the
+ top-most function is automatically triggered.
+
:param script_id: Id of the script to edit.
:param script_source: New content of the script.
:param dry_run: *(Optional)* If true the change will not actually be applied. Dry run may be used to get result description without actually modifying the code.
+ :param allow_top_frame_editing: **(EXPERIMENTAL)** *(Optional)* If true, then ``scriptSource`` is allowed to change the function on top of the stack as long as the top-most stack frame is the only activation of that function.
:returns: A tuple with the following items:
0. **callFrames** - *(Optional)* New stack trace in case editing has happened while VM was stopped.
1. **stackChanged** - *(Optional)* Whether current call stack was modified after applying the changes.
2. **asyncStackTrace** - *(Optional)* Async stack trace, if any.
3. **asyncStackTraceId** - *(Optional)* Async stack trace, if any.
- 4. **exceptionDetails** - *(Optional)* Exception details if any.
+ 4. **status** - Whether the operation was successful or not. Only ``Ok`` denotes a successful live edit while the other enum variants denote why the live edit failed.
+ 5. **exceptionDetails** - *(Optional)* Exception details if any. Only present when ``status`` is ``CompileError``.
'''
params: T_JSON_DICT = dict()
params['scriptId'] = script_id.to_json()
params['scriptSource'] = script_source
if dry_run is not None:
params['dryRun'] = dry_run
+ if allow_top_frame_editing is not None:
+ params['allowTopFrameEditing'] = allow_top_frame_editing
cmd_dict: T_JSON_DICT = {
'method': 'Debugger.setScriptSource',
'params': params,
}
json = yield cmd_dict
return (
- [CallFrame.from_json(i) for i in json['callFrames']] if 'callFrames' in json else None,
- bool(json['stackChanged']) if 'stackChanged' in json else None,
- runtime.StackTrace.from_json(json['asyncStackTrace']) if 'asyncStackTrace' in json else None,
- runtime.StackTraceId.from_json(json['asyncStackTraceId']) if 'asyncStackTraceId' in json else None,
- runtime.ExceptionDetails.from_json(json['exceptionDetails']) if 'exceptionDetails' in json else None
+ [CallFrame.from_json(i) for i in json['callFrames']] if json.get('callFrames', None) is not None else None,
+ bool(json['stackChanged']) if json.get('stackChanged', None) is not None else None,
+ runtime.StackTrace.from_json(json['asyncStackTrace']) if json.get('asyncStackTrace', None) is not None else None,
+ runtime.StackTraceId.from_json(json['asyncStackTraceId']) if json.get('asyncStackTraceId', None) is not None else None,
+ str(json['status']),
+ runtime.ExceptionDetails.from_json(json['exceptionDetails']) if json.get('exceptionDetails', None) is not None else None
)
@@ -1074,11 +1187,11 @@ def from_json(cls, json: T_JSON_DICT) -> Paused:
return cls(
call_frames=[CallFrame.from_json(i) for i in json['callFrames']],
reason=str(json['reason']),
- data=dict(json['data']) if 'data' in json else None,
- hit_breakpoints=[str(i) for i in json['hitBreakpoints']] if 'hitBreakpoints' in json else None,
- async_stack_trace=runtime.StackTrace.from_json(json['asyncStackTrace']) if 'asyncStackTrace' in json else None,
- async_stack_trace_id=runtime.StackTraceId.from_json(json['asyncStackTraceId']) if 'asyncStackTraceId' in json else None,
- async_call_stack_trace_id=runtime.StackTraceId.from_json(json['asyncCallStackTraceId']) if 'asyncCallStackTraceId' in json else None
+ data=dict(json['data']) if json.get('data', None) is not None else None,
+ hit_breakpoints=[str(i) for i in json['hitBreakpoints']] if json.get('hitBreakpoints', None) is not None else None,
+ async_stack_trace=runtime.StackTrace.from_json(json['asyncStackTrace']) if json.get('asyncStackTrace', None) is not None else None,
+ async_stack_trace_id=runtime.StackTraceId.from_json(json['asyncStackTraceId']) if json.get('asyncStackTraceId', None) is not None else None,
+ async_call_stack_trace_id=runtime.StackTraceId.from_json(json['asyncCallStackTraceId']) if json.get('asyncCallStackTraceId', None) is not None else None
)
@@ -1117,7 +1230,7 @@ class ScriptFailedToParse:
end_column: int
#: Specifies script creation context.
execution_context_id: runtime.ExecutionContextId
- #: Content hash of the script.
+ #: Content hash of the script, SHA-256.
hash_: str
#: Embedder-specific auxiliary data.
execution_context_aux_data: typing.Optional[dict]
@@ -1149,15 +1262,15 @@ def from_json(cls, json: T_JSON_DICT) -> ScriptFailedToParse:
end_column=int(json['endColumn']),
execution_context_id=runtime.ExecutionContextId.from_json(json['executionContextId']),
hash_=str(json['hash']),
- execution_context_aux_data=dict(json['executionContextAuxData']) if 'executionContextAuxData' in json else None,
- source_map_url=str(json['sourceMapURL']) if 'sourceMapURL' in json else None,
- has_source_url=bool(json['hasSourceURL']) if 'hasSourceURL' in json else None,
- is_module=bool(json['isModule']) if 'isModule' in json else None,
- length=int(json['length']) if 'length' in json else None,
- stack_trace=runtime.StackTrace.from_json(json['stackTrace']) if 'stackTrace' in json else None,
- code_offset=int(json['codeOffset']) if 'codeOffset' in json else None,
- script_language=ScriptLanguage.from_json(json['scriptLanguage']) if 'scriptLanguage' in json else None,
- embedder_name=str(json['embedderName']) if 'embedderName' in json else None
+ execution_context_aux_data=dict(json['executionContextAuxData']) if json.get('executionContextAuxData', None) is not None else None,
+ source_map_url=str(json['sourceMapURL']) if json.get('sourceMapURL', None) is not None else None,
+ has_source_url=bool(json['hasSourceURL']) if json.get('hasSourceURL', None) is not None else None,
+ is_module=bool(json['isModule']) if json.get('isModule', None) is not None else None,
+ length=int(json['length']) if json.get('length', None) is not None else None,
+ stack_trace=runtime.StackTrace.from_json(json['stackTrace']) if json.get('stackTrace', None) is not None else None,
+ code_offset=int(json['codeOffset']) if json.get('codeOffset', None) is not None else None,
+ script_language=ScriptLanguage.from_json(json['scriptLanguage']) if json.get('scriptLanguage', None) is not None else None,
+ embedder_name=str(json['embedderName']) if json.get('embedderName', None) is not None else None
)
@@ -1182,7 +1295,7 @@ class ScriptParsed:
end_column: int
#: Specifies script creation context.
execution_context_id: runtime.ExecutionContextId
- #: Content hash of the script.
+ #: Content hash of the script, SHA-256.
hash_: str
#: Embedder-specific auxiliary data.
execution_context_aux_data: typing.Optional[dict]
@@ -1218,15 +1331,15 @@ def from_json(cls, json: T_JSON_DICT) -> ScriptParsed:
end_column=int(json['endColumn']),
execution_context_id=runtime.ExecutionContextId.from_json(json['executionContextId']),
hash_=str(json['hash']),
- execution_context_aux_data=dict(json['executionContextAuxData']) if 'executionContextAuxData' in json else None,
- is_live_edit=bool(json['isLiveEdit']) if 'isLiveEdit' in json else None,
- source_map_url=str(json['sourceMapURL']) if 'sourceMapURL' in json else None,
- has_source_url=bool(json['hasSourceURL']) if 'hasSourceURL' in json else None,
- is_module=bool(json['isModule']) if 'isModule' in json else None,
- length=int(json['length']) if 'length' in json else None,
- stack_trace=runtime.StackTrace.from_json(json['stackTrace']) if 'stackTrace' in json else None,
- code_offset=int(json['codeOffset']) if 'codeOffset' in json else None,
- script_language=ScriptLanguage.from_json(json['scriptLanguage']) if 'scriptLanguage' in json else None,
- debug_symbols=DebugSymbols.from_json(json['debugSymbols']) if 'debugSymbols' in json else None,
- embedder_name=str(json['embedderName']) if 'embedderName' in json else None
+ execution_context_aux_data=dict(json['executionContextAuxData']) if json.get('executionContextAuxData', None) is not None else None,
+ is_live_edit=bool(json['isLiveEdit']) if json.get('isLiveEdit', None) is not None else None,
+ source_map_url=str(json['sourceMapURL']) if json.get('sourceMapURL', None) is not None else None,
+ has_source_url=bool(json['hasSourceURL']) if json.get('hasSourceURL', None) is not None else None,
+ is_module=bool(json['isModule']) if json.get('isModule', None) is not None else None,
+ length=int(json['length']) if json.get('length', None) is not None else None,
+ stack_trace=runtime.StackTrace.from_json(json['stackTrace']) if json.get('stackTrace', None) is not None else None,
+ code_offset=int(json['codeOffset']) if json.get('codeOffset', None) is not None else None,
+ script_language=ScriptLanguage.from_json(json['scriptLanguage']) if json.get('scriptLanguage', None) is not None else None,
+ debug_symbols=DebugSymbols.from_json(json['debugSymbols']) if json.get('debugSymbols', None) is not None else None,
+ embedder_name=str(json['embedderName']) if json.get('embedderName', None) is not None else None
)
diff --git a/pycdp/cdp/dom.py b/pycdp/cdp/dom.py
index dd5a8ee..e8ebf69 100644
--- a/pycdp/cdp/dom.py
+++ b/pycdp/cdp/dom.py
@@ -100,10 +100,11 @@ class PseudoType(enum.Enum):
SCROLLBAR_CORNER = "scrollbar-corner"
RESIZER = "resizer"
INPUT_LIST_BUTTON = "input-list-button"
- TRANSITION = "transition"
- TRANSITION_CONTAINER = "transition-container"
- TRANSITION_OLD_CONTENT = "transition-old-content"
- TRANSITION_NEW_CONTENT = "transition-new-content"
+ VIEW_TRANSITION = "view-transition"
+ VIEW_TRANSITION_GROUP = "view-transition-group"
+ VIEW_TRANSITION_IMAGE_PAIR = "view-transition-image-pair"
+ VIEW_TRANSITION_OLD = "view-transition-old"
+ VIEW_TRANSITION_NEW = "view-transition-new"
def to_json(self) -> str:
return self.value
@@ -145,6 +146,38 @@ def from_json(cls, json: str) -> CompatibilityMode:
return cls(json)
+class PhysicalAxes(enum.Enum):
+ '''
+ ContainerSelector physical axes
+ '''
+ HORIZONTAL = "Horizontal"
+ VERTICAL = "Vertical"
+ BOTH = "Both"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> PhysicalAxes:
+ return cls(json)
+
+
+class LogicalAxes(enum.Enum):
+ '''
+ ContainerSelector logical axes
+ '''
+ INLINE = "Inline"
+ BLOCK = "Block"
+ BOTH = "Both"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> LogicalAxes:
+ return cls(json)
+
+
@dataclass
class Node:
'''
@@ -210,6 +243,10 @@ class Node:
#: Pseudo element type for this node.
pseudo_type: typing.Optional[PseudoType] = None
+ #: Pseudo element identifier for this node. Only present if there is a
+ #: valid pseudoType.
+ pseudo_identifier: typing.Optional[str] = None
+
#: Shadow root type.
shadow_root_type: typing.Optional[ShadowRootType] = None
@@ -241,6 +278,8 @@ class Node:
compatibility_mode: typing.Optional[CompatibilityMode] = None
+ assigned_slot: typing.Optional[BackendNode] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['nodeId'] = self.node_id.to_json()
@@ -275,6 +314,8 @@ def to_json(self) -> T_JSON_DICT:
json['value'] = self.value
if self.pseudo_type is not None:
json['pseudoType'] = self.pseudo_type.to_json()
+ if self.pseudo_identifier is not None:
+ json['pseudoIdentifier'] = self.pseudo_identifier
if self.shadow_root_type is not None:
json['shadowRootType'] = self.shadow_root_type.to_json()
if self.frame_id is not None:
@@ -295,6 +336,8 @@ def to_json(self) -> T_JSON_DICT:
json['isSVG'] = self.is_svg
if self.compatibility_mode is not None:
json['compatibilityMode'] = self.compatibility_mode.to_json()
+ if self.assigned_slot is not None:
+ json['assignedSlot'] = self.assigned_slot.to_json()
return json
@classmethod
@@ -306,29 +349,31 @@ def from_json(cls, json: T_JSON_DICT) -> Node:
node_name=str(json['nodeName']),
local_name=str(json['localName']),
node_value=str(json['nodeValue']),
- parent_id=NodeId.from_json(json['parentId']) if 'parentId' in json else None,
- child_node_count=int(json['childNodeCount']) if 'childNodeCount' in json else None,
- children=[Node.from_json(i) for i in json['children']] if 'children' in json else None,
- attributes=[str(i) for i in json['attributes']] if 'attributes' in json else None,
- document_url=str(json['documentURL']) if 'documentURL' in json else None,
- base_url=str(json['baseURL']) if 'baseURL' in json else None,
- public_id=str(json['publicId']) if 'publicId' in json else None,
- system_id=str(json['systemId']) if 'systemId' in json else None,
- internal_subset=str(json['internalSubset']) if 'internalSubset' in json else None,
- xml_version=str(json['xmlVersion']) if 'xmlVersion' in json else None,
- name=str(json['name']) if 'name' in json else None,
- value=str(json['value']) if 'value' in json else None,
- pseudo_type=PseudoType.from_json(json['pseudoType']) if 'pseudoType' in json else None,
- shadow_root_type=ShadowRootType.from_json(json['shadowRootType']) if 'shadowRootType' in json else None,
- frame_id=page.FrameId.from_json(json['frameId']) if 'frameId' in json else None,
- content_document=Node.from_json(json['contentDocument']) if 'contentDocument' in json else None,
- shadow_roots=[Node.from_json(i) for i in json['shadowRoots']] if 'shadowRoots' in json else None,
- template_content=Node.from_json(json['templateContent']) if 'templateContent' in json else None,
- pseudo_elements=[Node.from_json(i) for i in json['pseudoElements']] if 'pseudoElements' in json else None,
- imported_document=Node.from_json(json['importedDocument']) if 'importedDocument' in json else None,
- distributed_nodes=[BackendNode.from_json(i) for i in json['distributedNodes']] if 'distributedNodes' in json else None,
- is_svg=bool(json['isSVG']) if 'isSVG' in json else None,
- compatibility_mode=CompatibilityMode.from_json(json['compatibilityMode']) if 'compatibilityMode' in json else None,
+ parent_id=NodeId.from_json(json['parentId']) if json.get('parentId', None) is not None else None,
+ child_node_count=int(json['childNodeCount']) if json.get('childNodeCount', None) is not None else None,
+ children=[Node.from_json(i) for i in json['children']] if json.get('children', None) is not None else None,
+ attributes=[str(i) for i in json['attributes']] if json.get('attributes', None) is not None else None,
+ document_url=str(json['documentURL']) if json.get('documentURL', None) is not None else None,
+ base_url=str(json['baseURL']) if json.get('baseURL', None) is not None else None,
+ public_id=str(json['publicId']) if json.get('publicId', None) is not None else None,
+ system_id=str(json['systemId']) if json.get('systemId', None) is not None else None,
+ internal_subset=str(json['internalSubset']) if json.get('internalSubset', None) is not None else None,
+ xml_version=str(json['xmlVersion']) if json.get('xmlVersion', None) is not None else None,
+ name=str(json['name']) if json.get('name', None) is not None else None,
+ value=str(json['value']) if json.get('value', None) is not None else None,
+ pseudo_type=PseudoType.from_json(json['pseudoType']) if json.get('pseudoType', None) is not None else None,
+ pseudo_identifier=str(json['pseudoIdentifier']) if json.get('pseudoIdentifier', None) is not None else None,
+ shadow_root_type=ShadowRootType.from_json(json['shadowRootType']) if json.get('shadowRootType', None) is not None else None,
+ frame_id=page.FrameId.from_json(json['frameId']) if json.get('frameId', None) is not None else None,
+ content_document=Node.from_json(json['contentDocument']) if json.get('contentDocument', None) is not None else None,
+ shadow_roots=[Node.from_json(i) for i in json['shadowRoots']] if json.get('shadowRoots', None) is not None else None,
+ template_content=Node.from_json(json['templateContent']) if json.get('templateContent', None) is not None else None,
+ pseudo_elements=[Node.from_json(i) for i in json['pseudoElements']] if json.get('pseudoElements', None) is not None else None,
+ imported_document=Node.from_json(json['importedDocument']) if json.get('importedDocument', None) is not None else None,
+ distributed_nodes=[BackendNode.from_json(i) for i in json['distributedNodes']] if json.get('distributedNodes', None) is not None else None,
+ is_svg=bool(json['isSVG']) if json.get('isSVG', None) is not None else None,
+ compatibility_mode=CompatibilityMode.from_json(json['compatibilityMode']) if json.get('compatibilityMode', None) is not None else None,
+ assigned_slot=BackendNode.from_json(json['assignedSlot']) if json.get('assignedSlot', None) is not None else None,
)
@@ -364,7 +409,7 @@ def from_json(cls, json: T_JSON_DICT) -> RGBA:
r=int(json['r']),
g=int(json['g']),
b=int(json['b']),
- a=float(json['a']) if 'a' in json else None,
+ a=float(json['a']) if json.get('a', None) is not None else None,
)
@@ -430,7 +475,7 @@ def from_json(cls, json: T_JSON_DICT) -> BoxModel:
margin=Quad.from_json(json['margin']),
width=int(json['width']),
height=int(json['height']),
- shape_outside=ShapeOutsideInfo.from_json(json['shapeOutside']) if 'shapeOutside' in json else None,
+ shape_outside=ShapeOutsideInfo.from_json(json['shapeOutside']) if json.get('shapeOutside', None) is not None else None,
)
@@ -800,6 +845,7 @@ def get_document(
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,Node]:
'''
Returns the root DOM node (and optionally the subtree) to the caller.
+ Implicitly enables the DOM domain events for the current target.
:param depth: *(Optional)* The maximum depth at which children should be retrieved, defaults to 1. Use -1 for the entire subtree or provide an integer larger than 0.
:param pierce: *(Optional)* Whether or not iframes and shadow roots should be traversed when returning the subtree (default is false).
@@ -910,7 +956,7 @@ def get_node_for_location(
return (
BackendNodeId.from_json(json['backendNodeId']),
page.FrameId.from_json(json['frameId']),
- NodeId.from_json(json['nodeId']) if 'nodeId' in json else None
+ NodeId.from_json(json['nodeId']) if json.get('nodeId', None) is not None else None
)
@@ -1177,6 +1223,23 @@ def query_selector_all(
return [NodeId.from_json(i) for i in json['nodeIds']]
+def get_top_layer_elements() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[NodeId]]:
+ '''
+ Returns NodeIds of current top layer elements.
+ Top layer is rendered closest to the user within a viewport, therefore its elements always
+ appear on top of all other content.
+
+ **EXPERIMENTAL**
+
+ :returns: NodeIds of top layer elements
+ '''
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'DOM.getTopLayerElements',
+ }
+ json = yield cmd_dict
+ return [NodeId.from_json(i) for i in json['nodeIds']]
+
+
def redo() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Re-does the last undone action.
@@ -1420,7 +1483,7 @@ def get_node_stack_traces(
'params': params,
}
json = yield cmd_dict
- return runtime.StackTrace.from_json(json['creation']) if 'creation' in json else None
+ return runtime.StackTrace.from_json(json['creation']) if json.get('creation', None) is not None else None
def get_file_info(
@@ -1562,35 +1625,44 @@ def get_frame_owner(
json = yield cmd_dict
return (
BackendNodeId.from_json(json['backendNodeId']),
- NodeId.from_json(json['nodeId']) if 'nodeId' in json else None
+ NodeId.from_json(json['nodeId']) if json.get('nodeId', None) is not None else None
)
def get_container_for_node(
node_id: NodeId,
- container_name: typing.Optional[str] = None
+ container_name: typing.Optional[str] = None,
+ physical_axes: typing.Optional[PhysicalAxes] = None,
+ logical_axes: typing.Optional[LogicalAxes] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Optional[NodeId]]:
'''
- Returns the container of the given node based on container query conditions.
- If containerName is given, it will find the nearest container with a matching name;
- otherwise it will find the nearest container regardless of its container name.
+ Returns the query container of the given node based on container query
+ conditions: containerName, physical, and logical axes. If no axes are
+ provided, the style container is returned, which is the direct parent or the
+ closest element with a matching container-name.
**EXPERIMENTAL**
:param node_id:
:param container_name: *(Optional)*
+ :param physical_axes: *(Optional)*
+ :param logical_axes: *(Optional)*
:returns: *(Optional)* The container node for the given node, or null if not found.
'''
params: T_JSON_DICT = dict()
params['nodeId'] = node_id.to_json()
if container_name is not None:
params['containerName'] = container_name
+ if physical_axes is not None:
+ params['physicalAxes'] = physical_axes.to_json()
+ if logical_axes is not None:
+ params['logicalAxes'] = logical_axes.to_json()
cmd_dict: T_JSON_DICT = {
'method': 'DOM.getContainerForNode',
'params': params,
}
json = yield cmd_dict
- return NodeId.from_json(json['nodeId']) if 'nodeId' in json else None
+ return NodeId.from_json(json['nodeId']) if json.get('nodeId', None) is not None else None
def get_querying_descendants_for_container(
@@ -1702,7 +1774,7 @@ class ChildNodeInserted:
'''
#: Id of the node that has changed.
parent_node_id: NodeId
- #: If of the previous siblint.
+ #: Id of the previous sibling.
previous_node_id: NodeId
#: Inserted node data.
node: Node
@@ -1810,6 +1882,23 @@ def from_json(cls, json: T_JSON_DICT) -> PseudoElementAdded:
)
+@event_class('DOM.topLayerElementsUpdated')
+@dataclass
+class TopLayerElementsUpdated:
+ '''
+ **EXPERIMENTAL**
+
+ Called when top layer elements are changed.
+ '''
+
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> TopLayerElementsUpdated:
+ return cls(
+
+ )
+
+
@event_class('DOM.pseudoElementRemoved')
@dataclass
class PseudoElementRemoved:
diff --git a/pycdp/cdp/dom_debugger.py b/pycdp/cdp/dom_debugger.py
index 19fd2bc..624776a 100644
--- a/pycdp/cdp/dom_debugger.py
+++ b/pycdp/cdp/dom_debugger.py
@@ -108,9 +108,9 @@ def from_json(cls, json: T_JSON_DICT) -> EventListener:
script_id=runtime.ScriptId.from_json(json['scriptId']),
line_number=int(json['lineNumber']),
column_number=int(json['columnNumber']),
- handler=runtime.RemoteObject.from_json(json['handler']) if 'handler' in json else None,
- original_handler=runtime.RemoteObject.from_json(json['originalHandler']) if 'originalHandler' in json else None,
- backend_node_id=dom.BackendNodeId.from_json(json['backendNodeId']) if 'backendNodeId' in json else None,
+ handler=runtime.RemoteObject.from_json(json['handler']) if json.get('handler', None) is not None else None,
+ original_handler=runtime.RemoteObject.from_json(json['originalHandler']) if json.get('originalHandler', None) is not None else None,
+ backend_node_id=dom.BackendNodeId.from_json(json['backendNodeId']) if json.get('backendNodeId', None) is not None else None,
)
diff --git a/pycdp/cdp/dom_snapshot.py b/pycdp/cdp/dom_snapshot.py
index c9e2513..017c219 100644
--- a/pycdp/cdp/dom_snapshot.py
+++ b/pycdp/cdp/dom_snapshot.py
@@ -174,30 +174,30 @@ def from_json(cls, json: T_JSON_DICT) -> DOMNode:
node_name=str(json['nodeName']),
node_value=str(json['nodeValue']),
backend_node_id=dom.BackendNodeId.from_json(json['backendNodeId']),
- text_value=str(json['textValue']) if 'textValue' in json else None,
- input_value=str(json['inputValue']) if 'inputValue' in json else None,
- input_checked=bool(json['inputChecked']) if 'inputChecked' in json else None,
- option_selected=bool(json['optionSelected']) if 'optionSelected' in json else None,
- child_node_indexes=[int(i) for i in json['childNodeIndexes']] if 'childNodeIndexes' in json else None,
- attributes=[NameValue.from_json(i) for i in json['attributes']] if 'attributes' in json else None,
- pseudo_element_indexes=[int(i) for i in json['pseudoElementIndexes']] if 'pseudoElementIndexes' in json else None,
- layout_node_index=int(json['layoutNodeIndex']) if 'layoutNodeIndex' in json else None,
- document_url=str(json['documentURL']) if 'documentURL' in json else None,
- base_url=str(json['baseURL']) if 'baseURL' in json else None,
- content_language=str(json['contentLanguage']) if 'contentLanguage' in json else None,
- document_encoding=str(json['documentEncoding']) if 'documentEncoding' in json else None,
- public_id=str(json['publicId']) if 'publicId' in json else None,
- system_id=str(json['systemId']) if 'systemId' in json else None,
- frame_id=page.FrameId.from_json(json['frameId']) if 'frameId' in json else None,
- content_document_index=int(json['contentDocumentIndex']) if 'contentDocumentIndex' in json else None,
- pseudo_type=dom.PseudoType.from_json(json['pseudoType']) if 'pseudoType' in json else None,
- shadow_root_type=dom.ShadowRootType.from_json(json['shadowRootType']) if 'shadowRootType' in json else None,
- is_clickable=bool(json['isClickable']) if 'isClickable' in json else None,
- event_listeners=[dom_debugger.EventListener.from_json(i) for i in json['eventListeners']] if 'eventListeners' in json else None,
- current_source_url=str(json['currentSourceURL']) if 'currentSourceURL' in json else None,
- origin_url=str(json['originURL']) if 'originURL' in json else None,
- scroll_offset_x=float(json['scrollOffsetX']) if 'scrollOffsetX' in json else None,
- scroll_offset_y=float(json['scrollOffsetY']) if 'scrollOffsetY' in json else None,
+ text_value=str(json['textValue']) if json.get('textValue', None) is not None else None,
+ input_value=str(json['inputValue']) if json.get('inputValue', None) is not None else None,
+ input_checked=bool(json['inputChecked']) if json.get('inputChecked', None) is not None else None,
+ option_selected=bool(json['optionSelected']) if json.get('optionSelected', None) is not None else None,
+ child_node_indexes=[int(i) for i in json['childNodeIndexes']] if json.get('childNodeIndexes', None) is not None else None,
+ attributes=[NameValue.from_json(i) for i in json['attributes']] if json.get('attributes', None) is not None else None,
+ pseudo_element_indexes=[int(i) for i in json['pseudoElementIndexes']] if json.get('pseudoElementIndexes', None) is not None else None,
+ layout_node_index=int(json['layoutNodeIndex']) if json.get('layoutNodeIndex', None) is not None else None,
+ document_url=str(json['documentURL']) if json.get('documentURL', None) is not None else None,
+ base_url=str(json['baseURL']) if json.get('baseURL', None) is not None else None,
+ content_language=str(json['contentLanguage']) if json.get('contentLanguage', None) is not None else None,
+ document_encoding=str(json['documentEncoding']) if json.get('documentEncoding', None) is not None else None,
+ public_id=str(json['publicId']) if json.get('publicId', None) is not None else None,
+ system_id=str(json['systemId']) if json.get('systemId', None) is not None else None,
+ frame_id=page.FrameId.from_json(json['frameId']) if json.get('frameId', None) is not None else None,
+ content_document_index=int(json['contentDocumentIndex']) if json.get('contentDocumentIndex', None) is not None else None,
+ pseudo_type=dom.PseudoType.from_json(json['pseudoType']) if json.get('pseudoType', None) is not None else None,
+ shadow_root_type=dom.ShadowRootType.from_json(json['shadowRootType']) if json.get('shadowRootType', None) is not None else None,
+ is_clickable=bool(json['isClickable']) if json.get('isClickable', None) is not None else None,
+ event_listeners=[dom_debugger.EventListener.from_json(i) for i in json['eventListeners']] if json.get('eventListeners', None) is not None else None,
+ current_source_url=str(json['currentSourceURL']) if json.get('currentSourceURL', None) is not None else None,
+ origin_url=str(json['originURL']) if json.get('originURL', None) is not None else None,
+ scroll_offset_x=float(json['scrollOffsetX']) if json.get('scrollOffsetX', None) is not None else None,
+ scroll_offset_y=float(json['scrollOffsetY']) if json.get('scrollOffsetY', None) is not None else None,
)
@@ -283,11 +283,11 @@ def from_json(cls, json: T_JSON_DICT) -> LayoutTreeNode:
return cls(
dom_node_index=int(json['domNodeIndex']),
bounding_box=dom.Rect.from_json(json['boundingBox']),
- layout_text=str(json['layoutText']) if 'layoutText' in json else None,
- inline_text_nodes=[InlineTextBox.from_json(i) for i in json['inlineTextNodes']] if 'inlineTextNodes' in json else None,
- style_index=int(json['styleIndex']) if 'styleIndex' in json else None,
- paint_order=int(json['paintOrder']) if 'paintOrder' in json else None,
- is_stacking_context=bool(json['isStackingContext']) if 'isStackingContext' in json else None,
+ layout_text=str(json['layoutText']) if json.get('layoutText', None) is not None else None,
+ inline_text_nodes=[InlineTextBox.from_json(i) for i in json['inlineTextNodes']] if json.get('inlineTextNodes', None) is not None else None,
+ style_index=int(json['styleIndex']) if json.get('styleIndex', None) is not None else None,
+ paint_order=int(json['paintOrder']) if json.get('paintOrder', None) is not None else None,
+ is_stacking_context=bool(json['isStackingContext']) if json.get('isStackingContext', None) is not None else None,
)
@@ -524,10 +524,10 @@ def from_json(cls, json: T_JSON_DICT) -> DocumentSnapshot:
nodes=NodeTreeSnapshot.from_json(json['nodes']),
layout=LayoutTreeSnapshot.from_json(json['layout']),
text_boxes=TextBoxSnapshot.from_json(json['textBoxes']),
- scroll_offset_x=float(json['scrollOffsetX']) if 'scrollOffsetX' in json else None,
- scroll_offset_y=float(json['scrollOffsetY']) if 'scrollOffsetY' in json else None,
- content_width=float(json['contentWidth']) if 'contentWidth' in json else None,
- content_height=float(json['contentHeight']) if 'contentHeight' in json else None,
+ scroll_offset_x=float(json['scrollOffsetX']) if json.get('scrollOffsetX', None) is not None else None,
+ scroll_offset_y=float(json['scrollOffsetY']) if json.get('scrollOffsetY', None) is not None else None,
+ content_width=float(json['contentWidth']) if json.get('contentWidth', None) is not None else None,
+ content_height=float(json['contentHeight']) if json.get('contentHeight', None) is not None else None,
)
@@ -575,6 +575,10 @@ class NodeTreeSnapshot:
#: Type of a pseudo element node.
pseudo_type: typing.Optional[RareStringData] = None
+ #: Pseudo element identifier for this node. Only present if there is a
+ #: valid pseudoType.
+ pseudo_identifier: typing.Optional[RareStringData] = None
+
#: Whether this DOM node responds to mouse clicks. This includes nodes that have had click
#: event listeners attached via JavaScript as well as anchor tags that naturally navigate when
#: clicked.
@@ -614,6 +618,8 @@ def to_json(self) -> T_JSON_DICT:
json['contentDocumentIndex'] = self.content_document_index.to_json()
if self.pseudo_type is not None:
json['pseudoType'] = self.pseudo_type.to_json()
+ if self.pseudo_identifier is not None:
+ json['pseudoIdentifier'] = self.pseudo_identifier.to_json()
if self.is_clickable is not None:
json['isClickable'] = self.is_clickable.to_json()
if self.current_source_url is not None:
@@ -625,22 +631,23 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> NodeTreeSnapshot:
return cls(
- parent_index=[int(i) for i in json['parentIndex']] if 'parentIndex' in json else None,
- node_type=[int(i) for i in json['nodeType']] if 'nodeType' in json else None,
- shadow_root_type=RareStringData.from_json(json['shadowRootType']) if 'shadowRootType' in json else None,
- node_name=[StringIndex.from_json(i) for i in json['nodeName']] if 'nodeName' in json else None,
- node_value=[StringIndex.from_json(i) for i in json['nodeValue']] if 'nodeValue' in json else None,
- backend_node_id=[dom.BackendNodeId.from_json(i) for i in json['backendNodeId']] if 'backendNodeId' in json else None,
- attributes=[ArrayOfStrings.from_json(i) for i in json['attributes']] if 'attributes' in json else None,
- text_value=RareStringData.from_json(json['textValue']) if 'textValue' in json else None,
- input_value=RareStringData.from_json(json['inputValue']) if 'inputValue' in json else None,
- input_checked=RareBooleanData.from_json(json['inputChecked']) if 'inputChecked' in json else None,
- option_selected=RareBooleanData.from_json(json['optionSelected']) if 'optionSelected' in json else None,
- content_document_index=RareIntegerData.from_json(json['contentDocumentIndex']) if 'contentDocumentIndex' in json else None,
- pseudo_type=RareStringData.from_json(json['pseudoType']) if 'pseudoType' in json else None,
- is_clickable=RareBooleanData.from_json(json['isClickable']) if 'isClickable' in json else None,
- current_source_url=RareStringData.from_json(json['currentSourceURL']) if 'currentSourceURL' in json else None,
- origin_url=RareStringData.from_json(json['originURL']) if 'originURL' in json else None,
+ parent_index=[int(i) for i in json['parentIndex']] if json.get('parentIndex', None) is not None else None,
+ node_type=[int(i) for i in json['nodeType']] if json.get('nodeType', None) is not None else None,
+ shadow_root_type=RareStringData.from_json(json['shadowRootType']) if json.get('shadowRootType', None) is not None else None,
+ node_name=[StringIndex.from_json(i) for i in json['nodeName']] if json.get('nodeName', None) is not None else None,
+ node_value=[StringIndex.from_json(i) for i in json['nodeValue']] if json.get('nodeValue', None) is not None else None,
+ backend_node_id=[dom.BackendNodeId.from_json(i) for i in json['backendNodeId']] if json.get('backendNodeId', None) is not None else None,
+ attributes=[ArrayOfStrings.from_json(i) for i in json['attributes']] if json.get('attributes', None) is not None else None,
+ text_value=RareStringData.from_json(json['textValue']) if json.get('textValue', None) is not None else None,
+ input_value=RareStringData.from_json(json['inputValue']) if json.get('inputValue', None) is not None else None,
+ input_checked=RareBooleanData.from_json(json['inputChecked']) if json.get('inputChecked', None) is not None else None,
+ option_selected=RareBooleanData.from_json(json['optionSelected']) if json.get('optionSelected', None) is not None else None,
+ content_document_index=RareIntegerData.from_json(json['contentDocumentIndex']) if json.get('contentDocumentIndex', None) is not None else None,
+ pseudo_type=RareStringData.from_json(json['pseudoType']) if json.get('pseudoType', None) is not None else None,
+ pseudo_identifier=RareStringData.from_json(json['pseudoIdentifier']) if json.get('pseudoIdentifier', None) is not None else None,
+ is_clickable=RareBooleanData.from_json(json['isClickable']) if json.get('isClickable', None) is not None else None,
+ current_source_url=RareStringData.from_json(json['currentSourceURL']) if json.get('currentSourceURL', None) is not None else None,
+ origin_url=RareStringData.from_json(json['originURL']) if json.get('originURL', None) is not None else None,
)
@@ -713,12 +720,12 @@ def from_json(cls, json: T_JSON_DICT) -> LayoutTreeSnapshot:
bounds=[Rectangle.from_json(i) for i in json['bounds']],
text=[StringIndex.from_json(i) for i in json['text']],
stacking_contexts=RareBooleanData.from_json(json['stackingContexts']),
- paint_orders=[int(i) for i in json['paintOrders']] if 'paintOrders' in json else None,
- offset_rects=[Rectangle.from_json(i) for i in json['offsetRects']] if 'offsetRects' in json else None,
- scroll_rects=[Rectangle.from_json(i) for i in json['scrollRects']] if 'scrollRects' in json else None,
- client_rects=[Rectangle.from_json(i) for i in json['clientRects']] if 'clientRects' in json else None,
- blended_background_colors=[StringIndex.from_json(i) for i in json['blendedBackgroundColors']] if 'blendedBackgroundColors' in json else None,
- text_color_opacities=[float(i) for i in json['textColorOpacities']] if 'textColorOpacities' in json else None,
+ paint_orders=[int(i) for i in json['paintOrders']] if json.get('paintOrders', None) is not None else None,
+ offset_rects=[Rectangle.from_json(i) for i in json['offsetRects']] if json.get('offsetRects', None) is not None else None,
+ scroll_rects=[Rectangle.from_json(i) for i in json['scrollRects']] if json.get('scrollRects', None) is not None else None,
+ client_rects=[Rectangle.from_json(i) for i in json['clientRects']] if json.get('clientRects', None) is not None else None,
+ blended_background_colors=[StringIndex.from_json(i) for i in json['blendedBackgroundColors']] if json.get('blendedBackgroundColors', None) is not None else None,
+ text_color_opacities=[float(i) for i in json['textColorOpacities']] if json.get('textColorOpacities', None) is not None else None,
)
diff --git a/pycdp/cdp/dom_storage.py b/pycdp/cdp/dom_storage.py
index ca23937..9df6516 100644
--- a/pycdp/cdp/dom_storage.py
+++ b/pycdp/cdp/dom_storage.py
@@ -12,28 +12,47 @@
from .util import event_class, T_JSON_DICT
+class SerializedStorageKey(str):
+ def to_json(self) -> str:
+ return self
+
+ @classmethod
+ def from_json(cls, json: str) -> SerializedStorageKey:
+ return cls(json)
+
+ def __repr__(self):
+ return 'SerializedStorageKey({})'.format(super().__repr__())
+
+
@dataclass
class StorageId:
'''
DOM Storage identifier.
'''
- #: Security origin for the storage.
- security_origin: str
-
#: Whether the storage is local storage (not session storage).
is_local_storage: bool
+ #: Security origin for the storage.
+ security_origin: typing.Optional[str] = None
+
+ #: Represents a key by which DOM Storage keys its CachedStorageAreas
+ storage_key: typing.Optional[SerializedStorageKey] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
- json['securityOrigin'] = self.security_origin
json['isLocalStorage'] = self.is_local_storage
+ if self.security_origin is not None:
+ json['securityOrigin'] = self.security_origin
+ if self.storage_key is not None:
+ json['storageKey'] = self.storage_key.to_json()
return json
@classmethod
def from_json(cls, json: T_JSON_DICT) -> StorageId:
return cls(
- security_origin=str(json['securityOrigin']),
is_local_storage=bool(json['isLocalStorage']),
+ security_origin=str(json['securityOrigin']) if json.get('securityOrigin', None) is not None else None,
+ storage_key=SerializedStorageKey.from_json(json['storageKey']) if json.get('storageKey', None) is not None else None,
)
diff --git a/pycdp/cdp/emulation.py b/pycdp/cdp/emulation.py
index 0fd4269..b24dc28 100644
--- a/pycdp/cdp/emulation.py
+++ b/pycdp/cdp/emulation.py
@@ -150,12 +150,18 @@ class UserAgentMetadata:
mobile: bool
+ #: Brands appearing in Sec-CH-UA.
brands: typing.Optional[typing.List[UserAgentBrandVersion]] = None
+ #: Brands appearing in Sec-CH-UA-Full-Version-List.
full_version_list: typing.Optional[typing.List[UserAgentBrandVersion]] = None
full_version: typing.Optional[str] = None
+ bitness: typing.Optional[str] = None
+
+ wow64: typing.Optional[bool] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['platform'] = self.platform
@@ -169,6 +175,10 @@ def to_json(self) -> T_JSON_DICT:
json['fullVersionList'] = [i.to_json() for i in self.full_version_list]
if self.full_version is not None:
json['fullVersion'] = self.full_version
+ if self.bitness is not None:
+ json['bitness'] = self.bitness
+ if self.wow64 is not None:
+ json['wow64'] = self.wow64
return json
@classmethod
@@ -179,9 +189,11 @@ def from_json(cls, json: T_JSON_DICT) -> UserAgentMetadata:
architecture=str(json['architecture']),
model=str(json['model']),
mobile=bool(json['mobile']),
- brands=[UserAgentBrandVersion.from_json(i) for i in json['brands']] if 'brands' in json else None,
- full_version_list=[UserAgentBrandVersion.from_json(i) for i in json['fullVersionList']] if 'fullVersionList' in json else None,
- full_version=str(json['fullVersion']) if 'fullVersion' in json else None,
+ brands=[UserAgentBrandVersion.from_json(i) for i in json['brands']] if json.get('brands', None) is not None else None,
+ full_version_list=[UserAgentBrandVersion.from_json(i) for i in json['fullVersionList']] if json.get('fullVersionList', None) is not None else None,
+ full_version=str(json['fullVersion']) if json.get('fullVersion', None) is not None else None,
+ bitness=str(json['bitness']) if json.get('bitness', None) is not None else None,
+ wow64=bool(json['wow64']) if json.get('wow64', None) is not None else None,
)
@@ -190,7 +202,6 @@ class DisabledImageType(enum.Enum):
Enum of image types that can be disabled.
'''
AVIF = "avif"
- JXL = "jxl"
WEBP = "webp"
def to_json(self) -> str:
@@ -478,7 +489,7 @@ def set_emulated_vision_deficiency(
**EXPERIMENTAL**
- :param type_: Vision deficiency to emulate.
+ :param type_: Vision deficiency to emulate. Order: best-effort emulations come first, followed by any physiologically accurate emulations for medically recognized color vision deficiencies.
'''
params: T_JSON_DICT = dict()
params['type'] = type_
@@ -748,6 +759,25 @@ def set_disabled_image_types(
json = yield cmd_dict
+def set_hardware_concurrency_override(
+ hardware_concurrency: int
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+
+
+ **EXPERIMENTAL**
+
+ :param hardware_concurrency: Hardware concurrency to report
+ '''
+ params: T_JSON_DICT = dict()
+ params['hardwareConcurrency'] = hardware_concurrency
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Emulation.setHardwareConcurrencyOverride',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def set_user_agent_override(
user_agent: str,
accept_language: typing.Optional[str] = None,
@@ -777,6 +807,25 @@ def set_user_agent_override(
json = yield cmd_dict
+def set_automation_override(
+ enabled: bool
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Allows overriding the automation flag.
+
+ **EXPERIMENTAL**
+
+ :param enabled: Whether the override should be enabled.
+ '''
+ params: T_JSON_DICT = dict()
+ params['enabled'] = enabled
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Emulation.setAutomationOverride',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
@event_class('Emulation.virtualTimeBudgetExpired')
@dataclass
class VirtualTimeBudgetExpired:
diff --git a/pycdp/cdp/fetch.py b/pycdp/cdp/fetch.py
index 0ba5296..f37f4cc 100644
--- a/pycdp/cdp/fetch.py
+++ b/pycdp/cdp/fetch.py
@@ -73,9 +73,9 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> RequestPattern:
return cls(
- url_pattern=str(json['urlPattern']) if 'urlPattern' in json else None,
- resource_type=network.ResourceType.from_json(json['resourceType']) if 'resourceType' in json else None,
- request_stage=RequestStage.from_json(json['requestStage']) if 'requestStage' in json else None,
+ url_pattern=str(json['urlPattern']) if json.get('urlPattern', None) is not None else None,
+ resource_type=network.ResourceType.from_json(json['resourceType']) if json.get('resourceType', None) is not None else None,
+ request_stage=RequestStage.from_json(json['requestStage']) if json.get('requestStage', None) is not None else None,
)
@@ -134,7 +134,7 @@ def from_json(cls, json: T_JSON_DICT) -> AuthChallenge:
origin=str(json['origin']),
scheme=str(json['scheme']),
realm=str(json['realm']),
- source=str(json['source']) if 'source' in json else None,
+ source=str(json['source']) if json.get('source', None) is not None else None,
)
@@ -169,8 +169,8 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> AuthChallengeResponse:
return cls(
response=str(json['response']),
- username=str(json['username']) if 'username' in json else None,
- password=str(json['password']) if 'password' in json else None,
+ username=str(json['username']) if json.get('username', None) is not None else None,
+ password=str(json['password']) if json.get('password', None) is not None else None,
)
@@ -278,7 +278,7 @@ def continue_request(
:param url: *(Optional)* If set, the request url will be modified in a way that's not observable by page.
:param method: *(Optional)* If set, the request method is overridden.
:param post_data: *(Optional)* If set, overrides the post data in the request. (Encoded as a base64 string when passed over JSON)
- :param headers: *(Optional)* If set, overrides the request headers.
+ :param headers: *(Optional)* If set, overrides the request headers. Note that the overrides do not extend to subsequent redirect hops, if a redirect happens. Another override may be applied to a different request produced by a redirect.
:param intercept_response: **(EXPERIMENTAL)** *(Optional)* If set, overrides response interception behavior for this request.
'''
params: T_JSON_DICT = dict()
@@ -444,7 +444,10 @@ class RequestPaused:
response_headers: typing.Optional[typing.List[HeaderEntry]]
#: If the intercepted request had a corresponding Network.requestWillBeSent event fired for it,
#: then this networkId will be the same as the requestId present in the requestWillBeSent event.
- network_id: typing.Optional[RequestId]
+ network_id: typing.Optional[network.RequestId]
+ #: If the request is due to a redirect response from the server, the id of the request that
+ #: has caused the redirect.
+ redirected_request_id: typing.Optional[RequestId]
@classmethod
def from_json(cls, json: T_JSON_DICT) -> RequestPaused:
@@ -453,11 +456,12 @@ def from_json(cls, json: T_JSON_DICT) -> RequestPaused:
request=network.Request.from_json(json['request']),
frame_id=page.FrameId.from_json(json['frameId']),
resource_type=network.ResourceType.from_json(json['resourceType']),
- response_error_reason=network.ErrorReason.from_json(json['responseErrorReason']) if 'responseErrorReason' in json else None,
- response_status_code=int(json['responseStatusCode']) if 'responseStatusCode' in json else None,
- response_status_text=str(json['responseStatusText']) if 'responseStatusText' in json else None,
- response_headers=[HeaderEntry.from_json(i) for i in json['responseHeaders']] if 'responseHeaders' in json else None,
- network_id=RequestId.from_json(json['networkId']) if 'networkId' in json else None
+ response_error_reason=network.ErrorReason.from_json(json['responseErrorReason']) if json.get('responseErrorReason', None) is not None else None,
+ response_status_code=int(json['responseStatusCode']) if json.get('responseStatusCode', None) is not None else None,
+ response_status_text=str(json['responseStatusText']) if json.get('responseStatusText', None) is not None else None,
+ response_headers=[HeaderEntry.from_json(i) for i in json['responseHeaders']] if json.get('responseHeaders', None) is not None else None,
+ network_id=network.RequestId.from_json(json['networkId']) if json.get('networkId', None) is not None else None,
+ redirected_request_id=RequestId.from_json(json['redirectedRequestId']) if json.get('redirectedRequestId', None) is not None else None
)
diff --git a/pycdp/cdp/headless_experimental.py b/pycdp/cdp/headless_experimental.py
index 37be695..7577334 100644
--- a/pycdp/cdp/headless_experimental.py
+++ b/pycdp/cdp/headless_experimental.py
@@ -26,19 +26,25 @@ class ScreenshotParams:
#: Compression quality from range [0..100] (jpeg only).
quality: typing.Optional[int] = None
+ #: Optimize image encoding for speed, not for resulting size (defaults to false)
+ optimize_for_speed: typing.Optional[bool] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
if self.format_ is not None:
json['format'] = self.format_
if self.quality is not None:
json['quality'] = self.quality
+ if self.optimize_for_speed is not None:
+ json['optimizeForSpeed'] = self.optimize_for_speed
return json
@classmethod
def from_json(cls, json: T_JSON_DICT) -> ScreenshotParams:
return cls(
- format_=str(json['format']) if 'format' in json else None,
- quality=int(json['quality']) if 'quality' in json else None,
+ format_=str(json['format']) if json.get('format', None) is not None else None,
+ quality=int(json['quality']) if json.get('quality', None) is not None else None,
+ optimize_for_speed=bool(json['optimizeForSpeed']) if json.get('optimizeForSpeed', None) is not None else None,
)
@@ -52,7 +58,7 @@ def begin_frame(
Sends a BeginFrame to the target and returns when the frame was completed. Optionally captures a
screenshot from the resulting frame. Requires that the target was created with enabled
BeginFrameControl. Designed for use with --run-all-compositor-stages-before-draw, see also
- https://goo.gl/3zHXhB for more background.
+ https://goo.gle/chrome-headless-rendering for more background.
:param frame_time_ticks: *(Optional)* Timestamp of this BeginFrame in Renderer TimeTicks (milliseconds of uptime). If not set, the current time will be used.
:param interval: *(Optional)* The interval between BeginFrames that is reported to the compositor, in milliseconds. Defaults to a 60 frames/second interval, i.e. about 16.666 milliseconds.
@@ -79,13 +85,16 @@ def begin_frame(
json = yield cmd_dict
return (
bool(json['hasDamage']),
- str(json['screenshotData']) if 'screenshotData' in json else None
+ str(json['screenshotData']) if json.get('screenshotData', None) is not None else None
)
+@deprecated(version="1.3")
def disable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Disables headless events for the target.
+
+ .. deprecated:: 1.3
'''
cmd_dict: T_JSON_DICT = {
'method': 'HeadlessExperimental.disable',
@@ -93,30 +102,14 @@ def disable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
json = yield cmd_dict
+@deprecated(version="1.3")
def enable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Enables headless events for the target.
+
+ .. deprecated:: 1.3
'''
cmd_dict: T_JSON_DICT = {
'method': 'HeadlessExperimental.enable',
}
json = yield cmd_dict
-
-
-@deprecated(version="1.3")
-@event_class('HeadlessExperimental.needsBeginFramesChanged')
-@dataclass
-class NeedsBeginFramesChanged:
- '''
- Issued when the target starts or stops needing BeginFrames.
- Deprecated. Issue beginFrame unconditionally instead and use result from
- beginFrame to detect whether the frames were suppressed.
- '''
- #: True if BeginFrames are needed, false otherwise.
- needs_begin_frames: bool
-
- @classmethod
- def from_json(cls, json: T_JSON_DICT) -> NeedsBeginFramesChanged:
- return cls(
- needs_begin_frames=bool(json['needsBeginFrames'])
- )
diff --git a/pycdp/cdp/heap_profiler.py b/pycdp/cdp/heap_profiler.py
index cf6717d..e28df4d 100644
--- a/pycdp/cdp/heap_profiler.py
+++ b/pycdp/cdp/heap_profiler.py
@@ -212,14 +212,22 @@ def get_sampling_profile() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,SamplingH
def start_sampling(
- sampling_interval: typing.Optional[float] = None
+ sampling_interval: typing.Optional[float] = None,
+ include_objects_collected_by_major_gc: typing.Optional[bool] = None,
+ include_objects_collected_by_minor_gc: typing.Optional[bool] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
:param sampling_interval: *(Optional)* Average sample interval in bytes. Poisson distribution is used for the intervals. The default value is 32768 bytes.
+ :param include_objects_collected_by_major_gc: *(Optional)* By default, the sampling heap profiler reports only objects which are still alive when the profile is returned via getSamplingProfile or stopSampling, which is useful for determining what functions contribute the most to steady-state memory usage. This flag instructs the sampling heap profiler to also include information about objects discarded by major GC, which will show which functions cause large temporary memory usage or long GC pauses.
+ :param include_objects_collected_by_minor_gc: *(Optional)* By default, the sampling heap profiler reports only objects which are still alive when the profile is returned via getSamplingProfile or stopSampling, which is useful for determining what functions contribute the most to steady-state memory usage. This flag instructs the sampling heap profiler to also include information about objects discarded by minor GC, which is useful when tuning a latency-sensitive application for minimal GC activity.
'''
params: T_JSON_DICT = dict()
if sampling_interval is not None:
params['samplingInterval'] = sampling_interval
+ if include_objects_collected_by_major_gc is not None:
+ params['includeObjectsCollectedByMajorGC'] = include_objects_collected_by_major_gc
+ if include_objects_collected_by_minor_gc is not None:
+ params['includeObjectsCollectedByMinorGC'] = include_objects_collected_by_minor_gc
cmd_dict: T_JSON_DICT = {
'method': 'HeapProfiler.startSampling',
'params': params,
@@ -259,12 +267,14 @@ def stop_sampling() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,SamplingHeapProf
def stop_tracking_heap_objects(
report_progress: typing.Optional[bool] = None,
treat_global_objects_as_roots: typing.Optional[bool] = None,
- capture_numeric_value: typing.Optional[bool] = None
+ capture_numeric_value: typing.Optional[bool] = None,
+ expose_internals: typing.Optional[bool] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
:param report_progress: *(Optional)* If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken when the tracking is stopped.
- :param treat_global_objects_as_roots: *(Optional)*
+ :param treat_global_objects_as_roots: **(DEPRECATED)** *(Optional)* Deprecated in favor of ```exposeInternals```.
:param capture_numeric_value: *(Optional)* If true, numerical values are included in the snapshot
+ :param expose_internals: **(EXPERIMENTAL)** *(Optional)* If true, exposes internals of the snapshot.
'''
params: T_JSON_DICT = dict()
if report_progress is not None:
@@ -273,6 +283,8 @@ def stop_tracking_heap_objects(
params['treatGlobalObjectsAsRoots'] = treat_global_objects_as_roots
if capture_numeric_value is not None:
params['captureNumericValue'] = capture_numeric_value
+ if expose_internals is not None:
+ params['exposeInternals'] = expose_internals
cmd_dict: T_JSON_DICT = {
'method': 'HeapProfiler.stopTrackingHeapObjects',
'params': params,
@@ -283,12 +295,14 @@ def stop_tracking_heap_objects(
def take_heap_snapshot(
report_progress: typing.Optional[bool] = None,
treat_global_objects_as_roots: typing.Optional[bool] = None,
- capture_numeric_value: typing.Optional[bool] = None
+ capture_numeric_value: typing.Optional[bool] = None,
+ expose_internals: typing.Optional[bool] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
:param report_progress: *(Optional)* If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken.
- :param treat_global_objects_as_roots: *(Optional)* If true, a raw snapshot without artificial roots will be generated
+ :param treat_global_objects_as_roots: **(DEPRECATED)** *(Optional)* If true, a raw snapshot without artificial roots will be generated. Deprecated in favor of ```exposeInternals```.
:param capture_numeric_value: *(Optional)* If true, numerical values are included in the snapshot
+ :param expose_internals: **(EXPERIMENTAL)** *(Optional)* If true, exposes internals of the snapshot.
'''
params: T_JSON_DICT = dict()
if report_progress is not None:
@@ -297,6 +311,8 @@ def take_heap_snapshot(
params['treatGlobalObjectsAsRoots'] = treat_global_objects_as_roots
if capture_numeric_value is not None:
params['captureNumericValue'] = capture_numeric_value
+ if expose_internals is not None:
+ params['exposeInternals'] = expose_internals
cmd_dict: T_JSON_DICT = {
'method': 'HeapProfiler.takeHeapSnapshot',
'params': params,
@@ -365,7 +381,7 @@ def from_json(cls, json: T_JSON_DICT) -> ReportHeapSnapshotProgress:
return cls(
done=int(json['done']),
total=int(json['total']),
- finished=bool(json['finished']) if 'finished' in json else None
+ finished=bool(json['finished']) if json.get('finished', None) is not None else None
)
diff --git a/pycdp/cdp/indexed_db.py b/pycdp/cdp/indexed_db.py
index 06fe96d..1685e07 100644
--- a/pycdp/cdp/indexed_db.py
+++ b/pycdp/cdp/indexed_db.py
@@ -152,10 +152,10 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> Key:
return cls(
type_=str(json['type']),
- number=float(json['number']) if 'number' in json else None,
- string=str(json['string']) if 'string' in json else None,
- date=float(json['date']) if 'date' in json else None,
- array=[Key.from_json(i) for i in json['array']] if 'array' in json else None,
+ number=float(json['number']) if json.get('number', None) is not None else None,
+ string=str(json['string']) if json.get('string', None) is not None else None,
+ date=float(json['date']) if json.get('date', None) is not None else None,
+ array=[Key.from_json(i) for i in json['array']] if json.get('array', None) is not None else None,
)
@@ -191,8 +191,8 @@ def from_json(cls, json: T_JSON_DICT) -> KeyRange:
return cls(
lower_open=bool(json['lowerOpen']),
upper_open=bool(json['upperOpen']),
- lower=Key.from_json(json['lower']) if 'lower' in json else None,
- upper=Key.from_json(json['upper']) if 'upper' in json else None,
+ lower=Key.from_json(json['lower']) if json.get('lower', None) is not None else None,
+ upper=Key.from_json(json['upper']) if json.get('upper', None) is not None else None,
)
@@ -253,25 +253,30 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> KeyPath:
return cls(
type_=str(json['type']),
- string=str(json['string']) if 'string' in json else None,
- array=[str(i) for i in json['array']] if 'array' in json else None,
+ string=str(json['string']) if json.get('string', None) is not None else None,
+ array=[str(i) for i in json['array']] if json.get('array', None) is not None else None,
)
def clear_object_store(
- security_origin: str,
database_name: str,
- object_store_name: str
+ object_store_name: str,
+ security_origin: typing.Optional[str] = None,
+ storage_key: typing.Optional[str] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Clears all entries from an object store.
- :param security_origin: Security origin.
+ :param security_origin: *(Optional)* At least and at most one of securityOrigin, storageKey must be specified. Security origin.
+ :param storage_key: *(Optional)* Storage key.
:param database_name: Database name.
:param object_store_name: Object store name.
'''
params: T_JSON_DICT = dict()
- params['securityOrigin'] = security_origin
+ if security_origin is not None:
+ params['securityOrigin'] = security_origin
+ if storage_key is not None:
+ params['storageKey'] = storage_key
params['databaseName'] = database_name
params['objectStoreName'] = object_store_name
cmd_dict: T_JSON_DICT = {
@@ -282,17 +287,22 @@ def clear_object_store(
def delete_database(
- security_origin: str,
- database_name: str
+ database_name: str,
+ security_origin: typing.Optional[str] = None,
+ storage_key: typing.Optional[str] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Deletes a database.
- :param security_origin: Security origin.
+ :param security_origin: *(Optional)* At least and at most one of securityOrigin, storageKey must be specified. Security origin.
+ :param storage_key: *(Optional)* Storage key.
:param database_name: Database name.
'''
params: T_JSON_DICT = dict()
- params['securityOrigin'] = security_origin
+ if security_origin is not None:
+ params['securityOrigin'] = security_origin
+ if storage_key is not None:
+ params['storageKey'] = storage_key
params['databaseName'] = database_name
cmd_dict: T_JSON_DICT = {
'method': 'IndexedDB.deleteDatabase',
@@ -302,21 +312,26 @@ def delete_database(
def delete_object_store_entries(
- security_origin: str,
database_name: str,
object_store_name: str,
- key_range: KeyRange
+ key_range: KeyRange,
+ security_origin: typing.Optional[str] = None,
+ storage_key: typing.Optional[str] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Delete a range of entries from an object store
- :param security_origin:
+ :param security_origin: *(Optional)* At least and at most one of securityOrigin, storageKey must be specified. Security origin.
+ :param storage_key: *(Optional)* Storage key.
:param database_name:
:param object_store_name:
:param key_range: Range of entry keys to delete
'''
params: T_JSON_DICT = dict()
- params['securityOrigin'] = security_origin
+ if security_origin is not None:
+ params['securityOrigin'] = security_origin
+ if storage_key is not None:
+ params['storageKey'] = storage_key
params['databaseName'] = database_name
params['objectStoreName'] = object_store_name
params['keyRange'] = key_range.to_json()
@@ -348,18 +363,20 @@ def enable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
def request_data(
- security_origin: str,
database_name: str,
object_store_name: str,
index_name: str,
skip_count: int,
page_size: int,
+ security_origin: typing.Optional[str] = None,
+ storage_key: typing.Optional[str] = None,
key_range: typing.Optional[KeyRange] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.List[DataEntry], bool]]:
'''
Requests data from object store or index.
- :param security_origin: Security origin.
+ :param security_origin: *(Optional)* At least and at most one of securityOrigin, storageKey must be specified. Security origin.
+ :param storage_key: *(Optional)* Storage key.
:param database_name: Database name.
:param object_store_name: Object store name.
:param index_name: Index name, empty string for object store data requests.
@@ -372,7 +389,10 @@ def request_data(
1. **hasMore** - If true, there are more entries to fetch in the given range.
'''
params: T_JSON_DICT = dict()
- params['securityOrigin'] = security_origin
+ if security_origin is not None:
+ params['securityOrigin'] = security_origin
+ if storage_key is not None:
+ params['storageKey'] = storage_key
params['databaseName'] = database_name
params['objectStoreName'] = object_store_name
params['indexName'] = index_name
@@ -392,14 +412,16 @@ def request_data(
def get_metadata(
- security_origin: str,
database_name: str,
- object_store_name: str
+ object_store_name: str,
+ security_origin: typing.Optional[str] = None,
+ storage_key: typing.Optional[str] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[float, float]]:
'''
- Gets metadata of an object store
+ Gets metadata of an object store.
- :param security_origin: Security origin.
+ :param security_origin: *(Optional)* At least and at most one of securityOrigin, storageKey must be specified. Security origin.
+ :param storage_key: *(Optional)* Storage key.
:param database_name: Database name.
:param object_store_name: Object store name.
:returns: A tuple with the following items:
@@ -408,7 +430,10 @@ def get_metadata(
1. **keyGeneratorValue** - the current value of key generator, to become the next inserted key into the object store. Valid if objectStore.autoIncrement is true.
'''
params: T_JSON_DICT = dict()
- params['securityOrigin'] = security_origin
+ if security_origin is not None:
+ params['securityOrigin'] = security_origin
+ if storage_key is not None:
+ params['storageKey'] = storage_key
params['databaseName'] = database_name
params['objectStoreName'] = object_store_name
cmd_dict: T_JSON_DICT = {
@@ -423,18 +448,23 @@ def get_metadata(
def request_database(
- security_origin: str,
- database_name: str
+ database_name: str,
+ security_origin: typing.Optional[str] = None,
+ storage_key: typing.Optional[str] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,DatabaseWithObjectStores]:
'''
Requests database with given name in given frame.
- :param security_origin: Security origin.
+ :param security_origin: *(Optional)* Exactly one of securityOrigin and storageKey must be specified. Security origin.
+ :param storage_key: *(Optional)* Storage key.
:param database_name: Database name.
:returns: Database with an array of object stores.
'''
params: T_JSON_DICT = dict()
- params['securityOrigin'] = security_origin
+ if security_origin is not None:
+ params['securityOrigin'] = security_origin
+ if storage_key is not None:
+ params['storageKey'] = storage_key
params['databaseName'] = database_name
cmd_dict: T_JSON_DICT = {
'method': 'IndexedDB.requestDatabase',
@@ -445,16 +475,21 @@ def request_database(
def request_database_names(
- security_origin: str
+ security_origin: typing.Optional[str] = None,
+ storage_key: typing.Optional[str] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[str]]:
'''
Requests database names for given security origin.
- :param security_origin: Security origin.
+ :param security_origin: *(Optional)* Exactly one of securityOrigin and storageKey must be specified. Security origin.
+ :param storage_key: *(Optional)* Storage key.
:returns: Database names for origin.
'''
params: T_JSON_DICT = dict()
- params['securityOrigin'] = security_origin
+ if security_origin is not None:
+ params['securityOrigin'] = security_origin
+ if storage_key is not None:
+ params['storageKey'] = storage_key
cmd_dict: T_JSON_DICT = {
'method': 'IndexedDB.requestDatabaseNames',
'params': params,
diff --git a/pycdp/cdp/input_.py b/pycdp/cdp/input_.py
index 4705f37..0b3d175 100644
--- a/pycdp/cdp/input_.py
+++ b/pycdp/cdp/input_.py
@@ -77,15 +77,15 @@ def from_json(cls, json: T_JSON_DICT) -> TouchPoint:
return cls(
x=float(json['x']),
y=float(json['y']),
- radius_x=float(json['radiusX']) if 'radiusX' in json else None,
- radius_y=float(json['radiusY']) if 'radiusY' in json else None,
- rotation_angle=float(json['rotationAngle']) if 'rotationAngle' in json else None,
- force=float(json['force']) if 'force' in json else None,
- tangential_pressure=float(json['tangentialPressure']) if 'tangentialPressure' in json else None,
- tilt_x=int(json['tiltX']) if 'tiltX' in json else None,
- tilt_y=int(json['tiltY']) if 'tiltY' in json else None,
- twist=int(json['twist']) if 'twist' in json else None,
- id_=float(json['id']) if 'id' in json else None,
+ radius_x=float(json['radiusX']) if json.get('radiusX', None) is not None else None,
+ radius_y=float(json['radiusY']) if json.get('radiusY', None) is not None else None,
+ rotation_angle=float(json['rotationAngle']) if json.get('rotationAngle', None) is not None else None,
+ force=float(json['force']) if json.get('force', None) is not None else None,
+ tangential_pressure=float(json['tangentialPressure']) if json.get('tangentialPressure', None) is not None else None,
+ tilt_x=int(json['tiltX']) if json.get('tiltX', None) is not None else None,
+ tilt_y=int(json['tiltY']) if json.get('tiltY', None) is not None else None,
+ twist=int(json['twist']) if json.get('twist', None) is not None else None,
+ id_=float(json['id']) if json.get('id', None) is not None else None,
)
@@ -164,8 +164,8 @@ def from_json(cls, json: T_JSON_DICT) -> DragDataItem:
return cls(
mime_type=str(json['mimeType']),
data=str(json['data']),
- title=str(json['title']) if 'title' in json else None,
- base_url=str(json['baseURL']) if 'baseURL' in json else None,
+ title=str(json['title']) if json.get('title', None) is not None else None,
+ base_url=str(json['baseURL']) if json.get('baseURL', None) is not None else None,
)
@@ -192,7 +192,7 @@ def from_json(cls, json: T_JSON_DICT) -> DragData:
return cls(
items=[DragDataItem.from_json(i) for i in json['items']],
drag_operations_mask=int(json['dragOperationsMask']),
- files=[str(i) for i in json['files']] if 'files' in json else None,
+ files=[str(i) for i in json['files']] if json.get('files', None) is not None else None,
)
diff --git a/pycdp/cdp/io.py b/pycdp/cdp/io.py
index 3590778..1424cd7 100644
--- a/pycdp/cdp/io.py
+++ b/pycdp/cdp/io.py
@@ -76,7 +76,7 @@ def read(
}
json = yield cmd_dict
return (
- bool(json['base64Encoded']) if 'base64Encoded' in json else None,
+ bool(json['base64Encoded']) if json.get('base64Encoded', None) is not None else None,
str(json['data']),
bool(json['eof'])
)
diff --git a/pycdp/cdp/layer_tree.py b/pycdp/cdp/layer_tree.py
index c94d67c..08d8539 100644
--- a/pycdp/cdp/layer_tree.py
+++ b/pycdp/cdp/layer_tree.py
@@ -101,8 +101,8 @@ def from_json(cls, json: T_JSON_DICT) -> StickyPositionConstraint:
return cls(
sticky_box_rect=dom.Rect.from_json(json['stickyBoxRect']),
containing_block_rect=dom.Rect.from_json(json['containingBlockRect']),
- nearest_layer_shifting_sticky_box=LayerId.from_json(json['nearestLayerShiftingStickyBox']) if 'nearestLayerShiftingStickyBox' in json else None,
- nearest_layer_shifting_containing_block=LayerId.from_json(json['nearestLayerShiftingContainingBlock']) if 'nearestLayerShiftingContainingBlock' in json else None,
+ nearest_layer_shifting_sticky_box=LayerId.from_json(json['nearestLayerShiftingStickyBox']) if json.get('nearestLayerShiftingStickyBox', None) is not None else None,
+ nearest_layer_shifting_containing_block=LayerId.from_json(json['nearestLayerShiftingContainingBlock']) if json.get('nearestLayerShiftingContainingBlock', None) is not None else None,
)
@@ -229,15 +229,15 @@ def from_json(cls, json: T_JSON_DICT) -> Layer:
height=float(json['height']),
paint_count=int(json['paintCount']),
draws_content=bool(json['drawsContent']),
- parent_layer_id=LayerId.from_json(json['parentLayerId']) if 'parentLayerId' in json else None,
- backend_node_id=dom.BackendNodeId.from_json(json['backendNodeId']) if 'backendNodeId' in json else None,
- transform=[float(i) for i in json['transform']] if 'transform' in json else None,
- anchor_x=float(json['anchorX']) if 'anchorX' in json else None,
- anchor_y=float(json['anchorY']) if 'anchorY' in json else None,
- anchor_z=float(json['anchorZ']) if 'anchorZ' in json else None,
- invisible=bool(json['invisible']) if 'invisible' in json else None,
- scroll_rects=[ScrollRect.from_json(i) for i in json['scrollRects']] if 'scrollRects' in json else None,
- sticky_position_constraint=StickyPositionConstraint.from_json(json['stickyPositionConstraint']) if 'stickyPositionConstraint' in json else None,
+ parent_layer_id=LayerId.from_json(json['parentLayerId']) if json.get('parentLayerId', None) is not None else None,
+ backend_node_id=dom.BackendNodeId.from_json(json['backendNodeId']) if json.get('backendNodeId', None) is not None else None,
+ transform=[float(i) for i in json['transform']] if json.get('transform', None) is not None else None,
+ anchor_x=float(json['anchorX']) if json.get('anchorX', None) is not None else None,
+ anchor_y=float(json['anchorY']) if json.get('anchorY', None) is not None else None,
+ anchor_z=float(json['anchorZ']) if json.get('anchorZ', None) is not None else None,
+ invisible=bool(json['invisible']) if json.get('invisible', None) is not None else None,
+ scroll_rects=[ScrollRect.from_json(i) for i in json['scrollRects']] if json.get('scrollRects', None) is not None else None,
+ sticky_position_constraint=StickyPositionConstraint.from_json(json['stickyPositionConstraint']) if json.get('stickyPositionConstraint', None) is not None else None,
)
@@ -460,5 +460,5 @@ class LayerTreeDidChange:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> LayerTreeDidChange:
return cls(
- layers=[Layer.from_json(i) for i in json['layers']] if 'layers' in json else None
+ layers=[Layer.from_json(i) for i in json['layers']] if json.get('layers', None) is not None else None
)
diff --git a/pycdp/cdp/log.py b/pycdp/cdp/log.py
index 44db254..4acafb3 100644
--- a/pycdp/cdp/log.py
+++ b/pycdp/cdp/log.py
@@ -81,13 +81,13 @@ def from_json(cls, json: T_JSON_DICT) -> LogEntry:
level=str(json['level']),
text=str(json['text']),
timestamp=runtime.Timestamp.from_json(json['timestamp']),
- category=str(json['category']) if 'category' in json else None,
- url=str(json['url']) if 'url' in json else None,
- line_number=int(json['lineNumber']) if 'lineNumber' in json else None,
- stack_trace=runtime.StackTrace.from_json(json['stackTrace']) if 'stackTrace' in json else None,
- network_request_id=network.RequestId.from_json(json['networkRequestId']) if 'networkRequestId' in json else None,
- worker_id=str(json['workerId']) if 'workerId' in json else None,
- args=[runtime.RemoteObject.from_json(i) for i in json['args']] if 'args' in json else None,
+ category=str(json['category']) if json.get('category', None) is not None else None,
+ url=str(json['url']) if json.get('url', None) is not None else None,
+ line_number=int(json['lineNumber']) if json.get('lineNumber', None) is not None else None,
+ stack_trace=runtime.StackTrace.from_json(json['stackTrace']) if json.get('stackTrace', None) is not None else None,
+ network_request_id=network.RequestId.from_json(json['networkRequestId']) if json.get('networkRequestId', None) is not None else None,
+ worker_id=str(json['workerId']) if json.get('workerId', None) is not None else None,
+ args=[runtime.RemoteObject.from_json(i) for i in json['args']] if json.get('args', None) is not None else None,
)
diff --git a/pycdp/cdp/media.py b/pycdp/cdp/media.py
index fd15922..7f6f430 100644
--- a/pycdp/cdp/media.py
+++ b/pycdp/cdp/media.py
@@ -118,31 +118,68 @@ def from_json(cls, json: T_JSON_DICT) -> PlayerEvent:
)
+@dataclass
+class PlayerErrorSourceLocation:
+ '''
+ Represents logged source line numbers reported in an error.
+ NOTE: file and line are from chromium c++ implementation code, not js.
+ '''
+ file: str
+
+ line: int
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['file'] = self.file
+ json['line'] = self.line
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> PlayerErrorSourceLocation:
+ return cls(
+ file=str(json['file']),
+ line=int(json['line']),
+ )
+
+
@dataclass
class PlayerError:
'''
Corresponds to kMediaError
'''
- type_: str
+ error_type: str
+
+ #: Code is the numeric enum entry for a specific set of error codes, such
+ #: as PipelineStatusCodes in media/base/pipeline_status.h
+ code: int
+
+ #: A trace of where this error was caused / where it passed through.
+ stack: typing.List[PlayerErrorSourceLocation]
+
+ #: Errors potentially have a root cause error, i.e., a DecoderError might be
+ #: caused by a WindowsError
+ cause: typing.List[PlayerError]
- #: When this switches to using media::Status instead of PipelineStatus
- #: we can remove "errorCode" and replace it with the fields from
- #: a Status instance. This also seems like a duplicate of the error
- #: level enum - there is a todo bug to have that level removed and
- #: use this instead. (crbug.com/1068454)
- error_code: str
+ #: Extra data attached to an error, such as an HRESULT, Video Codec, etc.
+ data: dict
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
- json['type'] = self.type_
- json['errorCode'] = self.error_code
+ json['errorType'] = self.error_type
+ json['code'] = self.code
+ json['stack'] = [i.to_json() for i in self.stack]
+ json['cause'] = [i.to_json() for i in self.cause]
+ json['data'] = self.data
return json
@classmethod
def from_json(cls, json: T_JSON_DICT) -> PlayerError:
return cls(
- type_=str(json['type']),
- error_code=str(json['errorCode']),
+ error_type=str(json['errorType']),
+ code=int(json['code']),
+ stack=[PlayerErrorSourceLocation.from_json(i) for i in json['stack']],
+ cause=[PlayerError.from_json(i) for i in json['cause']],
+ data=dict(json['data']),
)
diff --git a/pycdp/cdp/network.py b/pycdp/cdp/network.py
index 0d10271..e70981f 100644
--- a/pycdp/cdp/network.py
+++ b/pycdp/cdp/network.py
@@ -33,6 +33,7 @@ class ResourceType(enum.Enum):
TEXT_TRACK = "TextTrack"
XHR = "XHR"
FETCH = "Fetch"
+ PREFETCH = "Prefetch"
EVENT_SOURCE = "EventSource"
WEB_SOCKET = "WebSocket"
MANIFEST = "Manifest"
@@ -381,7 +382,7 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> PostDataEntry:
return cls(
- bytes_=str(json['bytes']) if 'bytes' in json else None,
+ bytes_=str(json['bytes']) if json.get('bytes', None) is not None else None,
)
@@ -464,14 +465,14 @@ def from_json(cls, json: T_JSON_DICT) -> Request:
headers=Headers.from_json(json['headers']),
initial_priority=ResourcePriority.from_json(json['initialPriority']),
referrer_policy=str(json['referrerPolicy']),
- url_fragment=str(json['urlFragment']) if 'urlFragment' in json else None,
- post_data=str(json['postData']) if 'postData' in json else None,
- has_post_data=bool(json['hasPostData']) if 'hasPostData' in json else None,
- post_data_entries=[PostDataEntry.from_json(i) for i in json['postDataEntries']] if 'postDataEntries' in json else None,
- mixed_content_type=security.MixedContentType.from_json(json['mixedContentType']) if 'mixedContentType' in json else None,
- is_link_preload=bool(json['isLinkPreload']) if 'isLinkPreload' in json else None,
- trust_token_params=TrustTokenParams.from_json(json['trustTokenParams']) if 'trustTokenParams' in json else None,
- is_same_site=bool(json['isSameSite']) if 'isSameSite' in json else None,
+ url_fragment=str(json['urlFragment']) if json.get('urlFragment', None) is not None else None,
+ post_data=str(json['postData']) if json.get('postData', None) is not None else None,
+ has_post_data=bool(json['hasPostData']) if json.get('hasPostData', None) is not None else None,
+ post_data_entries=[PostDataEntry.from_json(i) for i in json['postDataEntries']] if json.get('postDataEntries', None) is not None else None,
+ mixed_content_type=security.MixedContentType.from_json(json['mixedContentType']) if json.get('mixedContentType', None) is not None else None,
+ is_link_preload=bool(json['isLinkPreload']) if json.get('isLinkPreload', None) is not None else None,
+ trust_token_params=TrustTokenParams.from_json(json['trustTokenParams']) if json.get('trustTokenParams', None) is not None else None,
+ is_same_site=bool(json['isSameSite']) if json.get('isSameSite', None) is not None else None,
)
@@ -569,12 +570,20 @@ class SecurityDetails:
#: Whether the request complied with Certificate Transparency policy
certificate_transparency_compliance: CertificateTransparencyCompliance
+ #: Whether the connection used Encrypted ClientHello
+ encrypted_client_hello: bool
+
#: (EC)DH group used by the connection, if applicable.
key_exchange_group: typing.Optional[str] = None
#: TLS MAC. Note that AEAD ciphers do not have separate MACs.
mac: typing.Optional[str] = None
+ #: The signature algorithm used by the server in the TLS server signature,
+ #: represented as a TLS SignatureScheme code point. Omitted if not
+ #: applicable or not known.
+ server_signature_algorithm: typing.Optional[int] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['protocol'] = self.protocol
@@ -588,10 +597,13 @@ def to_json(self) -> T_JSON_DICT:
json['validTo'] = self.valid_to.to_json()
json['signedCertificateTimestampList'] = [i.to_json() for i in self.signed_certificate_timestamp_list]
json['certificateTransparencyCompliance'] = self.certificate_transparency_compliance.to_json()
+ json['encryptedClientHello'] = self.encrypted_client_hello
if self.key_exchange_group is not None:
json['keyExchangeGroup'] = self.key_exchange_group
if self.mac is not None:
json['mac'] = self.mac
+ if self.server_signature_algorithm is not None:
+ json['serverSignatureAlgorithm'] = self.server_signature_algorithm
return json
@classmethod
@@ -608,8 +620,10 @@ def from_json(cls, json: T_JSON_DICT) -> SecurityDetails:
valid_to=TimeSinceEpoch.from_json(json['validTo']),
signed_certificate_timestamp_list=[SignedCertificateTimestamp.from_json(i) for i in json['signedCertificateTimestampList']],
certificate_transparency_compliance=CertificateTransparencyCompliance.from_json(json['certificateTransparencyCompliance']),
- key_exchange_group=str(json['keyExchangeGroup']) if 'keyExchangeGroup' in json else None,
- mac=str(json['mac']) if 'mac' in json else None,
+ encrypted_client_hello=bool(json['encryptedClientHello']),
+ key_exchange_group=str(json['keyExchangeGroup']) if json.get('keyExchangeGroup', None) is not None else None,
+ mac=str(json['mac']) if json.get('mac', None) is not None else None,
+ server_signature_algorithm=int(json['serverSignatureAlgorithm']) if json.get('serverSignatureAlgorithm', None) is not None else None,
)
@@ -741,9 +755,9 @@ class TrustTokenParams:
depending on the type, some additional parameters. The values
are specified in third_party/blink/renderer/core/fetch/trust_token.idl.
'''
- type_: TrustTokenOperationType
+ operation: TrustTokenOperationType
- #: Only set for "token-redemption" type and determine whether
+ #: Only set for "token-redemption" operation and determine whether
#: to request a fresh SRR or use a still valid cached SRR.
refresh_policy: str
@@ -753,7 +767,7 @@ class TrustTokenParams:
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
- json['type'] = self.type_.to_json()
+ json['operation'] = self.operation.to_json()
json['refreshPolicy'] = self.refresh_policy
if self.issuers is not None:
json['issuers'] = [i for i in self.issuers]
@@ -762,9 +776,9 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> TrustTokenParams:
return cls(
- type_=TrustTokenOperationType.from_json(json['type']),
+ operation=TrustTokenOperationType.from_json(json['operation']),
refresh_policy=str(json['refreshPolicy']),
- issuers=[str(i) for i in json['issuers']] if 'issuers' in json else None,
+ issuers=[str(i) for i in json['issuers']] if json.get('issuers', None) is not None else None,
)
@@ -781,6 +795,27 @@ def from_json(cls, json: str) -> TrustTokenOperationType:
return cls(json)
+class AlternateProtocolUsage(enum.Enum):
+ '''
+ The reason why Chrome uses a specific transport protocol for HTTP semantics.
+ '''
+ ALTERNATIVE_JOB_WON_WITHOUT_RACE = "alternativeJobWonWithoutRace"
+ ALTERNATIVE_JOB_WON_RACE = "alternativeJobWonRace"
+ MAIN_JOB_WON_RACE = "mainJobWonRace"
+ MAPPING_MISSING = "mappingMissing"
+ BROKEN = "broken"
+ DNS_ALPN_H3_JOB_WON_WITHOUT_RACE = "dnsAlpnH3JobWonWithoutRace"
+ DNS_ALPN_H3_JOB_WON_RACE = "dnsAlpnH3JobWonRace"
+ UNSPECIFIED_REASON = "unspecifiedReason"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> AlternateProtocolUsage:
+ return cls(json)
+
+
@dataclass
class Response:
'''
@@ -852,6 +887,9 @@ class Response:
#: Protocol used to fetch this request.
protocol: typing.Optional[str] = None
+ #: The reason why Chrome uses a specific transport protocol for HTTP semantics.
+ alternate_protocol_usage: typing.Optional[AlternateProtocolUsage] = None
+
#: Security details for the request.
security_details: typing.Optional[SecurityDetails] = None
@@ -892,6 +930,8 @@ def to_json(self) -> T_JSON_DICT:
json['cacheStorageCacheName'] = self.cache_storage_cache_name
if self.protocol is not None:
json['protocol'] = self.protocol
+ if self.alternate_protocol_usage is not None:
+ json['alternateProtocolUsage'] = self.alternate_protocol_usage.to_json()
if self.security_details is not None:
json['securityDetails'] = self.security_details.to_json()
return json
@@ -908,20 +948,21 @@ def from_json(cls, json: T_JSON_DICT) -> Response:
connection_id=float(json['connectionId']),
encoded_data_length=float(json['encodedDataLength']),
security_state=security.SecurityState.from_json(json['securityState']),
- headers_text=str(json['headersText']) if 'headersText' in json else None,
- request_headers=Headers.from_json(json['requestHeaders']) if 'requestHeaders' in json else None,
- request_headers_text=str(json['requestHeadersText']) if 'requestHeadersText' in json else None,
- remote_ip_address=str(json['remoteIPAddress']) if 'remoteIPAddress' in json else None,
- remote_port=int(json['remotePort']) if 'remotePort' in json else None,
- from_disk_cache=bool(json['fromDiskCache']) if 'fromDiskCache' in json else None,
- from_service_worker=bool(json['fromServiceWorker']) if 'fromServiceWorker' in json else None,
- from_prefetch_cache=bool(json['fromPrefetchCache']) if 'fromPrefetchCache' in json else None,
- timing=ResourceTiming.from_json(json['timing']) if 'timing' in json else None,
- service_worker_response_source=ServiceWorkerResponseSource.from_json(json['serviceWorkerResponseSource']) if 'serviceWorkerResponseSource' in json else None,
- response_time=TimeSinceEpoch.from_json(json['responseTime']) if 'responseTime' in json else None,
- cache_storage_cache_name=str(json['cacheStorageCacheName']) if 'cacheStorageCacheName' in json else None,
- protocol=str(json['protocol']) if 'protocol' in json else None,
- security_details=SecurityDetails.from_json(json['securityDetails']) if 'securityDetails' in json else None,
+ headers_text=str(json['headersText']) if json.get('headersText', None) is not None else None,
+ request_headers=Headers.from_json(json['requestHeaders']) if json.get('requestHeaders', None) is not None else None,
+ request_headers_text=str(json['requestHeadersText']) if json.get('requestHeadersText', None) is not None else None,
+ remote_ip_address=str(json['remoteIPAddress']) if json.get('remoteIPAddress', None) is not None else None,
+ remote_port=int(json['remotePort']) if json.get('remotePort', None) is not None else None,
+ from_disk_cache=bool(json['fromDiskCache']) if json.get('fromDiskCache', None) is not None else None,
+ from_service_worker=bool(json['fromServiceWorker']) if json.get('fromServiceWorker', None) is not None else None,
+ from_prefetch_cache=bool(json['fromPrefetchCache']) if json.get('fromPrefetchCache', None) is not None else None,
+ timing=ResourceTiming.from_json(json['timing']) if json.get('timing', None) is not None else None,
+ service_worker_response_source=ServiceWorkerResponseSource.from_json(json['serviceWorkerResponseSource']) if json.get('serviceWorkerResponseSource', None) is not None else None,
+ response_time=TimeSinceEpoch.from_json(json['responseTime']) if json.get('responseTime', None) is not None else None,
+ cache_storage_cache_name=str(json['cacheStorageCacheName']) if json.get('cacheStorageCacheName', None) is not None else None,
+ protocol=str(json['protocol']) if json.get('protocol', None) is not None else None,
+ alternate_protocol_usage=AlternateProtocolUsage.from_json(json['alternateProtocolUsage']) if json.get('alternateProtocolUsage', None) is not None else None,
+ security_details=SecurityDetails.from_json(json['securityDetails']) if json.get('securityDetails', None) is not None else None,
)
@@ -987,9 +1028,9 @@ def from_json(cls, json: T_JSON_DICT) -> WebSocketResponse:
status=int(json['status']),
status_text=str(json['statusText']),
headers=Headers.from_json(json['headers']),
- headers_text=str(json['headersText']) if 'headersText' in json else None,
- request_headers=Headers.from_json(json['requestHeaders']) if 'requestHeaders' in json else None,
- request_headers_text=str(json['requestHeadersText']) if 'requestHeadersText' in json else None,
+ headers_text=str(json['headersText']) if json.get('headersText', None) is not None else None,
+ request_headers=Headers.from_json(json['requestHeaders']) if json.get('requestHeaders', None) is not None else None,
+ request_headers_text=str(json['requestHeadersText']) if json.get('requestHeadersText', None) is not None else None,
)
@@ -1057,7 +1098,7 @@ def from_json(cls, json: T_JSON_DICT) -> CachedResource:
url=str(json['url']),
type_=ResourceType.from_json(json['type']),
body_size=float(json['bodySize']),
- response=Response.from_json(json['response']) if 'response' in json else None,
+ response=Response.from_json(json['response']) if json.get('response', None) is not None else None,
)
@@ -1105,11 +1146,11 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> Initiator:
return cls(
type_=str(json['type']),
- stack=runtime.StackTrace.from_json(json['stack']) if 'stack' in json else None,
- url=str(json['url']) if 'url' in json else None,
- line_number=float(json['lineNumber']) if 'lineNumber' in json else None,
- column_number=float(json['columnNumber']) if 'columnNumber' in json else None,
- request_id=RequestId.from_json(json['requestId']) if 'requestId' in json else None,
+ stack=runtime.StackTrace.from_json(json['stack']) if json.get('stack', None) is not None else None,
+ url=str(json['url']) if json.get('url', None) is not None else None,
+ line_number=float(json['lineNumber']) if json.get('lineNumber', None) is not None else None,
+ column_number=float(json['columnNumber']) if json.get('columnNumber', None) is not None else None,
+ request_id=RequestId.from_json(json['requestId']) if json.get('requestId', None) is not None else None,
)
@@ -1130,9 +1171,6 @@ class Cookie:
#: Cookie path.
path: str
- #: Cookie expiration date as the number of seconds since the UNIX epoch.
- expires: float
-
#: Cookie size.
size: int
@@ -1159,6 +1197,9 @@ class Cookie:
#: This is a temporary ability and it will be removed in the future.
source_port: int
+ #: Cookie expiration date as the number of seconds since the UNIX epoch.
+ expires: typing.Optional[float] = None
+
#: Cookie SameSite type.
same_site: typing.Optional[CookieSameSite] = None
@@ -1175,7 +1216,6 @@ def to_json(self) -> T_JSON_DICT:
json['value'] = self.value
json['domain'] = self.domain
json['path'] = self.path
- json['expires'] = self.expires
json['size'] = self.size
json['httpOnly'] = self.http_only
json['secure'] = self.secure
@@ -1184,6 +1224,8 @@ def to_json(self) -> T_JSON_DICT:
json['sameParty'] = self.same_party
json['sourceScheme'] = self.source_scheme.to_json()
json['sourcePort'] = self.source_port
+ if self.expires is not None:
+ json['expires'] = self.expires
if self.same_site is not None:
json['sameSite'] = self.same_site.to_json()
if self.partition_key is not None:
@@ -1199,7 +1241,6 @@ def from_json(cls, json: T_JSON_DICT) -> Cookie:
value=str(json['value']),
domain=str(json['domain']),
path=str(json['path']),
- expires=float(json['expires']),
size=int(json['size']),
http_only=bool(json['httpOnly']),
secure=bool(json['secure']),
@@ -1208,9 +1249,10 @@ def from_json(cls, json: T_JSON_DICT) -> Cookie:
same_party=bool(json['sameParty']),
source_scheme=CookieSourceScheme.from_json(json['sourceScheme']),
source_port=int(json['sourcePort']),
- same_site=CookieSameSite.from_json(json['sameSite']) if 'sameSite' in json else None,
- partition_key=str(json['partitionKey']) if 'partitionKey' in json else None,
- partition_key_opaque=bool(json['partitionKeyOpaque']) if 'partitionKeyOpaque' in json else None,
+ expires=float(json['expires']) if json.get('expires', None) is not None else None,
+ same_site=CookieSameSite.from_json(json['sameSite']) if json.get('sameSite', None) is not None else None,
+ partition_key=str(json['partitionKey']) if json.get('partitionKey', None) is not None else None,
+ partition_key_opaque=bool(json['partitionKeyOpaque']) if json.get('partitionKeyOpaque', None) is not None else None,
)
@@ -1224,6 +1266,7 @@ class SetCookieBlockedReason(enum.Enum):
SAME_SITE_UNSPECIFIED_TREATED_AS_LAX = "SameSiteUnspecifiedTreatedAsLax"
SAME_SITE_NONE_INSECURE = "SameSiteNoneInsecure"
USER_PREFERENCES = "UserPreferences"
+ THIRD_PARTY_BLOCKED_IN_FIRST_PARTY_SET = "ThirdPartyBlockedInFirstPartySet"
SYNTAX_ERROR = "SyntaxError"
SCHEME_NOT_SUPPORTED = "SchemeNotSupported"
OVERWRITE_SECURE = "OverwriteSecure"
@@ -1257,6 +1300,7 @@ class CookieBlockedReason(enum.Enum):
SAME_SITE_UNSPECIFIED_TREATED_AS_LAX = "SameSiteUnspecifiedTreatedAsLax"
SAME_SITE_NONE_INSECURE = "SameSiteNoneInsecure"
USER_PREFERENCES = "UserPreferences"
+ THIRD_PARTY_BLOCKED_IN_FIRST_PARTY_SET = "ThirdPartyBlockedInFirstPartySet"
UNKNOWN_ERROR = "UnknownError"
SCHEMEFUL_SAME_SITE_STRICT = "SchemefulSameSiteStrict"
SCHEMEFUL_SAME_SITE_LAX = "SchemefulSameSiteLax"
@@ -1302,7 +1346,7 @@ def from_json(cls, json: T_JSON_DICT) -> BlockedSetCookieWithReason:
return cls(
blocked_reasons=[SetCookieBlockedReason.from_json(i) for i in json['blockedReasons']],
cookie_line=str(json['cookieLine']),
- cookie=Cookie.from_json(json['cookie']) if 'cookie' in json else None,
+ cookie=Cookie.from_json(json['cookie']) if json.get('cookie', None) is not None else None,
)
@@ -1418,18 +1462,18 @@ def from_json(cls, json: T_JSON_DICT) -> CookieParam:
return cls(
name=str(json['name']),
value=str(json['value']),
- url=str(json['url']) if 'url' in json else None,
- domain=str(json['domain']) if 'domain' in json else None,
- path=str(json['path']) if 'path' in json else None,
- secure=bool(json['secure']) if 'secure' in json else None,
- http_only=bool(json['httpOnly']) if 'httpOnly' in json else None,
- same_site=CookieSameSite.from_json(json['sameSite']) if 'sameSite' in json else None,
- expires=TimeSinceEpoch.from_json(json['expires']) if 'expires' in json else None,
- priority=CookiePriority.from_json(json['priority']) if 'priority' in json else None,
- same_party=bool(json['sameParty']) if 'sameParty' in json else None,
- source_scheme=CookieSourceScheme.from_json(json['sourceScheme']) if 'sourceScheme' in json else None,
- source_port=int(json['sourcePort']) if 'sourcePort' in json else None,
- partition_key=str(json['partitionKey']) if 'partitionKey' in json else None,
+ url=str(json['url']) if json.get('url', None) is not None else None,
+ domain=str(json['domain']) if json.get('domain', None) is not None else None,
+ path=str(json['path']) if json.get('path', None) is not None else None,
+ secure=bool(json['secure']) if json.get('secure', None) is not None else None,
+ http_only=bool(json['httpOnly']) if json.get('httpOnly', None) is not None else None,
+ same_site=CookieSameSite.from_json(json['sameSite']) if json.get('sameSite', None) is not None else None,
+ expires=TimeSinceEpoch.from_json(json['expires']) if json.get('expires', None) is not None else None,
+ priority=CookiePriority.from_json(json['priority']) if json.get('priority', None) is not None else None,
+ same_party=bool(json['sameParty']) if json.get('sameParty', None) is not None else None,
+ source_scheme=CookieSourceScheme.from_json(json['sourceScheme']) if json.get('sourceScheme', None) is not None else None,
+ source_port=int(json['sourcePort']) if json.get('sourcePort', None) is not None else None,
+ partition_key=str(json['partitionKey']) if json.get('partitionKey', None) is not None else None,
)
@@ -1465,7 +1509,7 @@ def from_json(cls, json: T_JSON_DICT) -> AuthChallenge:
origin=str(json['origin']),
scheme=str(json['scheme']),
realm=str(json['realm']),
- source=str(json['source']) if 'source' in json else None,
+ source=str(json['source']) if json.get('source', None) is not None else None,
)
@@ -1500,8 +1544,8 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> AuthChallengeResponse:
return cls(
response=str(json['response']),
- username=str(json['username']) if 'username' in json else None,
- password=str(json['password']) if 'password' in json else None,
+ username=str(json['username']) if json.get('username', None) is not None else None,
+ password=str(json['password']) if json.get('password', None) is not None else None,
)
@@ -1549,9 +1593,9 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> RequestPattern:
return cls(
- url_pattern=str(json['urlPattern']) if 'urlPattern' in json else None,
- resource_type=ResourceType.from_json(json['resourceType']) if 'resourceType' in json else None,
- interception_stage=InterceptionStage.from_json(json['interceptionStage']) if 'interceptionStage' in json else None,
+ url_pattern=str(json['urlPattern']) if json.get('urlPattern', None) is not None else None,
+ resource_type=ResourceType.from_json(json['resourceType']) if json.get('resourceType', None) is not None else None,
+ interception_stage=InterceptionStage.from_json(json['interceptionStage']) if json.get('interceptionStage', None) is not None else None,
)
@@ -1613,9 +1657,9 @@ def from_json(cls, json: T_JSON_DICT) -> SignedExchangeSignature:
validity_url=str(json['validityUrl']),
date=int(json['date']),
expires=int(json['expires']),
- cert_url=str(json['certUrl']) if 'certUrl' in json else None,
- cert_sha256=str(json['certSha256']) if 'certSha256' in json else None,
- certificates=[str(i) for i in json['certificates']] if 'certificates' in json else None,
+ cert_url=str(json['certUrl']) if json.get('certUrl', None) is not None else None,
+ cert_sha256=str(json['certSha256']) if json.get('certSha256', None) is not None else None,
+ certificates=[str(i) for i in json['certificates']] if json.get('certificates', None) is not None else None,
)
@@ -1706,8 +1750,8 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> SignedExchangeError:
return cls(
message=str(json['message']),
- signature_index=int(json['signatureIndex']) if 'signatureIndex' in json else None,
- error_field=SignedExchangeErrorField.from_json(json['errorField']) if 'errorField' in json else None,
+ signature_index=int(json['signatureIndex']) if json.get('signatureIndex', None) is not None else None,
+ error_field=SignedExchangeErrorField.from_json(json['errorField']) if json.get('errorField', None) is not None else None,
)
@@ -1743,9 +1787,9 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> SignedExchangeInfo:
return cls(
outer_response=Response.from_json(json['outerResponse']),
- header=SignedExchangeHeader.from_json(json['header']) if 'header' in json else None,
- security_details=SecurityDetails.from_json(json['securityDetails']) if 'securityDetails' in json else None,
- errors=[SignedExchangeError.from_json(i) for i in json['errors']] if 'errors' in json else None,
+ header=SignedExchangeHeader.from_json(json['header']) if json.get('header', None) is not None else None,
+ security_details=SecurityDetails.from_json(json['securityDetails']) if json.get('securityDetails', None) is not None else None,
+ errors=[SignedExchangeError.from_json(i) for i in json['errors']] if json.get('errors', None) is not None else None,
)
@@ -1840,9 +1884,10 @@ def from_json(cls, json: T_JSON_DICT) -> ClientSecurityState:
class CrossOriginOpenerPolicyValue(enum.Enum):
SAME_ORIGIN = "SameOrigin"
SAME_ORIGIN_ALLOW_POPUPS = "SameOriginAllowPopups"
+ RESTRICT_PROPERTIES = "RestrictProperties"
UNSAFE_NONE = "UnsafeNone"
SAME_ORIGIN_PLUS_COEP = "SameOriginPlusCoep"
- SAME_ORIGIN_ALLOW_POPUPS_PLUS_COEP = "SameOriginAllowPopupsPlusCoep"
+ RESTRICT_PROPERTIES_PLUS_COEP = "RestrictPropertiesPlusCoep"
def to_json(self) -> str:
return self.value
@@ -1877,8 +1922,8 @@ def from_json(cls, json: T_JSON_DICT) -> CrossOriginOpenerPolicyStatus:
return cls(
value=CrossOriginOpenerPolicyValue.from_json(json['value']),
report_only_value=CrossOriginOpenerPolicyValue.from_json(json['reportOnlyValue']),
- reporting_endpoint=str(json['reportingEndpoint']) if 'reportingEndpoint' in json else None,
- report_only_reporting_endpoint=str(json['reportOnlyReportingEndpoint']) if 'reportOnlyReportingEndpoint' in json else None,
+ reporting_endpoint=str(json['reportingEndpoint']) if json.get('reportingEndpoint', None) is not None else None,
+ report_only_reporting_endpoint=str(json['reportOnlyReportingEndpoint']) if json.get('reportOnlyReportingEndpoint', None) is not None else None,
)
@@ -1920,8 +1965,8 @@ def from_json(cls, json: T_JSON_DICT) -> CrossOriginEmbedderPolicyStatus:
return cls(
value=CrossOriginEmbedderPolicyValue.from_json(json['value']),
report_only_value=CrossOriginEmbedderPolicyValue.from_json(json['reportOnlyValue']),
- reporting_endpoint=str(json['reportingEndpoint']) if 'reportingEndpoint' in json else None,
- report_only_reporting_endpoint=str(json['reportOnlyReportingEndpoint']) if 'reportOnlyReportingEndpoint' in json else None,
+ reporting_endpoint=str(json['reportingEndpoint']) if json.get('reportingEndpoint', None) is not None else None,
+ report_only_reporting_endpoint=str(json['reportOnlyReportingEndpoint']) if json.get('reportOnlyReportingEndpoint', None) is not None else None,
)
@@ -1942,8 +1987,8 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> SecurityIsolationStatus:
return cls(
- coop=CrossOriginOpenerPolicyStatus.from_json(json['coop']) if 'coop' in json else None,
- coep=CrossOriginEmbedderPolicyStatus.from_json(json['coep']) if 'coep' in json else None,
+ coop=CrossOriginOpenerPolicyStatus.from_json(json['coop']) if json.get('coop', None) is not None else None,
+ coep=CrossOriginEmbedderPolicyStatus.from_json(json['coep']) if json.get('coep', None) is not None else None,
)
@@ -2094,11 +2139,11 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> LoadNetworkResourcePageResult:
return cls(
success=bool(json['success']),
- net_error=float(json['netError']) if 'netError' in json else None,
- net_error_name=str(json['netErrorName']) if 'netErrorName' in json else None,
- http_status_code=float(json['httpStatusCode']) if 'httpStatusCode' in json else None,
- stream=io.StreamHandle.from_json(json['stream']) if 'stream' in json else None,
- headers=Headers.from_json(json['headers']) if 'headers' in json else None,
+ net_error=float(json['netError']) if json.get('netError', None) is not None else None,
+ net_error_name=str(json['netErrorName']) if json.get('netErrorName', None) is not None else None,
+ http_status_code=float(json['httpStatusCode']) if json.get('httpStatusCode', None) is not None else None,
+ stream=io.StreamHandle.from_json(json['stream']) if json.get('stream', None) is not None else None,
+ headers=Headers.from_json(json['headers']) if json.get('headers', None) is not None else None,
)
@@ -2374,10 +2419,14 @@ def enable(
json = yield cmd_dict
+@deprecated(version="1.3")
def get_all_cookies() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[Cookie]]:
'''
Returns all browser cookies. Depending on the backend support, will return detailed cookie
information in the ``cookies`` field.
+ Deprecated. Use Storage.getCookies instead.
+
+ .. deprecated:: 1.3
:returns: Array of cookie objects.
'''
@@ -2959,9 +3008,9 @@ def from_json(cls, json: T_JSON_DICT) -> LoadingFailed:
timestamp=MonotonicTime.from_json(json['timestamp']),
type_=ResourceType.from_json(json['type']),
error_text=str(json['errorText']),
- canceled=bool(json['canceled']) if 'canceled' in json else None,
- blocked_reason=BlockedReason.from_json(json['blockedReason']) if 'blockedReason' in json else None,
- cors_error_status=CorsErrorStatus.from_json(json['corsErrorStatus']) if 'corsErrorStatus' in json else None
+ canceled=bool(json['canceled']) if json.get('canceled', None) is not None else None,
+ blocked_reason=BlockedReason.from_json(json['blockedReason']) if json.get('blockedReason', None) is not None else None,
+ cors_error_status=CorsErrorStatus.from_json(json['corsErrorStatus']) if json.get('corsErrorStatus', None) is not None else None
)
@@ -2987,7 +3036,7 @@ def from_json(cls, json: T_JSON_DICT) -> LoadingFinished:
request_id=RequestId.from_json(json['requestId']),
timestamp=MonotonicTime.from_json(json['timestamp']),
encoded_data_length=float(json['encodedDataLength']),
- should_report_corb_blocking=bool(json['shouldReportCorbBlocking']) if 'shouldReportCorbBlocking' in json else None
+ should_report_corb_blocking=bool(json['shouldReportCorbBlocking']) if json.get('shouldReportCorbBlocking', None) is not None else None
)
@@ -3042,13 +3091,13 @@ def from_json(cls, json: T_JSON_DICT) -> RequestIntercepted:
frame_id=page.FrameId.from_json(json['frameId']),
resource_type=ResourceType.from_json(json['resourceType']),
is_navigation_request=bool(json['isNavigationRequest']),
- is_download=bool(json['isDownload']) if 'isDownload' in json else None,
- redirect_url=str(json['redirectUrl']) if 'redirectUrl' in json else None,
- auth_challenge=AuthChallenge.from_json(json['authChallenge']) if 'authChallenge' in json else None,
- response_error_reason=ErrorReason.from_json(json['responseErrorReason']) if 'responseErrorReason' in json else None,
- response_status_code=int(json['responseStatusCode']) if 'responseStatusCode' in json else None,
- response_headers=Headers.from_json(json['responseHeaders']) if 'responseHeaders' in json else None,
- request_id=RequestId.from_json(json['requestId']) if 'requestId' in json else None
+ is_download=bool(json['isDownload']) if json.get('isDownload', None) is not None else None,
+ redirect_url=str(json['redirectUrl']) if json.get('redirectUrl', None) is not None else None,
+ auth_challenge=AuthChallenge.from_json(json['authChallenge']) if json.get('authChallenge', None) is not None else None,
+ response_error_reason=ErrorReason.from_json(json['responseErrorReason']) if json.get('responseErrorReason', None) is not None else None,
+ response_status_code=int(json['responseStatusCode']) if json.get('responseStatusCode', None) is not None else None,
+ response_headers=Headers.from_json(json['responseHeaders']) if json.get('responseHeaders', None) is not None else None,
+ request_id=RequestId.from_json(json['requestId']) if json.get('requestId', None) is not None else None
)
@@ -3112,10 +3161,10 @@ def from_json(cls, json: T_JSON_DICT) -> RequestWillBeSent:
wall_time=TimeSinceEpoch.from_json(json['wallTime']),
initiator=Initiator.from_json(json['initiator']),
redirect_has_extra_info=bool(json['redirectHasExtraInfo']),
- redirect_response=Response.from_json(json['redirectResponse']) if 'redirectResponse' in json else None,
- type_=ResourceType.from_json(json['type']) if 'type' in json else None,
- frame_id=page.FrameId.from_json(json['frameId']) if 'frameId' in json else None,
- has_user_gesture=bool(json['hasUserGesture']) if 'hasUserGesture' in json else None
+ redirect_response=Response.from_json(json['redirectResponse']) if json.get('redirectResponse', None) is not None else None,
+ type_=ResourceType.from_json(json['type']) if json.get('type', None) is not None else None,
+ frame_id=page.FrameId.from_json(json['frameId']) if json.get('frameId', None) is not None else None,
+ has_user_gesture=bool(json['hasUserGesture']) if json.get('hasUserGesture', None) is not None else None
)
@@ -3195,7 +3244,7 @@ def from_json(cls, json: T_JSON_DICT) -> ResponseReceived:
type_=ResourceType.from_json(json['type']),
response=Response.from_json(json['response']),
has_extra_info=bool(json['hasExtraInfo']),
- frame_id=page.FrameId.from_json(json['frameId']) if 'frameId' in json else None
+ frame_id=page.FrameId.from_json(json['frameId']) if json.get('frameId', None) is not None else None
)
@@ -3236,7 +3285,7 @@ def from_json(cls, json: T_JSON_DICT) -> WebSocketCreated:
return cls(
request_id=RequestId.from_json(json['requestId']),
url=str(json['url']),
- initiator=Initiator.from_json(json['initiator']) if 'initiator' in json else None
+ initiator=Initiator.from_json(json['initiator']) if json.get('initiator', None) is not None else None
)
@@ -3374,7 +3423,7 @@ def from_json(cls, json: T_JSON_DICT) -> WebTransportCreated:
transport_id=RequestId.from_json(json['transportId']),
url=str(json['url']),
timestamp=MonotonicTime.from_json(json['timestamp']),
- initiator=Initiator.from_json(json['initiator']) if 'initiator' in json else None
+ initiator=Initiator.from_json(json['initiator']) if json.get('initiator', None) is not None else None
)
@@ -3438,6 +3487,8 @@ class RequestWillBeSentExtraInfo:
connect_timing: ConnectTiming
#: The client security state set for the request.
client_security_state: typing.Optional[ClientSecurityState]
+ #: Whether the site has partitioned cookies stored in a partition different than the current one.
+ site_has_cookie_in_other_partition: typing.Optional[bool]
@classmethod
def from_json(cls, json: T_JSON_DICT) -> RequestWillBeSentExtraInfo:
@@ -3446,7 +3497,8 @@ def from_json(cls, json: T_JSON_DICT) -> RequestWillBeSentExtraInfo:
associated_cookies=[BlockedCookieWithReason.from_json(i) for i in json['associatedCookies']],
headers=Headers.from_json(json['headers']),
connect_timing=ConnectTiming.from_json(json['connectTiming']),
- client_security_state=ClientSecurityState.from_json(json['clientSecurityState']) if 'clientSecurityState' in json else None
+ client_security_state=ClientSecurityState.from_json(json['clientSecurityState']) if json.get('clientSecurityState', None) is not None else None,
+ site_has_cookie_in_other_partition=bool(json['siteHasCookieInOtherPartition']) if json.get('siteHasCookieInOtherPartition', None) is not None else None
)
@@ -3478,6 +3530,11 @@ class ResponseReceivedExtraInfo:
#: Raw response header text as it was received over the wire. The raw text may not always be
#: available, such as in the case of HTTP/2 or QUIC.
headers_text: typing.Optional[str]
+ #: The cookie partition key that will be used to store partitioned cookies set in this response.
+ #: Only sent when partitioned cookies are enabled.
+ cookie_partition_key: typing.Optional[str]
+    #: True if partitioned cookies are enabled, but the partition key is not serializable to string.
+ cookie_partition_key_opaque: typing.Optional[bool]
@classmethod
def from_json(cls, json: T_JSON_DICT) -> ResponseReceivedExtraInfo:
@@ -3487,7 +3544,9 @@ def from_json(cls, json: T_JSON_DICT) -> ResponseReceivedExtraInfo:
headers=Headers.from_json(json['headers']),
resource_ip_address_space=IPAddressSpace.from_json(json['resourceIPAddressSpace']),
status_code=int(json['statusCode']),
- headers_text=str(json['headersText']) if 'headersText' in json else None
+ headers_text=str(json['headersText']) if json.get('headersText', None) is not None else None,
+ cookie_partition_key=str(json['cookiePartitionKey']) if json.get('cookiePartitionKey', None) is not None else None,
+ cookie_partition_key_opaque=bool(json['cookiePartitionKeyOpaque']) if json.get('cookiePartitionKeyOpaque', None) is not None else None
)
@@ -3522,9 +3581,9 @@ def from_json(cls, json: T_JSON_DICT) -> TrustTokenOperationDone:
status=str(json['status']),
type_=TrustTokenOperationType.from_json(json['type']),
request_id=RequestId.from_json(json['requestId']),
- top_level_origin=str(json['topLevelOrigin']) if 'topLevelOrigin' in json else None,
- issuer_origin=str(json['issuerOrigin']) if 'issuerOrigin' in json else None,
- issued_token_count=int(json['issuedTokenCount']) if 'issuedTokenCount' in json else None
+ top_level_origin=str(json['topLevelOrigin']) if json.get('topLevelOrigin', None) is not None else None,
+ issuer_origin=str(json['issuerOrigin']) if json.get('issuerOrigin', None) is not None else None,
+ issued_token_count=int(json['issuedTokenCount']) if json.get('issuedTokenCount', None) is not None else None
)
@@ -3594,7 +3653,7 @@ def from_json(cls, json: T_JSON_DICT) -> SubresourceWebBundleInnerResponseParsed
return cls(
inner_request_id=RequestId.from_json(json['innerRequestId']),
inner_request_url=str(json['innerRequestURL']),
- bundle_request_id=RequestId.from_json(json['bundleRequestId']) if 'bundleRequestId' in json else None
+ bundle_request_id=RequestId.from_json(json['bundleRequestId']) if json.get('bundleRequestId', None) is not None else None
)
@@ -3623,7 +3682,7 @@ def from_json(cls, json: T_JSON_DICT) -> SubresourceWebBundleInnerResponseError:
inner_request_id=RequestId.from_json(json['innerRequestId']),
inner_request_url=str(json['innerRequestURL']),
error_message=str(json['errorMessage']),
- bundle_request_id=RequestId.from_json(json['bundleRequestId']) if 'bundleRequestId' in json else None
+ bundle_request_id=RequestId.from_json(json['bundleRequestId']) if json.get('bundleRequestId', None) is not None else None
)
diff --git a/pycdp/cdp/overlay.py b/pycdp/cdp/overlay.py
index 4d72822..dad0e6a 100644
--- a/pycdp/cdp/overlay.py
+++ b/pycdp/cdp/overlay.py
@@ -154,26 +154,26 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> GridHighlightConfig:
return cls(
- show_grid_extension_lines=bool(json['showGridExtensionLines']) if 'showGridExtensionLines' in json else None,
- show_positive_line_numbers=bool(json['showPositiveLineNumbers']) if 'showPositiveLineNumbers' in json else None,
- show_negative_line_numbers=bool(json['showNegativeLineNumbers']) if 'showNegativeLineNumbers' in json else None,
- show_area_names=bool(json['showAreaNames']) if 'showAreaNames' in json else None,
- show_line_names=bool(json['showLineNames']) if 'showLineNames' in json else None,
- show_track_sizes=bool(json['showTrackSizes']) if 'showTrackSizes' in json else None,
- grid_border_color=dom.RGBA.from_json(json['gridBorderColor']) if 'gridBorderColor' in json else None,
- cell_border_color=dom.RGBA.from_json(json['cellBorderColor']) if 'cellBorderColor' in json else None,
- row_line_color=dom.RGBA.from_json(json['rowLineColor']) if 'rowLineColor' in json else None,
- column_line_color=dom.RGBA.from_json(json['columnLineColor']) if 'columnLineColor' in json else None,
- grid_border_dash=bool(json['gridBorderDash']) if 'gridBorderDash' in json else None,
- cell_border_dash=bool(json['cellBorderDash']) if 'cellBorderDash' in json else None,
- row_line_dash=bool(json['rowLineDash']) if 'rowLineDash' in json else None,
- column_line_dash=bool(json['columnLineDash']) if 'columnLineDash' in json else None,
- row_gap_color=dom.RGBA.from_json(json['rowGapColor']) if 'rowGapColor' in json else None,
- row_hatch_color=dom.RGBA.from_json(json['rowHatchColor']) if 'rowHatchColor' in json else None,
- column_gap_color=dom.RGBA.from_json(json['columnGapColor']) if 'columnGapColor' in json else None,
- column_hatch_color=dom.RGBA.from_json(json['columnHatchColor']) if 'columnHatchColor' in json else None,
- area_border_color=dom.RGBA.from_json(json['areaBorderColor']) if 'areaBorderColor' in json else None,
- grid_background_color=dom.RGBA.from_json(json['gridBackgroundColor']) if 'gridBackgroundColor' in json else None,
+ show_grid_extension_lines=bool(json['showGridExtensionLines']) if json.get('showGridExtensionLines', None) is not None else None,
+ show_positive_line_numbers=bool(json['showPositiveLineNumbers']) if json.get('showPositiveLineNumbers', None) is not None else None,
+ show_negative_line_numbers=bool(json['showNegativeLineNumbers']) if json.get('showNegativeLineNumbers', None) is not None else None,
+ show_area_names=bool(json['showAreaNames']) if json.get('showAreaNames', None) is not None else None,
+ show_line_names=bool(json['showLineNames']) if json.get('showLineNames', None) is not None else None,
+ show_track_sizes=bool(json['showTrackSizes']) if json.get('showTrackSizes', None) is not None else None,
+ grid_border_color=dom.RGBA.from_json(json['gridBorderColor']) if json.get('gridBorderColor', None) is not None else None,
+ cell_border_color=dom.RGBA.from_json(json['cellBorderColor']) if json.get('cellBorderColor', None) is not None else None,
+ row_line_color=dom.RGBA.from_json(json['rowLineColor']) if json.get('rowLineColor', None) is not None else None,
+ column_line_color=dom.RGBA.from_json(json['columnLineColor']) if json.get('columnLineColor', None) is not None else None,
+ grid_border_dash=bool(json['gridBorderDash']) if json.get('gridBorderDash', None) is not None else None,
+ cell_border_dash=bool(json['cellBorderDash']) if json.get('cellBorderDash', None) is not None else None,
+ row_line_dash=bool(json['rowLineDash']) if json.get('rowLineDash', None) is not None else None,
+ column_line_dash=bool(json['columnLineDash']) if json.get('columnLineDash', None) is not None else None,
+ row_gap_color=dom.RGBA.from_json(json['rowGapColor']) if json.get('rowGapColor', None) is not None else None,
+ row_hatch_color=dom.RGBA.from_json(json['rowHatchColor']) if json.get('rowHatchColor', None) is not None else None,
+ column_gap_color=dom.RGBA.from_json(json['columnGapColor']) if json.get('columnGapColor', None) is not None else None,
+ column_hatch_color=dom.RGBA.from_json(json['columnHatchColor']) if json.get('columnHatchColor', None) is not None else None,
+ area_border_color=dom.RGBA.from_json(json['areaBorderColor']) if json.get('areaBorderColor', None) is not None else None,
+ grid_background_color=dom.RGBA.from_json(json['gridBackgroundColor']) if json.get('gridBackgroundColor', None) is not None else None,
)
@@ -229,14 +229,14 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> FlexContainerHighlightConfig:
return cls(
- container_border=LineStyle.from_json(json['containerBorder']) if 'containerBorder' in json else None,
- line_separator=LineStyle.from_json(json['lineSeparator']) if 'lineSeparator' in json else None,
- item_separator=LineStyle.from_json(json['itemSeparator']) if 'itemSeparator' in json else None,
- main_distributed_space=BoxStyle.from_json(json['mainDistributedSpace']) if 'mainDistributedSpace' in json else None,
- cross_distributed_space=BoxStyle.from_json(json['crossDistributedSpace']) if 'crossDistributedSpace' in json else None,
- row_gap_space=BoxStyle.from_json(json['rowGapSpace']) if 'rowGapSpace' in json else None,
- column_gap_space=BoxStyle.from_json(json['columnGapSpace']) if 'columnGapSpace' in json else None,
- cross_alignment=LineStyle.from_json(json['crossAlignment']) if 'crossAlignment' in json else None,
+ container_border=LineStyle.from_json(json['containerBorder']) if json.get('containerBorder', None) is not None else None,
+ line_separator=LineStyle.from_json(json['lineSeparator']) if json.get('lineSeparator', None) is not None else None,
+ item_separator=LineStyle.from_json(json['itemSeparator']) if json.get('itemSeparator', None) is not None else None,
+ main_distributed_space=BoxStyle.from_json(json['mainDistributedSpace']) if json.get('mainDistributedSpace', None) is not None else None,
+ cross_distributed_space=BoxStyle.from_json(json['crossDistributedSpace']) if json.get('crossDistributedSpace', None) is not None else None,
+ row_gap_space=BoxStyle.from_json(json['rowGapSpace']) if json.get('rowGapSpace', None) is not None else None,
+ column_gap_space=BoxStyle.from_json(json['columnGapSpace']) if json.get('columnGapSpace', None) is not None else None,
+ cross_alignment=LineStyle.from_json(json['crossAlignment']) if json.get('crossAlignment', None) is not None else None,
)
@@ -267,9 +267,9 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> FlexItemHighlightConfig:
return cls(
- base_size_box=BoxStyle.from_json(json['baseSizeBox']) if 'baseSizeBox' in json else None,
- base_size_border=LineStyle.from_json(json['baseSizeBorder']) if 'baseSizeBorder' in json else None,
- flexibility_arrow=LineStyle.from_json(json['flexibilityArrow']) if 'flexibilityArrow' in json else None,
+ base_size_box=BoxStyle.from_json(json['baseSizeBox']) if json.get('baseSizeBox', None) is not None else None,
+ base_size_border=LineStyle.from_json(json['baseSizeBorder']) if json.get('baseSizeBorder', None) is not None else None,
+ flexibility_arrow=LineStyle.from_json(json['flexibilityArrow']) if json.get('flexibilityArrow', None) is not None else None,
)
@@ -295,8 +295,8 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> LineStyle:
return cls(
- color=dom.RGBA.from_json(json['color']) if 'color' in json else None,
- pattern=str(json['pattern']) if 'pattern' in json else None,
+ color=dom.RGBA.from_json(json['color']) if json.get('color', None) is not None else None,
+ pattern=str(json['pattern']) if json.get('pattern', None) is not None else None,
)
@@ -322,8 +322,8 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> BoxStyle:
return cls(
- fill_color=dom.RGBA.from_json(json['fillColor']) if 'fillColor' in json else None,
- hatch_color=dom.RGBA.from_json(json['hatchColor']) if 'hatchColor' in json else None,
+ fill_color=dom.RGBA.from_json(json['fillColor']) if json.get('fillColor', None) is not None else None,
+ hatch_color=dom.RGBA.from_json(json['hatchColor']) if json.get('hatchColor', None) is not None else None,
)
@@ -447,31 +447,32 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> HighlightConfig:
return cls(
- show_info=bool(json['showInfo']) if 'showInfo' in json else None,
- show_styles=bool(json['showStyles']) if 'showStyles' in json else None,
- show_rulers=bool(json['showRulers']) if 'showRulers' in json else None,
- show_accessibility_info=bool(json['showAccessibilityInfo']) if 'showAccessibilityInfo' in json else None,
- show_extension_lines=bool(json['showExtensionLines']) if 'showExtensionLines' in json else None,
- content_color=dom.RGBA.from_json(json['contentColor']) if 'contentColor' in json else None,
- padding_color=dom.RGBA.from_json(json['paddingColor']) if 'paddingColor' in json else None,
- border_color=dom.RGBA.from_json(json['borderColor']) if 'borderColor' in json else None,
- margin_color=dom.RGBA.from_json(json['marginColor']) if 'marginColor' in json else None,
- event_target_color=dom.RGBA.from_json(json['eventTargetColor']) if 'eventTargetColor' in json else None,
- shape_color=dom.RGBA.from_json(json['shapeColor']) if 'shapeColor' in json else None,
- shape_margin_color=dom.RGBA.from_json(json['shapeMarginColor']) if 'shapeMarginColor' in json else None,
- css_grid_color=dom.RGBA.from_json(json['cssGridColor']) if 'cssGridColor' in json else None,
- color_format=ColorFormat.from_json(json['colorFormat']) if 'colorFormat' in json else None,
- grid_highlight_config=GridHighlightConfig.from_json(json['gridHighlightConfig']) if 'gridHighlightConfig' in json else None,
- flex_container_highlight_config=FlexContainerHighlightConfig.from_json(json['flexContainerHighlightConfig']) if 'flexContainerHighlightConfig' in json else None,
- flex_item_highlight_config=FlexItemHighlightConfig.from_json(json['flexItemHighlightConfig']) if 'flexItemHighlightConfig' in json else None,
- contrast_algorithm=ContrastAlgorithm.from_json(json['contrastAlgorithm']) if 'contrastAlgorithm' in json else None,
- container_query_container_highlight_config=ContainerQueryContainerHighlightConfig.from_json(json['containerQueryContainerHighlightConfig']) if 'containerQueryContainerHighlightConfig' in json else None,
+ show_info=bool(json['showInfo']) if json.get('showInfo', None) is not None else None,
+ show_styles=bool(json['showStyles']) if json.get('showStyles', None) is not None else None,
+ show_rulers=bool(json['showRulers']) if json.get('showRulers', None) is not None else None,
+ show_accessibility_info=bool(json['showAccessibilityInfo']) if json.get('showAccessibilityInfo', None) is not None else None,
+ show_extension_lines=bool(json['showExtensionLines']) if json.get('showExtensionLines', None) is not None else None,
+ content_color=dom.RGBA.from_json(json['contentColor']) if json.get('contentColor', None) is not None else None,
+ padding_color=dom.RGBA.from_json(json['paddingColor']) if json.get('paddingColor', None) is not None else None,
+ border_color=dom.RGBA.from_json(json['borderColor']) if json.get('borderColor', None) is not None else None,
+ margin_color=dom.RGBA.from_json(json['marginColor']) if json.get('marginColor', None) is not None else None,
+ event_target_color=dom.RGBA.from_json(json['eventTargetColor']) if json.get('eventTargetColor', None) is not None else None,
+ shape_color=dom.RGBA.from_json(json['shapeColor']) if json.get('shapeColor', None) is not None else None,
+ shape_margin_color=dom.RGBA.from_json(json['shapeMarginColor']) if json.get('shapeMarginColor', None) is not None else None,
+ css_grid_color=dom.RGBA.from_json(json['cssGridColor']) if json.get('cssGridColor', None) is not None else None,
+ color_format=ColorFormat.from_json(json['colorFormat']) if json.get('colorFormat', None) is not None else None,
+ grid_highlight_config=GridHighlightConfig.from_json(json['gridHighlightConfig']) if json.get('gridHighlightConfig', None) is not None else None,
+ flex_container_highlight_config=FlexContainerHighlightConfig.from_json(json['flexContainerHighlightConfig']) if json.get('flexContainerHighlightConfig', None) is not None else None,
+ flex_item_highlight_config=FlexItemHighlightConfig.from_json(json['flexItemHighlightConfig']) if json.get('flexItemHighlightConfig', None) is not None else None,
+ contrast_algorithm=ContrastAlgorithm.from_json(json['contrastAlgorithm']) if json.get('contrastAlgorithm', None) is not None else None,
+ container_query_container_highlight_config=ContainerQueryContainerHighlightConfig.from_json(json['containerQueryContainerHighlightConfig']) if json.get('containerQueryContainerHighlightConfig', None) is not None else None,
)
class ColorFormat(enum.Enum):
RGB = "rgb"
HSL = "hsl"
+ HWB = "hwb"
HEX_ = "hex"
def to_json(self) -> str:
@@ -558,10 +559,10 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> ScrollSnapContainerHighlightConfig:
return cls(
- snapport_border=LineStyle.from_json(json['snapportBorder']) if 'snapportBorder' in json else None,
- snap_area_border=LineStyle.from_json(json['snapAreaBorder']) if 'snapAreaBorder' in json else None,
- scroll_margin_color=dom.RGBA.from_json(json['scrollMarginColor']) if 'scrollMarginColor' in json else None,
- scroll_padding_color=dom.RGBA.from_json(json['scrollPaddingColor']) if 'scrollPaddingColor' in json else None,
+ snapport_border=LineStyle.from_json(json['snapportBorder']) if json.get('snapportBorder', None) is not None else None,
+ snap_area_border=LineStyle.from_json(json['snapAreaBorder']) if json.get('snapAreaBorder', None) is not None else None,
+ scroll_margin_color=dom.RGBA.from_json(json['scrollMarginColor']) if json.get('scrollMarginColor', None) is not None else None,
+ scroll_padding_color=dom.RGBA.from_json(json['scrollPaddingColor']) if json.get('scrollPaddingColor', None) is not None else None,
)
@@ -614,8 +615,8 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> HingeConfig:
return cls(
rect=dom.Rect.from_json(json['rect']),
- content_color=dom.RGBA.from_json(json['contentColor']) if 'contentColor' in json else None,
- outline_color=dom.RGBA.from_json(json['outlineColor']) if 'outlineColor' in json else None,
+ content_color=dom.RGBA.from_json(json['contentColor']) if json.get('contentColor', None) is not None else None,
+ outline_color=dom.RGBA.from_json(json['outlineColor']) if json.get('outlineColor', None) is not None else None,
)
@@ -660,8 +661,8 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> ContainerQueryContainerHighlightConfig:
return cls(
- container_border=LineStyle.from_json(json['containerBorder']) if 'containerBorder' in json else None,
- descendant_border=LineStyle.from_json(json['descendantBorder']) if 'descendantBorder' in json else None,
+ container_border=LineStyle.from_json(json['containerBorder']) if json.get('containerBorder', None) is not None else None,
+ descendant_border=LineStyle.from_json(json['descendantBorder']) if json.get('descendantBorder', None) is not None else None,
)
@@ -711,9 +712,9 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> IsolationModeHighlightConfig:
return cls(
- resizer_color=dom.RGBA.from_json(json['resizerColor']) if 'resizerColor' in json else None,
- resizer_handle_color=dom.RGBA.from_json(json['resizerHandleColor']) if 'resizerHandleColor' in json else None,
- mask_color=dom.RGBA.from_json(json['maskColor']) if 'maskColor' in json else None,
+ resizer_color=dom.RGBA.from_json(json['resizerColor']) if json.get('resizerColor', None) is not None else None,
+ resizer_handle_color=dom.RGBA.from_json(json['resizerHandleColor']) if json.get('resizerHandleColor', None) is not None else None,
+ mask_color=dom.RGBA.from_json(json['maskColor']) if json.get('maskColor', None) is not None else None,
)
diff --git a/pycdp/cdp/page.py b/pycdp/cdp/page.py
index 44e93e8..c2f3a21 100644
--- a/pycdp/cdp/page.py
+++ b/pycdp/cdp/page.py
@@ -84,7 +84,34 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> AdFrameStatus:
return cls(
ad_frame_type=AdFrameType.from_json(json['adFrameType']),
- explanations=[AdFrameExplanation.from_json(i) for i in json['explanations']] if 'explanations' in json else None,
+ explanations=[AdFrameExplanation.from_json(i) for i in json['explanations']] if json.get('explanations', None) is not None else None,
+ )
+
+
+@dataclass
+class AdScriptId:
+ '''
+ Identifies the bottom-most script which caused the frame to be labelled
+ as an ad.
+ '''
+ #: Script Id of the bottom-most script which caused the frame to be labelled
+ #: as an ad.
+ script_id: runtime.ScriptId
+
+ #: Id of adScriptId's debugger.
+ debugger_id: runtime.UniqueDebuggerId
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['scriptId'] = self.script_id.to_json()
+ json['debuggerId'] = self.debugger_id.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> AdScriptId:
+ return cls(
+ script_id=runtime.ScriptId.from_json(json['scriptId']),
+ debugger_id=runtime.UniqueDebuggerId.from_json(json['debuggerId']),
)
@@ -144,13 +171,17 @@ class PermissionsPolicyFeature(enum.Enum):
AMBIENT_LIGHT_SENSOR = "ambient-light-sensor"
ATTRIBUTION_REPORTING = "attribution-reporting"
AUTOPLAY = "autoplay"
+ BLUETOOTH = "bluetooth"
+ BROWSING_TOPICS = "browsing-topics"
CAMERA = "camera"
CH_DPR = "ch-dpr"
CH_DEVICE_MEMORY = "ch-device-memory"
CH_DOWNLINK = "ch-downlink"
CH_ECT = "ch-ect"
CH_PREFERS_COLOR_SCHEME = "ch-prefers-color-scheme"
+ CH_PREFERS_REDUCED_MOTION = "ch-prefers-reduced-motion"
CH_RTT = "ch-rtt"
+ CH_SAVE_DATA = "ch-save-data"
CH_UA = "ch-ua"
CH_UA_ARCH = "ch-ua-arch"
CH_UA_BITNESS = "ch-ua-bitness"
@@ -166,9 +197,9 @@ class PermissionsPolicyFeature(enum.Enum):
CH_VIEWPORT_HEIGHT = "ch-viewport-height"
CH_VIEWPORT_WIDTH = "ch-viewport-width"
CH_WIDTH = "ch-width"
- CH_PARTITIONED_COOKIES = "ch-partitioned-cookies"
CLIPBOARD_READ = "clipboard-read"
CLIPBOARD_WRITE = "clipboard-write"
+ COMPUTE_PRESSURE = "compute-pressure"
CROSS_ORIGIN_ISOLATED = "cross-origin-isolated"
DIRECT_SOCKETS = "direct-sockets"
DISPLAY_CAPTURE = "display-capture"
@@ -183,26 +214,35 @@ class PermissionsPolicyFeature(enum.Enum):
GEOLOCATION = "geolocation"
GYROSCOPE = "gyroscope"
HID = "hid"
+ IDENTITY_CREDENTIALS_GET = "identity-credentials-get"
IDLE_DETECTION = "idle-detection"
+ INTEREST_COHORT = "interest-cohort"
JOIN_AD_INTEREST_GROUP = "join-ad-interest-group"
KEYBOARD_MAP = "keyboard-map"
+ LOCAL_FONTS = "local-fonts"
MAGNETOMETER = "magnetometer"
MICROPHONE = "microphone"
MIDI = "midi"
OTP_CREDENTIALS = "otp-credentials"
PAYMENT = "payment"
PICTURE_IN_PICTURE = "picture-in-picture"
+ PRIVATE_AGGREGATION = "private-aggregation"
PUBLICKEY_CREDENTIALS_GET = "publickey-credentials-get"
RUN_AD_AUCTION = "run-ad-auction"
SCREEN_WAKE_LOCK = "screen-wake-lock"
SERIAL = "serial"
SHARED_AUTOFILL = "shared-autofill"
- STORAGE_ACCESS_API = "storage-access-api"
+ SHARED_STORAGE = "shared-storage"
+ SHARED_STORAGE_SELECT_URL = "shared-storage-select-url"
+ SMART_CARD = "smart-card"
+ STORAGE_ACCESS = "storage-access"
SYNC_XHR = "sync-xhr"
TRUST_TOKEN_REDEMPTION = "trust-token-redemption"
+ UNLOAD = "unload"
USB = "usb"
VERTICAL_SCROLL = "vertical-scroll"
WEB_SHARE = "web-share"
+ WINDOW_MANAGEMENT = "window-management"
WINDOW_PLACEMENT = "window-placement"
XR_SPATIAL_TRACKING = "xr-spatial-tracking"
@@ -221,6 +261,7 @@ class PermissionsPolicyBlockReason(enum.Enum):
HEADER = "Header"
IFRAME_ATTRIBUTE = "IframeAttribute"
IN_FENCED_FRAME_TREE = "InFencedFrameTree"
+ IN_ISOLATED_APP = "InIsolatedApp"
def to_json(self) -> str:
return self.value
@@ -271,7 +312,7 @@ def from_json(cls, json: T_JSON_DICT) -> PermissionsPolicyFeatureState:
return cls(
feature=PermissionsPolicyFeature.from_json(json['feature']),
allowed=bool(json['allowed']),
- locator=PermissionsPolicyBlockLocator.from_json(json['locator']) if 'locator' in json else None,
+ locator=PermissionsPolicyBlockLocator.from_json(json['locator']) if json.get('locator', None) is not None else None,
)
@@ -389,7 +430,7 @@ def from_json(cls, json: T_JSON_DICT) -> OriginTrialTokenWithStatus:
return cls(
raw_token_text=str(json['rawTokenText']),
status=OriginTrialTokenStatus.from_json(json['status']),
- parsed_token=OriginTrialToken.from_json(json['parsedToken']) if 'parsedToken' in json else None,
+ parsed_token=OriginTrialToken.from_json(json['parsedToken']) if json.get('parsedToken', None) is not None else None,
)
@@ -502,11 +543,11 @@ def from_json(cls, json: T_JSON_DICT) -> Frame:
secure_context_type=SecureContextType.from_json(json['secureContextType']),
cross_origin_isolated_context_type=CrossOriginIsolatedContextType.from_json(json['crossOriginIsolatedContextType']),
gated_api_features=[GatedAPIFeatures.from_json(i) for i in json['gatedAPIFeatures']],
- parent_id=FrameId.from_json(json['parentId']) if 'parentId' in json else None,
- name=str(json['name']) if 'name' in json else None,
- url_fragment=str(json['urlFragment']) if 'urlFragment' in json else None,
- unreachable_url=str(json['unreachableUrl']) if 'unreachableUrl' in json else None,
- ad_frame_status=AdFrameStatus.from_json(json['adFrameStatus']) if 'adFrameStatus' in json else None,
+ parent_id=FrameId.from_json(json['parentId']) if json.get('parentId', None) is not None else None,
+ name=str(json['name']) if json.get('name', None) is not None else None,
+ url_fragment=str(json['urlFragment']) if json.get('urlFragment', None) is not None else None,
+ unreachable_url=str(json['unreachableUrl']) if json.get('unreachableUrl', None) is not None else None,
+ ad_frame_status=AdFrameStatus.from_json(json['adFrameStatus']) if json.get('adFrameStatus', None) is not None else None,
)
@@ -557,10 +598,10 @@ def from_json(cls, json: T_JSON_DICT) -> FrameResource:
url=str(json['url']),
type_=network.ResourceType.from_json(json['type']),
mime_type=str(json['mimeType']),
- last_modified=network.TimeSinceEpoch.from_json(json['lastModified']) if 'lastModified' in json else None,
- content_size=float(json['contentSize']) if 'contentSize' in json else None,
- failed=bool(json['failed']) if 'failed' in json else None,
- canceled=bool(json['canceled']) if 'canceled' in json else None,
+ last_modified=network.TimeSinceEpoch.from_json(json['lastModified']) if json.get('lastModified', None) is not None else None,
+ content_size=float(json['contentSize']) if json.get('contentSize', None) is not None else None,
+ failed=bool(json['failed']) if json.get('failed', None) is not None else None,
+ canceled=bool(json['canceled']) if json.get('canceled', None) is not None else None,
)
@@ -591,7 +632,7 @@ def from_json(cls, json: T_JSON_DICT) -> FrameResourceTree:
return cls(
frame=Frame.from_json(json['frame']),
resources=[FrameResource.from_json(i) for i in json['resources']],
- child_frames=[FrameResourceTree.from_json(i) for i in json['childFrames']] if 'childFrames' in json else None,
+ child_frames=[FrameResourceTree.from_json(i) for i in json['childFrames']] if json.get('childFrames', None) is not None else None,
)
@@ -617,7 +658,7 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> FrameTree:
return cls(
frame=Frame.from_json(json['frame']),
- child_frames=[FrameTree.from_json(i) for i in json['childFrames']] if 'childFrames' in json else None,
+ child_frames=[FrameTree.from_json(i) for i in json['childFrames']] if json.get('childFrames', None) is not None else None,
)
@@ -749,7 +790,7 @@ def from_json(cls, json: T_JSON_DICT) -> ScreencastFrameMetadata:
device_height=float(json['deviceHeight']),
scroll_offset_x=float(json['scrollOffsetX']),
scroll_offset_y=float(json['scrollOffsetY']),
- timestamp=network.TimeSinceEpoch.from_json(json['timestamp']) if 'timestamp' in json else None,
+ timestamp=network.TimeSinceEpoch.from_json(json['timestamp']) if json.get('timestamp', None) is not None else None,
)
@@ -912,7 +953,7 @@ def from_json(cls, json: T_JSON_DICT) -> VisualViewport:
client_width=float(json['clientWidth']),
client_height=float(json['clientHeight']),
scale=float(json['scale']),
- zoom=float(json['zoom']) if 'zoom' in json else None,
+ zoom=float(json['zoom']) if json.get('zoom', None) is not None else None,
)
@@ -979,8 +1020,8 @@ class FontFamilies:
#: The fantasy font-family.
fantasy: typing.Optional[str] = None
- #: The pictograph font-family.
- pictograph: typing.Optional[str] = None
+ #: The math font-family.
+ math: typing.Optional[str] = None
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
@@ -996,20 +1037,20 @@ def to_json(self) -> T_JSON_DICT:
json['cursive'] = self.cursive
if self.fantasy is not None:
json['fantasy'] = self.fantasy
- if self.pictograph is not None:
- json['pictograph'] = self.pictograph
+ if self.math is not None:
+ json['math'] = self.math
return json
@classmethod
def from_json(cls, json: T_JSON_DICT) -> FontFamilies:
return cls(
- standard=str(json['standard']) if 'standard' in json else None,
- fixed=str(json['fixed']) if 'fixed' in json else None,
- serif=str(json['serif']) if 'serif' in json else None,
- sans_serif=str(json['sansSerif']) if 'sansSerif' in json else None,
- cursive=str(json['cursive']) if 'cursive' in json else None,
- fantasy=str(json['fantasy']) if 'fantasy' in json else None,
- pictograph=str(json['pictograph']) if 'pictograph' in json else None,
+ standard=str(json['standard']) if json.get('standard', None) is not None else None,
+ fixed=str(json['fixed']) if json.get('fixed', None) is not None else None,
+ serif=str(json['serif']) if json.get('serif', None) is not None else None,
+ sans_serif=str(json['sansSerif']) if json.get('sansSerif', None) is not None else None,
+ cursive=str(json['cursive']) if json.get('cursive', None) is not None else None,
+ fantasy=str(json['fantasy']) if json.get('fantasy', None) is not None else None,
+ math=str(json['math']) if json.get('math', None) is not None else None,
)
@@ -1060,8 +1101,8 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> FontSizes:
return cls(
- standard=int(json['standard']) if 'standard' in json else None,
- fixed=int(json['fixed']) if 'fixed' in json else None,
+ standard=int(json['standard']) if json.get('standard', None) is not None else None,
+ fixed=int(json['fixed']) if json.get('fixed', None) is not None else None,
)
@@ -1188,10 +1229,27 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> CompilationCacheParams:
return cls(
url=str(json['url']),
- eager=bool(json['eager']) if 'eager' in json else None,
+ eager=bool(json['eager']) if json.get('eager', None) is not None else None,
)
+class AutoResponseMode(enum.Enum):
+ '''
+    Enum of possible auto-response for permission / prompt dialogs.
+ '''
+ NONE = "none"
+ AUTO_ACCEPT = "autoAccept"
+ AUTO_REJECT = "autoReject"
+ AUTO_OPT_OUT = "autoOptOut"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> AutoResponseMode:
+ return cls(json)
+
+
class NavigationType(enum.Enum):
'''
The type of a frameNavigated event.
@@ -1227,7 +1285,6 @@ class BackForwardCacheNotRestoredReason(enum.Enum):
JAVA_SCRIPT_EXECUTION = "JavaScriptExecution"
RENDERER_PROCESS_KILLED = "RendererProcessKilled"
RENDERER_PROCESS_CRASHED = "RendererProcessCrashed"
- GRANTED_MEDIA_STREAM_ACCESS = "GrantedMediaStreamAccess"
SCHEDULER_TRACKED_FEATURE_USED = "SchedulerTrackedFeatureUsed"
CONFLICTING_BROWSING_INSTANCE = "ConflictingBrowsingInstance"
CACHE_FLUSHED = "CacheFlushed"
@@ -1254,7 +1311,6 @@ class BackForwardCacheNotRestoredReason(enum.Enum):
FOREGROUND_CACHE_LIMIT = "ForegroundCacheLimit"
BROWSING_INSTANCE_NOT_SWAPPED = "BrowsingInstanceNotSwapped"
BACK_FORWARD_CACHE_DISABLED_FOR_DELEGATE = "BackForwardCacheDisabledForDelegate"
- OPT_IN_UNLOAD_HEADER_NOT_PRESENT = "OptInUnloadHeaderNotPresent"
UNLOAD_HANDLER_EXISTS_IN_MAIN_FRAME = "UnloadHandlerExistsInMainFrame"
UNLOAD_HANDLER_EXISTS_IN_SUB_FRAME = "UnloadHandlerExistsInSubFrame"
SERVICE_WORKER_UNREGISTRATION = "ServiceWorkerUnregistration"
@@ -1264,6 +1320,8 @@ class BackForwardCacheNotRestoredReason(enum.Enum):
NO_RESPONSE_HEAD = "NoResponseHead"
UNKNOWN = "Unknown"
ACTIVATION_NAVIGATIONS_DISALLOWED_FOR_BUG1234857 = "ActivationNavigationsDisallowedForBug1234857"
+ ERROR_DOCUMENT = "ErrorDocument"
+ FENCED_FRAMES_EMBEDDER = "FencedFramesEmbedder"
WEB_SOCKET = "WebSocket"
WEB_TRANSPORT = "WebTransport"
WEB_RTC = "WebRTC"
@@ -1276,7 +1334,6 @@ class BackForwardCacheNotRestoredReason(enum.Enum):
DEDICATED_WORKER_OR_WORKLET = "DedicatedWorkerOrWorklet"
OUTSTANDING_NETWORK_REQUEST_OTHERS = "OutstandingNetworkRequestOthers"
OUTSTANDING_INDEXED_DB_TRANSACTION = "OutstandingIndexedDBTransaction"
- REQUESTED_NOTIFICATIONS_PERMISSION = "RequestedNotificationsPermission"
REQUESTED_MIDI_PERMISSION = "RequestedMIDIPermission"
REQUESTED_AUDIO_CAPTURE_PERMISSION = "RequestedAudioCapturePermission"
REQUESTED_VIDEO_CAPTURE_PERMISSION = "RequestedVideoCapturePermission"
@@ -1307,7 +1364,10 @@ class BackForwardCacheNotRestoredReason(enum.Enum):
OUTSTANDING_NETWORK_REQUEST_DIRECT_SOCKET = "OutstandingNetworkRequestDirectSocket"
INJECTED_JAVASCRIPT = "InjectedJavascript"
INJECTED_STYLE_SHEET = "InjectedStyleSheet"
+ KEEPALIVE_REQUEST = "KeepaliveRequest"
+ INDEXED_DB_EVENT = "IndexedDBEvent"
DUMMY = "Dummy"
+ AUTHORIZATION_HEADER = "AuthorizationHeader"
CONTENT_SECURITY_HANDLER = "ContentSecurityHandler"
CONTENT_WEB_AUTHENTICATION_API = "ContentWebAuthenticationAPI"
CONTENT_FILE_CHOOSER = "ContentFileChooser"
@@ -1316,7 +1376,6 @@ class BackForwardCacheNotRestoredReason(enum.Enum):
CONTENT_MEDIA_DEVICES_DISPATCHER_HOST = "ContentMediaDevicesDispatcherHost"
CONTENT_WEB_BLUETOOTH = "ContentWebBluetooth"
CONTENT_WEB_USB = "ContentWebUSB"
- CONTENT_MEDIA_SESSION = "ContentMediaSession"
CONTENT_MEDIA_SESSION_SERVICE = "ContentMediaSessionService"
CONTENT_SCREEN_READER = "ContentScreenReader"
EMBEDDER_POPUP_BLOCKER_TAB_HELPER = "EmbedderPopupBlockerTabHelper"
@@ -1367,10 +1426,17 @@ class BackForwardCacheNotRestoredExplanation:
#: Not restored reason
reason: BackForwardCacheNotRestoredReason
+ #: Context associated with the reason. The meaning of this context is
+ #: dependent on the reason:
+ #: - EmbedderExtensionSentMessageToCachedFrame: the extension ID.
+ context: typing.Optional[str] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['type'] = self.type_.to_json()
json['reason'] = self.reason.to_json()
+ if self.context is not None:
+ json['context'] = self.context
return json
@classmethod
@@ -1378,6 +1444,7 @@ def from_json(cls, json: T_JSON_DICT) -> BackForwardCacheNotRestoredExplanation:
return cls(
type_=BackForwardCacheNotRestoredReasonType.from_json(json['type']),
reason=BackForwardCacheNotRestoredReason.from_json(json['reason']),
+ context=str(json['context']) if json.get('context', None) is not None else None,
)
@@ -1474,7 +1541,8 @@ def capture_screenshot(
quality: typing.Optional[int] = None,
clip: typing.Optional[Viewport] = None,
from_surface: typing.Optional[bool] = None,
- capture_beyond_viewport: typing.Optional[bool] = None
+ capture_beyond_viewport: typing.Optional[bool] = None,
+ optimize_for_speed: typing.Optional[bool] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,str]:
'''
Capture page screenshot.
@@ -1484,6 +1552,7 @@ def capture_screenshot(
:param clip: *(Optional)* Capture the screenshot of a given region only.
:param from_surface: **(EXPERIMENTAL)** *(Optional)* Capture the screenshot from the surface, rather than the view. Defaults to true.
:param capture_beyond_viewport: **(EXPERIMENTAL)** *(Optional)* Capture the screenshot beyond the viewport. Defaults to false.
+ :param optimize_for_speed: **(EXPERIMENTAL)** *(Optional)* Optimize image encoding for speed, not for resulting size (defaults to false)
:returns: Base64-encoded image data. (Encoded as a base64 string when passed over JSON)
'''
params: T_JSON_DICT = dict()
@@ -1497,6 +1566,8 @@ def capture_screenshot(
params['fromSurface'] = from_surface
if capture_beyond_viewport is not None:
params['captureBeyondViewport'] = capture_beyond_viewport
+ if optimize_for_speed is not None:
+ params['optimizeForSpeed'] = optimize_for_speed
cmd_dict: T_JSON_DICT = {
'method': 'Page.captureScreenshot',
'params': params,
@@ -1661,8 +1732,8 @@ def get_app_manifest() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[
return (
str(json['url']),
[AppManifestError.from_json(i) for i in json['errors']],
- str(json['data']) if 'data' in json else None,
- AppManifestParsedProperties.from_json(json['parsed']) if 'parsed' in json else None
+ str(json['data']) if json.get('data', None) is not None else None,
+ AppManifestParsedProperties.from_json(json['parsed']) if json.get('parsed', None) is not None else None
)
@@ -1681,9 +1752,12 @@ def get_installability_errors() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typi
return [InstallabilityError.from_json(i) for i in json['installabilityErrors']]
+@deprecated(version="1.3")
def get_manifest_icons() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Optional[str]]:
'''
+ Deprecated because it's not guaranteed that the returned icon is in fact the one used for PWA installation.
+ .. deprecated:: 1.3
**EXPERIMENTAL**
@@ -1693,7 +1767,7 @@ def get_manifest_icons() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Opti
'method': 'Page.getManifestIcons',
}
json = yield cmd_dict
- return str(json['primaryIcon']) if 'primaryIcon' in json else None
+ return str(json['primaryIcon']) if json.get('primaryIcon', None) is not None else None
def get_app_id() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.Optional[str], typing.Optional[str]]]:
@@ -1713,16 +1787,38 @@ def get_app_id() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing
}
json = yield cmd_dict
return (
- str(json['appId']) if 'appId' in json else None,
- str(json['recommendedId']) if 'recommendedId' in json else None
+ str(json['appId']) if json.get('appId', None) is not None else None,
+ str(json['recommendedId']) if json.get('recommendedId', None) is not None else None
)
+def get_ad_script_id(
+ frame_id: FrameId
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Optional[AdScriptId]]:
+ '''
+
+
+ **EXPERIMENTAL**
+
+ :param frame_id:
+ :returns: *(Optional)* Identifies the bottom-most script which caused the frame to be labelled as an ad. Only sent if frame is labelled as an ad and id is available.
+ '''
+ params: T_JSON_DICT = dict()
+ params['frameId'] = frame_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Page.getAdScriptId',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return AdScriptId.from_json(json['adScriptId']) if json.get('adScriptId', None) is not None else None
+
+
@deprecated(version="1.3")
def get_cookies() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[network.Cookie]]:
'''
- Returns all browser cookies. Depending on the backend support, will return detailed cookie
- information in the ``cookies`` field.
+ Returns all browser cookies for the page and all of its subframes. Depending
+ on the backend support, will return detailed cookie information in the
+ ``cookies`` field.
.. deprecated:: 1.3
@@ -1756,9 +1852,9 @@ def get_layout_metrics() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tupl
:returns: A tuple with the following items:
- 0. **layoutViewport** - Deprecated metrics relating to the layout viewport. Can be in DP or in CSS pixels depending on the ``enable-use-zoom-for-dsf`` flag. Use ``cssLayoutViewport`` instead.
- 1. **visualViewport** - Deprecated metrics relating to the visual viewport. Can be in DP or in CSS pixels depending on the ``enable-use-zoom-for-dsf`` flag. Use ``cssVisualViewport`` instead.
- 2. **contentSize** - Deprecated size of scrollable area. Can be in DP or in CSS pixels depending on the ``enable-use-zoom-for-dsf`` flag. Use ``cssContentSize`` instead.
+ 0. **layoutViewport** - Deprecated metrics relating to the layout viewport. Is in device pixels. Use ``cssLayoutViewport`` instead.
+ 1. **visualViewport** - Deprecated metrics relating to the visual viewport. Is in device pixels. Use ``cssVisualViewport`` instead.
+ 2. **contentSize** - Deprecated size of scrollable area. Is in DP. Use ``cssContentSize`` instead.
3. **cssLayoutViewport** - Metrics relating to the layout viewport in CSS pixels.
4. **cssVisualViewport** - Metrics relating to the visual viewport in CSS pixels.
5. **cssContentSize** - Size of scrollable area in CSS pixels.
@@ -1890,7 +1986,7 @@ def navigate(
:returns: A tuple with the following items:
0. **frameId** - Frame id that has navigated (or failed to navigate)
- 1. **loaderId** - *(Optional)* Loader identifier.
+ 1. **loaderId** - *(Optional)* Loader identifier. This is omitted in case of same-document navigation, as the previously committed loaderId would not change.
2. **errorText** - *(Optional)* User friendly error message, present if and only if navigation has failed.
'''
params: T_JSON_DICT = dict()
@@ -1910,8 +2006,8 @@ def navigate(
json = yield cmd_dict
return (
FrameId.from_json(json['frameId']),
- network.LoaderId.from_json(json['loaderId']) if 'loaderId' in json else None,
- str(json['errorText']) if 'errorText' in json else None
+ network.LoaderId.from_json(json['loaderId']) if json.get('loaderId', None) is not None else None,
+ str(json['errorText']) if json.get('errorText', None) is not None else None
)
@@ -1944,7 +2040,6 @@ def print_to_pdf(
margin_left: typing.Optional[float] = None,
margin_right: typing.Optional[float] = None,
page_ranges: typing.Optional[str] = None,
- ignore_invalid_page_ranges: typing.Optional[bool] = None,
header_template: typing.Optional[str] = None,
footer_template: typing.Optional[str] = None,
prefer_css_page_size: typing.Optional[bool] = None,
@@ -1963,8 +2058,7 @@ def print_to_pdf(
:param margin_bottom: *(Optional)* Bottom margin in inches. Defaults to 1cm (~0.4 inches).
:param margin_left: *(Optional)* Left margin in inches. Defaults to 1cm (~0.4 inches).
:param margin_right: *(Optional)* Right margin in inches. Defaults to 1cm (~0.4 inches).
- :param page_ranges: *(Optional)* Paper ranges to print, e.g., '1-5, 8, 11-13'. Defaults to the empty string, which means print all pages.
- :param ignore_invalid_page_ranges: *(Optional)* Whether to silently ignore invalid but successfully parsed page ranges, such as '3-2'. Defaults to false.
+ :param page_ranges: *(Optional)* Paper ranges to print, one based, e.g., '1-5, 8, 11-13'. Pages are printed in the document order, not in the order specified, and no more than once. Defaults to empty string, which implies the entire document is printed. The page numbers are quietly capped to actual page count of the document, and ranges beyond the end of the document are ignored. If this results in no pages to print, an error is reported. It is an error to specify a range with start greater than end.
:param header_template: *(Optional)* HTML template for the print header. Should be valid HTML markup with following classes used to inject printing values into them: - ```date````: formatted print date - ````title````: document title - ````url````: document location - ````pageNumber````: current page number - ````totalPages````: total pages in the document For example, ```````` would generate span containing the title.
:param footer_template: *(Optional)* HTML template for the print footer. Should use the same format as the ````headerTemplate````.
:param prefer_css_page_size: *(Optional)* Whether or not to prefer page size as defined by css. Defaults to false, in which case the content will be scaled to fit the paper size.
@@ -1997,8 +2091,6 @@ def print_to_pdf(
params['marginRight'] = margin_right
if page_ranges is not None:
params['pageRanges'] = page_ranges
- if ignore_invalid_page_ranges is not None:
- params['ignoreInvalidPageRanges'] = ignore_invalid_page_ranges
if header_template is not None:
params['headerTemplate'] = header_template
if footer_template is not None:
@@ -2014,7 +2106,7 @@ def print_to_pdf(
json = yield cmd_dict
return (
str(json['data']),
- io.StreamHandle.from_json(json['stream']) if 'stream' in json else None
+ io.StreamHandle.from_json(json['stream']) if json.get('stream', None) is not None else None
)
@@ -2632,7 +2724,7 @@ def clear_compilation_cache() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
def set_spc_transaction_mode(
- mode: str
+ mode: AutoResponseMode
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Sets the Secure Payment Confirmation transaction mode.
@@ -2643,7 +2735,7 @@ def set_spc_transaction_mode(
:param mode:
'''
params: T_JSON_DICT = dict()
- params['mode'] = mode
+ params['mode'] = mode.to_json()
cmd_dict: T_JSON_DICT = {
'method': 'Page.setSPCTransactionMode',
'params': params,
@@ -2651,6 +2743,26 @@ def set_spc_transaction_mode(
json = yield cmd_dict
+def set_rph_registration_mode(
+ mode: AutoResponseMode
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Extensions for Custom Handlers API:
+ https://html.spec.whatwg.org/multipage/system-state.html#rph-automation
+
+ **EXPERIMENTAL**
+
+ :param mode:
+ '''
+ params: T_JSON_DICT = dict()
+ params['mode'] = mode.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Page.setRPHRegistrationMode',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def generate_test_report(
message: str,
group: typing.Optional[str] = None
@@ -2727,17 +2839,17 @@ class FileChooserOpened:
'''
#: Id of the frame containing input node.
frame_id: FrameId
- #: Input node id.
- backend_node_id: dom.BackendNodeId
#: Input mode.
mode: str
+ #: Input node id. Only present for file choosers opened via an element.
+ backend_node_id: typing.Optional[dom.BackendNodeId]
@classmethod
def from_json(cls, json: T_JSON_DICT) -> FileChooserOpened:
return cls(
frame_id=FrameId.from_json(json['frameId']),
- backend_node_id=dom.BackendNodeId.from_json(json['backendNodeId']),
- mode=str(json['mode'])
+ mode=str(json['mode']),
+ backend_node_id=dom.BackendNodeId.from_json(json['backendNodeId']) if json.get('backendNodeId', None) is not None else None
)
@@ -2759,7 +2871,7 @@ def from_json(cls, json: T_JSON_DICT) -> FrameAttached:
return cls(
frame_id=FrameId.from_json(json['frameId']),
parent_frame_id=FrameId.from_json(json['parentFrameId']),
- stack=runtime.StackTrace.from_json(json['stack']) if 'stack' in json else None
+ stack=runtime.StackTrace.from_json(json['stack']) if json.get('stack', None) is not None else None
)
@@ -3077,7 +3189,7 @@ def from_json(cls, json: T_JSON_DICT) -> JavascriptDialogOpening:
message=str(json['message']),
type_=DialogType.from_json(json['type']),
has_browser_handler=bool(json['hasBrowserHandler']),
- default_prompt=str(json['defaultPrompt']) if 'defaultPrompt' in json else None
+ default_prompt=str(json['defaultPrompt']) if json.get('defaultPrompt', None) is not None else None
)
@@ -3130,7 +3242,7 @@ def from_json(cls, json: T_JSON_DICT) -> BackForwardCacheNotUsed:
loader_id=network.LoaderId.from_json(json['loaderId']),
frame_id=FrameId.from_json(json['frameId']),
not_restored_explanations=[BackForwardCacheNotRestoredExplanation.from_json(i) for i in json['notRestoredExplanations']],
- not_restored_explanations_tree=BackForwardCacheNotRestoredExplanationTree.from_json(json['notRestoredExplanationsTree']) if 'notRestoredExplanationsTree' in json else None
+ not_restored_explanations_tree=BackForwardCacheNotRestoredExplanationTree.from_json(json['notRestoredExplanationsTree']) if json.get('notRestoredExplanationsTree', None) is not None else None
)
diff --git a/pycdp/cdp/performance_timeline.py b/pycdp/cdp/performance_timeline.py
index 408849d..e22e149 100644
--- a/pycdp/cdp/performance_timeline.py
+++ b/pycdp/cdp/performance_timeline.py
@@ -55,9 +55,9 @@ def from_json(cls, json: T_JSON_DICT) -> LargestContentfulPaint:
render_time=network.TimeSinceEpoch.from_json(json['renderTime']),
load_time=network.TimeSinceEpoch.from_json(json['loadTime']),
size=float(json['size']),
- element_id=str(json['elementId']) if 'elementId' in json else None,
- url=str(json['url']) if 'url' in json else None,
- node_id=dom.BackendNodeId.from_json(json['nodeId']) if 'nodeId' in json else None,
+ element_id=str(json['elementId']) if json.get('elementId', None) is not None else None,
+ url=str(json['url']) if json.get('url', None) is not None else None,
+ node_id=dom.BackendNodeId.from_json(json['nodeId']) if json.get('nodeId', None) is not None else None,
)
@@ -82,7 +82,7 @@ def from_json(cls, json: T_JSON_DICT) -> LayoutShiftAttribution:
return cls(
previous_rect=dom.Rect.from_json(json['previousRect']),
current_rect=dom.Rect.from_json(json['currentRect']),
- node_id=dom.BackendNodeId.from_json(json['nodeId']) if 'nodeId' in json else None,
+ node_id=dom.BackendNodeId.from_json(json['nodeId']) if json.get('nodeId', None) is not None else None,
)
@@ -161,9 +161,9 @@ def from_json(cls, json: T_JSON_DICT) -> TimelineEvent:
type_=str(json['type']),
name=str(json['name']),
time=network.TimeSinceEpoch.from_json(json['time']),
- duration=float(json['duration']) if 'duration' in json else None,
- lcp_details=LargestContentfulPaint.from_json(json['lcpDetails']) if 'lcpDetails' in json else None,
- layout_shift_details=LayoutShift.from_json(json['layoutShiftDetails']) if 'layoutShiftDetails' in json else None,
+ duration=float(json['duration']) if json.get('duration', None) is not None else None,
+ lcp_details=LargestContentfulPaint.from_json(json['lcpDetails']) if json.get('lcpDetails', None) is not None else None,
+ layout_shift_details=LayoutShift.from_json(json['layoutShiftDetails']) if json.get('layoutShiftDetails', None) is not None else None,
)
diff --git a/pycdp/cdp/profiler.py b/pycdp/cdp/profiler.py
index 0dc100f..8f7d5ab 100644
--- a/pycdp/cdp/profiler.py
+++ b/pycdp/cdp/profiler.py
@@ -58,10 +58,10 @@ def from_json(cls, json: T_JSON_DICT) -> ProfileNode:
return cls(
id_=int(json['id']),
call_frame=runtime.CallFrame.from_json(json['callFrame']),
- hit_count=int(json['hitCount']) if 'hitCount' in json else None,
- children=[int(i) for i in json['children']] if 'children' in json else None,
- deopt_reason=str(json['deoptReason']) if 'deoptReason' in json else None,
- position_ticks=[PositionTickInfo.from_json(i) for i in json['positionTicks']] if 'positionTicks' in json else None,
+ hit_count=int(json['hitCount']) if json.get('hitCount', None) is not None else None,
+ children=[int(i) for i in json['children']] if json.get('children', None) is not None else None,
+ deopt_reason=str(json['deoptReason']) if json.get('deoptReason', None) is not None else None,
+ position_ticks=[PositionTickInfo.from_json(i) for i in json['positionTicks']] if json.get('positionTicks', None) is not None else None,
)
@@ -103,8 +103,8 @@ def from_json(cls, json: T_JSON_DICT) -> Profile:
nodes=[ProfileNode.from_json(i) for i in json['nodes']],
start_time=float(json['startTime']),
end_time=float(json['endTime']),
- samples=[int(i) for i in json['samples']] if 'samples' in json else None,
- time_deltas=[int(i) for i in json['timeDeltas']] if 'timeDeltas' in json else None,
+ samples=[int(i) for i in json['samples']] if json.get('samples', None) is not None else None,
+ time_deltas=[int(i) for i in json['timeDeltas']] if json.get('timeDeltas', None) is not None else None,
)
@@ -223,81 +223,6 @@ def from_json(cls, json: T_JSON_DICT) -> ScriptCoverage:
)
-@dataclass
-class TypeObject:
- '''
- Describes a type collected during runtime.
- '''
- #: Name of a type collected with type profiling.
- name: str
-
- def to_json(self) -> T_JSON_DICT:
- json: T_JSON_DICT = dict()
- json['name'] = self.name
- return json
-
- @classmethod
- def from_json(cls, json: T_JSON_DICT) -> TypeObject:
- return cls(
- name=str(json['name']),
- )
-
-
-@dataclass
-class TypeProfileEntry:
- '''
- Source offset and types for a parameter or return value.
- '''
- #: Source offset of the parameter or end of function for return values.
- offset: int
-
- #: The types for this parameter or return value.
- types: typing.List[TypeObject]
-
- def to_json(self) -> T_JSON_DICT:
- json: T_JSON_DICT = dict()
- json['offset'] = self.offset
- json['types'] = [i.to_json() for i in self.types]
- return json
-
- @classmethod
- def from_json(cls, json: T_JSON_DICT) -> TypeProfileEntry:
- return cls(
- offset=int(json['offset']),
- types=[TypeObject.from_json(i) for i in json['types']],
- )
-
-
-@dataclass
-class ScriptTypeProfile:
- '''
- Type profile data collected during runtime for a JavaScript script.
- '''
- #: JavaScript script id.
- script_id: runtime.ScriptId
-
- #: JavaScript script name or url.
- url: str
-
- #: Type profile entries for parameters and return values of the functions in the script.
- entries: typing.List[TypeProfileEntry]
-
- def to_json(self) -> T_JSON_DICT:
- json: T_JSON_DICT = dict()
- json['scriptId'] = self.script_id.to_json()
- json['url'] = self.url
- json['entries'] = [i.to_json() for i in self.entries]
- return json
-
- @classmethod
- def from_json(cls, json: T_JSON_DICT) -> ScriptTypeProfile:
- return cls(
- script_id=runtime.ScriptId.from_json(json['scriptId']),
- url=str(json['url']),
- entries=[TypeProfileEntry.from_json(i) for i in json['entries']],
- )
-
-
def disable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
cmd_dict: T_JSON_DICT = {
@@ -383,18 +308,6 @@ def start_precise_coverage(
return float(json['timestamp'])
-def start_type_profile() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
- '''
- Enable type profile.
-
- **EXPERIMENTAL**
- '''
- cmd_dict: T_JSON_DICT = {
- 'method': 'Profiler.startTypeProfile',
- }
- json = yield cmd_dict
-
-
def stop() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,Profile]:
'''
@@ -419,18 +332,6 @@ def stop_precise_coverage() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
json = yield cmd_dict
-def stop_type_profile() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
- '''
- Disable type profile. Disabling releases type profile data collected so far.
-
- **EXPERIMENTAL**
- '''
- cmd_dict: T_JSON_DICT = {
- 'method': 'Profiler.stopTypeProfile',
- }
- json = yield cmd_dict
-
-
def take_precise_coverage() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[typing.List[ScriptCoverage], float]]:
'''
Collect coverage data for the current isolate, and resets execution counters. Precise code
@@ -451,21 +352,6 @@ def take_precise_coverage() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.T
)
-def take_type_profile() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[ScriptTypeProfile]]:
- '''
- Collect type profile.
-
- **EXPERIMENTAL**
-
- :returns: Type profile for all scripts since startTypeProfile() was turned on.
- '''
- cmd_dict: T_JSON_DICT = {
- 'method': 'Profiler.takeTypeProfile',
- }
- json = yield cmd_dict
- return [ScriptTypeProfile.from_json(i) for i in json['result']]
-
-
@event_class('Profiler.consoleProfileFinished')
@dataclass
class ConsoleProfileFinished:
@@ -482,7 +368,7 @@ def from_json(cls, json: T_JSON_DICT) -> ConsoleProfileFinished:
id_=str(json['id']),
location=debugger.Location.from_json(json['location']),
profile=Profile.from_json(json['profile']),
- title=str(json['title']) if 'title' in json else None
+ title=str(json['title']) if json.get('title', None) is not None else None
)
@@ -503,7 +389,7 @@ def from_json(cls, json: T_JSON_DICT) -> ConsoleProfileStarted:
return cls(
id_=str(json['id']),
location=debugger.Location.from_json(json['location']),
- title=str(json['title']) if 'title' in json else None
+ title=str(json['title']) if json.get('title', None) is not None else None
)
diff --git a/pycdp/cdp/runtime.py b/pycdp/cdp/runtime.py
index 043a762..e9d1550 100644
--- a/pycdp/cdp/runtime.py
+++ b/pycdp/cdp/runtime.py
@@ -27,6 +27,36 @@ def __repr__(self):
return 'ScriptId({})'.format(super().__repr__())
+@dataclass
+class WebDriverValue:
+ '''
+ Represents the value serialized by the WebDriver BiDi specification
+ https://w3c.github.io/webdriver-bidi.
+ '''
+ type_: str
+
+ value: typing.Optional[typing.Any] = None
+
+ object_id: typing.Optional[str] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['type'] = self.type_
+ if self.value is not None:
+ json['value'] = self.value
+ if self.object_id is not None:
+ json['objectId'] = self.object_id
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> WebDriverValue:
+ return cls(
+ type_=str(json['type']),
+ value=json['value'] if json.get('value', None) is not None else None,
+ object_id=str(json['objectId']) if json.get('objectId', None) is not None else None,
+ )
+
+
class RemoteObjectId(str):
'''
Unique object identifier.
@@ -84,6 +114,9 @@ class RemoteObject:
#: String representation of the object.
description: typing.Optional[str] = None
+ #: WebDriver BiDi representation of the value.
+ web_driver_value: typing.Optional[WebDriverValue] = None
+
#: Unique object identifier (for non-primitive values).
object_id: typing.Optional[RemoteObjectId] = None
@@ -105,6 +138,8 @@ def to_json(self) -> T_JSON_DICT:
json['unserializableValue'] = self.unserializable_value.to_json()
if self.description is not None:
json['description'] = self.description
+ if self.web_driver_value is not None:
+ json['webDriverValue'] = self.web_driver_value.to_json()
if self.object_id is not None:
json['objectId'] = self.object_id.to_json()
if self.preview is not None:
@@ -117,14 +152,15 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> RemoteObject:
return cls(
type_=str(json['type']),
- subtype=str(json['subtype']) if 'subtype' in json else None,
- class_name=str(json['className']) if 'className' in json else None,
- value=json['value'] if 'value' in json else None,
- unserializable_value=UnserializableValue.from_json(json['unserializableValue']) if 'unserializableValue' in json else None,
- description=str(json['description']) if 'description' in json else None,
- object_id=RemoteObjectId.from_json(json['objectId']) if 'objectId' in json else None,
- preview=ObjectPreview.from_json(json['preview']) if 'preview' in json else None,
- custom_preview=CustomPreview.from_json(json['customPreview']) if 'customPreview' in json else None,
+ subtype=str(json['subtype']) if json.get('subtype', None) is not None else None,
+ class_name=str(json['className']) if json.get('className', None) is not None else None,
+ value=json['value'] if json.get('value', None) is not None else None,
+ unserializable_value=UnserializableValue.from_json(json['unserializableValue']) if json.get('unserializableValue', None) is not None else None,
+ description=str(json['description']) if json.get('description', None) is not None else None,
+ web_driver_value=WebDriverValue.from_json(json['webDriverValue']) if json.get('webDriverValue', None) is not None else None,
+ object_id=RemoteObjectId.from_json(json['objectId']) if json.get('objectId', None) is not None else None,
+ preview=ObjectPreview.from_json(json['preview']) if json.get('preview', None) is not None else None,
+ custom_preview=CustomPreview.from_json(json['customPreview']) if json.get('customPreview', None) is not None else None,
)
@@ -150,7 +186,7 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> CustomPreview:
return cls(
header=str(json['header']),
- body_getter_id=RemoteObjectId.from_json(json['bodyGetterId']) if 'bodyGetterId' in json else None,
+ body_getter_id=RemoteObjectId.from_json(json['bodyGetterId']) if json.get('bodyGetterId', None) is not None else None,
)
@@ -196,9 +232,9 @@ def from_json(cls, json: T_JSON_DICT) -> ObjectPreview:
type_=str(json['type']),
overflow=bool(json['overflow']),
properties=[PropertyPreview.from_json(i) for i in json['properties']],
- subtype=str(json['subtype']) if 'subtype' in json else None,
- description=str(json['description']) if 'description' in json else None,
- entries=[EntryPreview.from_json(i) for i in json['entries']] if 'entries' in json else None,
+ subtype=str(json['subtype']) if json.get('subtype', None) is not None else None,
+ description=str(json['description']) if json.get('description', None) is not None else None,
+ entries=[EntryPreview.from_json(i) for i in json['entries']] if json.get('entries', None) is not None else None,
)
@@ -236,9 +272,9 @@ def from_json(cls, json: T_JSON_DICT) -> PropertyPreview:
return cls(
name=str(json['name']),
type_=str(json['type']),
- value=str(json['value']) if 'value' in json else None,
- value_preview=ObjectPreview.from_json(json['valuePreview']) if 'valuePreview' in json else None,
- subtype=str(json['subtype']) if 'subtype' in json else None,
+ value=str(json['value']) if json.get('value', None) is not None else None,
+ value_preview=ObjectPreview.from_json(json['valuePreview']) if json.get('valuePreview', None) is not None else None,
+ subtype=str(json['subtype']) if json.get('subtype', None) is not None else None,
)
@@ -261,7 +297,7 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> EntryPreview:
return cls(
value=ObjectPreview.from_json(json['value']),
- key=ObjectPreview.from_json(json['key']) if 'key' in json else None,
+ key=ObjectPreview.from_json(json['key']) if json.get('key', None) is not None else None,
)
@@ -331,13 +367,13 @@ def from_json(cls, json: T_JSON_DICT) -> PropertyDescriptor:
name=str(json['name']),
configurable=bool(json['configurable']),
enumerable=bool(json['enumerable']),
- value=RemoteObject.from_json(json['value']) if 'value' in json else None,
- writable=bool(json['writable']) if 'writable' in json else None,
- get=RemoteObject.from_json(json['get']) if 'get' in json else None,
- set_=RemoteObject.from_json(json['set']) if 'set' in json else None,
- was_thrown=bool(json['wasThrown']) if 'wasThrown' in json else None,
- is_own=bool(json['isOwn']) if 'isOwn' in json else None,
- symbol=RemoteObject.from_json(json['symbol']) if 'symbol' in json else None,
+ value=RemoteObject.from_json(json['value']) if json.get('value', None) is not None else None,
+ writable=bool(json['writable']) if json.get('writable', None) is not None else None,
+ get=RemoteObject.from_json(json['get']) if json.get('get', None) is not None else None,
+ set_=RemoteObject.from_json(json['set']) if json.get('set', None) is not None else None,
+ was_thrown=bool(json['wasThrown']) if json.get('wasThrown', None) is not None else None,
+ is_own=bool(json['isOwn']) if json.get('isOwn', None) is not None else None,
+ symbol=RemoteObject.from_json(json['symbol']) if json.get('symbol', None) is not None else None,
)
@@ -363,7 +399,7 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> InternalPropertyDescriptor:
return cls(
name=str(json['name']),
- value=RemoteObject.from_json(json['value']) if 'value' in json else None,
+ value=RemoteObject.from_json(json['value']) if json.get('value', None) is not None else None,
)
@@ -401,9 +437,9 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> PrivatePropertyDescriptor:
return cls(
name=str(json['name']),
- value=RemoteObject.from_json(json['value']) if 'value' in json else None,
- get=RemoteObject.from_json(json['get']) if 'get' in json else None,
- set_=RemoteObject.from_json(json['set']) if 'set' in json else None,
+ value=RemoteObject.from_json(json['value']) if json.get('value', None) is not None else None,
+ get=RemoteObject.from_json(json['get']) if json.get('get', None) is not None else None,
+ set_=RemoteObject.from_json(json['set']) if json.get('set', None) is not None else None,
)
@@ -435,9 +471,9 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> CallArgument:
return cls(
- value=json['value'] if 'value' in json else None,
- unserializable_value=UnserializableValue.from_json(json['unserializableValue']) if 'unserializableValue' in json else None,
- object_id=RemoteObjectId.from_json(json['objectId']) if 'objectId' in json else None,
+ value=json['value'] if json.get('value', None) is not None else None,
+ unserializable_value=UnserializableValue.from_json(json['unserializableValue']) if json.get('unserializableValue', None) is not None else None,
+ object_id=RemoteObjectId.from_json(json['objectId']) if json.get('objectId', None) is not None else None,
)
@@ -496,7 +532,7 @@ def from_json(cls, json: T_JSON_DICT) -> ExecutionContextDescription:
origin=str(json['origin']),
name=str(json['name']),
unique_id=str(json['uniqueId']),
- aux_data=dict(json['auxData']) if 'auxData' in json else None,
+ aux_data=dict(json['auxData']) if json.get('auxData', None) is not None else None,
)
@@ -565,12 +601,12 @@ def from_json(cls, json: T_JSON_DICT) -> ExceptionDetails:
text=str(json['text']),
line_number=int(json['lineNumber']),
column_number=int(json['columnNumber']),
- script_id=ScriptId.from_json(json['scriptId']) if 'scriptId' in json else None,
- url=str(json['url']) if 'url' in json else None,
- stack_trace=StackTrace.from_json(json['stackTrace']) if 'stackTrace' in json else None,
- exception=RemoteObject.from_json(json['exception']) if 'exception' in json else None,
- execution_context_id=ExecutionContextId.from_json(json['executionContextId']) if 'executionContextId' in json else None,
- exception_meta_data=dict(json['exceptionMetaData']) if 'exceptionMetaData' in json else None,
+ script_id=ScriptId.from_json(json['scriptId']) if json.get('scriptId', None) is not None else None,
+ url=str(json['url']) if json.get('url', None) is not None else None,
+ stack_trace=StackTrace.from_json(json['stackTrace']) if json.get('stackTrace', None) is not None else None,
+ exception=RemoteObject.from_json(json['exception']) if json.get('exception', None) is not None else None,
+ execution_context_id=ExecutionContextId.from_json(json['executionContextId']) if json.get('executionContextId', None) is not None else None,
+ exception_meta_data=dict(json['exceptionMetaData']) if json.get('exceptionMetaData', None) is not None else None,
)
@@ -677,9 +713,9 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> StackTrace:
return cls(
call_frames=[CallFrame.from_json(i) for i in json['callFrames']],
- description=str(json['description']) if 'description' in json else None,
- parent=StackTrace.from_json(json['parent']) if 'parent' in json else None,
- parent_id=StackTraceId.from_json(json['parentId']) if 'parentId' in json else None,
+ description=str(json['description']) if json.get('description', None) is not None else None,
+ parent=StackTrace.from_json(json['parent']) if json.get('parent', None) is not None else None,
+ parent_id=StackTraceId.from_json(json['parentId']) if json.get('parentId', None) is not None else None,
)
@@ -719,7 +755,7 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> StackTraceId:
return cls(
id_=str(json['id']),
- debugger_id=UniqueDebuggerId.from_json(json['debuggerId']) if 'debuggerId' in json else None,
+ debugger_id=UniqueDebuggerId.from_json(json['debuggerId']) if json.get('debuggerId', None) is not None else None,
)
@@ -752,7 +788,7 @@ def await_promise(
json = yield cmd_dict
return (
RemoteObject.from_json(json['result']),
- ExceptionDetails.from_json(json['exceptionDetails']) if 'exceptionDetails' in json else None
+ ExceptionDetails.from_json(json['exceptionDetails']) if json.get('exceptionDetails', None) is not None else None
)
@@ -767,7 +803,9 @@ def call_function_on(
await_promise: typing.Optional[bool] = None,
execution_context_id: typing.Optional[ExecutionContextId] = None,
object_group: typing.Optional[str] = None,
- throw_on_side_effect: typing.Optional[bool] = None
+ throw_on_side_effect: typing.Optional[bool] = None,
+ unique_context_id: typing.Optional[str] = None,
+ generate_web_driver_value: typing.Optional[bool] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[RemoteObject, typing.Optional[ExceptionDetails]]]:
'''
Calls function with given declaration on the given object. Object group of the result is
@@ -780,10 +818,12 @@ def call_function_on(
:param return_by_value: *(Optional)* Whether the result is expected to be a JSON object which should be sent by value.
:param generate_preview: **(EXPERIMENTAL)** *(Optional)* Whether preview should be generated for the result.
:param user_gesture: *(Optional)* Whether execution should be treated as initiated by user in the UI.
- :param await_promise: *(Optional)* Whether execution should ````await``` for resulting value and return once awaited promise is resolved.
+ :param await_promise: *(Optional)* Whether execution should ````await```` for resulting value and return once awaited promise is resolved.
:param execution_context_id: *(Optional)* Specifies execution context which global object will be used to call function on. Either executionContextId or objectId should be specified.
:param object_group: *(Optional)* Symbolic group name that can be used to release multiple objects. If objectGroup is not specified and objectId is, objectGroup will be inherited from object.
:param throw_on_side_effect: **(EXPERIMENTAL)** *(Optional)* Whether to throw an exception if side effect cannot be ruled out during evaluation.
+ :param unique_context_id: **(EXPERIMENTAL)** *(Optional)* An alternative way to specify the execution context to call function on. Compared to contextId that may be reused across processes, this is guaranteed to be system-unique, so it can be used to prevent accidental function call in context different than intended (e.g. as a result of navigation across process boundaries). This is mutually exclusive with ````executionContextId````.
+ :param generate_web_driver_value: **(EXPERIMENTAL)** *(Optional)* Whether the result should contain ````webDriverValue````, serialized according to https://w3c.github.io/webdriver-bidi. This is mutually exclusive with ````returnByValue````, but resulting ````objectId```` is still provided.
:returns: A tuple with the following items:
0. **result** - Call result.
@@ -811,6 +851,10 @@ def call_function_on(
params['objectGroup'] = object_group
if throw_on_side_effect is not None:
params['throwOnSideEffect'] = throw_on_side_effect
+ if unique_context_id is not None:
+ params['uniqueContextId'] = unique_context_id
+ if generate_web_driver_value is not None:
+ params['generateWebDriverValue'] = generate_web_driver_value
cmd_dict: T_JSON_DICT = {
'method': 'Runtime.callFunctionOn',
'params': params,
@@ -818,7 +862,7 @@ def call_function_on(
json = yield cmd_dict
return (
RemoteObject.from_json(json['result']),
- ExceptionDetails.from_json(json['exceptionDetails']) if 'exceptionDetails' in json else None
+ ExceptionDetails.from_json(json['exceptionDetails']) if json.get('exceptionDetails', None) is not None else None
)
@@ -852,8 +896,8 @@ def compile_script(
}
json = yield cmd_dict
return (
- ScriptId.from_json(json['scriptId']) if 'scriptId' in json else None,
- ExceptionDetails.from_json(json['exceptionDetails']) if 'exceptionDetails' in json else None
+ ScriptId.from_json(json['scriptId']) if json.get('scriptId', None) is not None else None,
+ ExceptionDetails.from_json(json['exceptionDetails']) if json.get('exceptionDetails', None) is not None else None
)
@@ -904,7 +948,8 @@ def evaluate(
disable_breaks: typing.Optional[bool] = None,
repl_mode: typing.Optional[bool] = None,
allow_unsafe_eval_blocked_by_csp: typing.Optional[bool] = None,
- unique_context_id: typing.Optional[str] = None
+ unique_context_id: typing.Optional[str] = None,
+ generate_web_driver_value: typing.Optional[bool] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[RemoteObject, typing.Optional[ExceptionDetails]]]:
'''
Evaluates expression on global object.
@@ -924,6 +969,7 @@ def evaluate(
:param repl_mode: **(EXPERIMENTAL)** *(Optional)* Setting this flag to true enables ````let```` re-declaration and top-level ````await````. Note that ````let```` variables can only be re-declared if they originate from ````replMode```` themselves.
:param allow_unsafe_eval_blocked_by_csp: **(EXPERIMENTAL)** *(Optional)* The Content Security Policy (CSP) for the target might block 'unsafe-eval' which includes eval(), Function(), setTimeout() and setInterval() when called with non-callable arguments. This flag bypasses CSP for this evaluation and allows unsafe-eval. Defaults to true.
:param unique_context_id: **(EXPERIMENTAL)** *(Optional)* An alternative way to specify the execution context to evaluate in. Compared to contextId that may be reused across processes, this is guaranteed to be system-unique, so it can be used to prevent accidental evaluation of the expression in context different than intended (e.g. as a result of navigation across process boundaries). This is mutually exclusive with ````contextId```.
+ :param generate_web_driver_value: **(EXPERIMENTAL)** *(Optional)* Whether the result should be serialized according to https://w3c.github.io/webdriver-bidi.
:returns: A tuple with the following items:
0. **result** - Evaluation result.
@@ -959,6 +1005,8 @@ def evaluate(
params['allowUnsafeEvalBlockedByCSP'] = allow_unsafe_eval_blocked_by_csp
if unique_context_id is not None:
params['uniqueContextId'] = unique_context_id
+ if generate_web_driver_value is not None:
+ params['generateWebDriverValue'] = generate_web_driver_value
cmd_dict: T_JSON_DICT = {
'method': 'Runtime.evaluate',
'params': params,
@@ -966,7 +1014,7 @@ def evaluate(
json = yield cmd_dict
return (
RemoteObject.from_json(json['result']),
- ExceptionDetails.from_json(json['exceptionDetails']) if 'exceptionDetails' in json else None
+ ExceptionDetails.from_json(json['exceptionDetails']) if json.get('exceptionDetails', None) is not None else None
)
@@ -1047,9 +1095,9 @@ def get_properties(
json = yield cmd_dict
return (
[PropertyDescriptor.from_json(i) for i in json['result']],
- [InternalPropertyDescriptor.from_json(i) for i in json['internalProperties']] if 'internalProperties' in json else None,
- [PrivatePropertyDescriptor.from_json(i) for i in json['privateProperties']] if 'privateProperties' in json else None,
- ExceptionDetails.from_json(json['exceptionDetails']) if 'exceptionDetails' in json else None
+ [InternalPropertyDescriptor.from_json(i) for i in json['internalProperties']] if json.get('internalProperties', None) is not None else None,
+ [PrivatePropertyDescriptor.from_json(i) for i in json['privateProperties']] if json.get('privateProperties', None) is not None else None,
+ ExceptionDetails.from_json(json['exceptionDetails']) if json.get('exceptionDetails', None) is not None else None
)
@@ -1187,7 +1235,7 @@ def run_script(
json = yield cmd_dict
return (
RemoteObject.from_json(json['result']),
- ExceptionDetails.from_json(json['exceptionDetails']) if 'exceptionDetails' in json else None
+ ExceptionDetails.from_json(json['exceptionDetails']) if json.get('exceptionDetails', None) is not None else None
)
@@ -1311,6 +1359,31 @@ def remove_binding(
json = yield cmd_dict
+def get_exception_details(
+ error_object_id: RemoteObjectId
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Optional[ExceptionDetails]]:
+ '''
+ This method tries to lookup and populate exception details for a
+ JavaScript Error object.
+ Note that the stackTrace portion of the resulting exceptionDetails will
+ only be populated if the Runtime domain was enabled at the time when the
+ Error was thrown.
+
+ **EXPERIMENTAL**
+
+ :param error_object_id: The error object for which to resolve the exception details.
+ :returns:
+ '''
+ params: T_JSON_DICT = dict()
+ params['errorObjectId'] = error_object_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Runtime.getExceptionDetails',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return ExceptionDetails.from_json(json['exceptionDetails']) if json.get('exceptionDetails', None) is not None else None
+
+
@event_class('Runtime.bindingCalled')
@dataclass
class BindingCalled:
@@ -1363,8 +1436,8 @@ def from_json(cls, json: T_JSON_DICT) -> ConsoleAPICalled:
args=[RemoteObject.from_json(i) for i in json['args']],
execution_context_id=ExecutionContextId.from_json(json['executionContextId']),
timestamp=Timestamp.from_json(json['timestamp']),
- stack_trace=StackTrace.from_json(json['stackTrace']) if 'stackTrace' in json else None,
- context=str(json['context']) if 'context' in json else None
+ stack_trace=StackTrace.from_json(json['stackTrace']) if json.get('stackTrace', None) is not None else None,
+ context=str(json['context']) if json.get('context', None) is not None else None
)
@@ -1429,11 +1502,14 @@ class ExecutionContextDestroyed:
'''
#: Id of the destroyed context
execution_context_id: ExecutionContextId
+ #: Unique Id of the destroyed context
+ execution_context_unique_id: str
@classmethod
def from_json(cls, json: T_JSON_DICT) -> ExecutionContextDestroyed:
return cls(
- execution_context_id=ExecutionContextId.from_json(json['executionContextId'])
+ execution_context_id=ExecutionContextId.from_json(json['executionContextId']),
+ execution_context_unique_id=str(json['executionContextUniqueId'])
)
@@ -1469,5 +1545,5 @@ def from_json(cls, json: T_JSON_DICT) -> InspectRequested:
return cls(
object_=RemoteObject.from_json(json['object']),
hints=dict(json['hints']),
- execution_context_id=ExecutionContextId.from_json(json['executionContextId']) if 'executionContextId' in json else None
+ execution_context_id=ExecutionContextId.from_json(json['executionContextId']) if json.get('executionContextId', None) is not None else None
)
diff --git a/pycdp/cdp/security.py b/pycdp/cdp/security.py
index c8d4c98..93313fb 100644
--- a/pycdp/cdp/security.py
+++ b/pycdp/cdp/security.py
@@ -168,9 +168,9 @@ def from_json(cls, json: T_JSON_DICT) -> CertificateSecurityState:
obsolete_ssl_key_exchange=bool(json['obsoleteSslKeyExchange']),
obsolete_ssl_cipher=bool(json['obsoleteSslCipher']),
obsolete_ssl_signature=bool(json['obsoleteSslSignature']),
- key_exchange_group=str(json['keyExchangeGroup']) if 'keyExchangeGroup' in json else None,
- mac=str(json['mac']) if 'mac' in json else None,
- certificate_network_error=str(json['certificateNetworkError']) if 'certificateNetworkError' in json else None,
+ key_exchange_group=str(json['keyExchangeGroup']) if json.get('keyExchangeGroup', None) is not None else None,
+ mac=str(json['mac']) if json.get('mac', None) is not None else None,
+ certificate_network_error=str(json['certificateNetworkError']) if json.get('certificateNetworkError', None) is not None else None,
)
@@ -205,7 +205,7 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> SafetyTipInfo:
return cls(
safety_tip_status=SafetyTipStatus.from_json(json['safetyTipStatus']),
- safe_url=str(json['safeUrl']) if 'safeUrl' in json else None,
+ safe_url=str(json['safeUrl']) if json.get('safeUrl', None) is not None else None,
)
@@ -241,8 +241,8 @@ def from_json(cls, json: T_JSON_DICT) -> VisibleSecurityState:
return cls(
security_state=SecurityState.from_json(json['securityState']),
security_state_issue_ids=[str(i) for i in json['securityStateIssueIds']],
- certificate_security_state=CertificateSecurityState.from_json(json['certificateSecurityState']) if 'certificateSecurityState' in json else None,
- safety_tip_info=SafetyTipInfo.from_json(json['safetyTipInfo']) if 'safetyTipInfo' in json else None,
+ certificate_security_state=CertificateSecurityState.from_json(json['certificateSecurityState']) if json.get('certificateSecurityState', None) is not None else None,
+ safety_tip_info=SafetyTipInfo.from_json(json['safetyTipInfo']) if json.get('safetyTipInfo', None) is not None else None,
)
@@ -293,7 +293,7 @@ def from_json(cls, json: T_JSON_DICT) -> SecurityStateExplanation:
description=str(json['description']),
mixed_content_type=MixedContentType.from_json(json['mixedContentType']),
certificate=[str(i) for i in json['certificate']],
- recommendations=[str(i) for i in json['recommendations']] if 'recommendations' in json else None,
+ recommendations=[str(i) for i in json['recommendations']] if json.get('recommendations', None) is not None else None,
)
@@ -516,5 +516,5 @@ def from_json(cls, json: T_JSON_DICT) -> SecurityStateChanged:
scheme_is_cryptographic=bool(json['schemeIsCryptographic']),
explanations=[SecurityStateExplanation.from_json(i) for i in json['explanations']],
insecure_content_status=InsecureContentStatus.from_json(json['insecureContentStatus']),
- summary=str(json['summary']) if 'summary' in json else None
+ summary=str(json['summary']) if json.get('summary', None) is not None else None
)
diff --git a/pycdp/cdp/service_worker.py b/pycdp/cdp/service_worker.py
index 3a4db60..a303e9f 100644
--- a/pycdp/cdp/service_worker.py
+++ b/pycdp/cdp/service_worker.py
@@ -134,10 +134,10 @@ def from_json(cls, json: T_JSON_DICT) -> ServiceWorkerVersion:
script_url=str(json['scriptURL']),
running_status=ServiceWorkerVersionRunningStatus.from_json(json['runningStatus']),
status=ServiceWorkerVersionStatus.from_json(json['status']),
- script_last_modified=float(json['scriptLastModified']) if 'scriptLastModified' in json else None,
- script_response_time=float(json['scriptResponseTime']) if 'scriptResponseTime' in json else None,
- controlled_clients=[target.TargetID.from_json(i) for i in json['controlledClients']] if 'controlledClients' in json else None,
- target_id=target.TargetID.from_json(json['targetId']) if 'targetId' in json else None,
+ script_last_modified=float(json['scriptLastModified']) if json.get('scriptLastModified', None) is not None else None,
+ script_response_time=float(json['scriptResponseTime']) if json.get('scriptResponseTime', None) is not None else None,
+ controlled_clients=[target.TargetID.from_json(i) for i in json['controlledClients']] if json.get('controlledClients', None) is not None else None,
+ target_id=target.TargetID.from_json(json['targetId']) if json.get('targetId', None) is not None else None,
)
diff --git a/pycdp/cdp/storage.py b/pycdp/cdp/storage.py
index 75f1b6b..4d79f57 100644
--- a/pycdp/cdp/storage.py
+++ b/pycdp/cdp/storage.py
@@ -13,6 +13,19 @@
from . import browser
from . import network
+from . import page
+
+
+class SerializedStorageKey(str):
+ def to_json(self) -> str:
+ return self
+
+ @classmethod
+ def from_json(cls, json: str) -> SerializedStorageKey:
+ return cls(json)
+
+ def __repr__(self):
+ return 'SerializedStorageKey({})'.format(super().__repr__())
class StorageType(enum.Enum):
@@ -29,6 +42,8 @@ class StorageType(enum.Enum):
SERVICE_WORKERS = "service_workers"
CACHE_STORAGE = "cache_storage"
INTEREST_GROUPS = "interest_groups"
+ SHARED_STORAGE = "shared_storage"
+ STORAGE_BUCKETS = "storage_buckets"
ALL_ = "all"
OTHER = "other"
@@ -96,6 +111,7 @@ class InterestGroupAccessType(enum.Enum):
JOIN = "join"
LEAVE = "leave"
UPDATE = "update"
+ LOADED = "loaded"
BID = "bid"
WIN = "win"
@@ -127,7 +143,7 @@ def to_json(self) -> T_JSON_DICT:
def from_json(cls, json: T_JSON_DICT) -> InterestGroupAd:
return cls(
render_url=str(json['renderUrl']),
- metadata=str(json['metadata']) if 'metadata' in json else None,
+ metadata=str(json['metadata']) if json.get('metadata', None) is not None else None,
)
@@ -191,14 +207,294 @@ def from_json(cls, json: T_JSON_DICT) -> InterestGroupDetails:
trusted_bidding_signals_keys=[str(i) for i in json['trustedBiddingSignalsKeys']],
ads=[InterestGroupAd.from_json(i) for i in json['ads']],
ad_components=[InterestGroupAd.from_json(i) for i in json['adComponents']],
- bidding_url=str(json['biddingUrl']) if 'biddingUrl' in json else None,
- bidding_wasm_helper_url=str(json['biddingWasmHelperUrl']) if 'biddingWasmHelperUrl' in json else None,
- update_url=str(json['updateUrl']) if 'updateUrl' in json else None,
- trusted_bidding_signals_url=str(json['trustedBiddingSignalsUrl']) if 'trustedBiddingSignalsUrl' in json else None,
- user_bidding_signals=str(json['userBiddingSignals']) if 'userBiddingSignals' in json else None,
+ bidding_url=str(json['biddingUrl']) if json.get('biddingUrl', None) is not None else None,
+ bidding_wasm_helper_url=str(json['biddingWasmHelperUrl']) if json.get('biddingWasmHelperUrl', None) is not None else None,
+ update_url=str(json['updateUrl']) if json.get('updateUrl', None) is not None else None,
+ trusted_bidding_signals_url=str(json['trustedBiddingSignalsUrl']) if json.get('trustedBiddingSignalsUrl', None) is not None else None,
+ user_bidding_signals=str(json['userBiddingSignals']) if json.get('userBiddingSignals', None) is not None else None,
+ )
+
+
+class SharedStorageAccessType(enum.Enum):
+ '''
+ Enum of shared storage access types.
+ '''
+ DOCUMENT_ADD_MODULE = "documentAddModule"
+ DOCUMENT_SELECT_URL = "documentSelectURL"
+ DOCUMENT_RUN = "documentRun"
+ DOCUMENT_SET = "documentSet"
+ DOCUMENT_APPEND = "documentAppend"
+ DOCUMENT_DELETE = "documentDelete"
+ DOCUMENT_CLEAR = "documentClear"
+ WORKLET_SET = "workletSet"
+ WORKLET_APPEND = "workletAppend"
+ WORKLET_DELETE = "workletDelete"
+ WORKLET_CLEAR = "workletClear"
+ WORKLET_GET = "workletGet"
+ WORKLET_KEYS = "workletKeys"
+ WORKLET_ENTRIES = "workletEntries"
+ WORKLET_LENGTH = "workletLength"
+ WORKLET_REMAINING_BUDGET = "workletRemainingBudget"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> SharedStorageAccessType:
+ return cls(json)
+
+
+@dataclass
+class SharedStorageEntry:
+ '''
+ Struct for a single key-value pair in an origin's shared storage.
+ '''
+ key: str
+
+ value: str
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['key'] = self.key
+ json['value'] = self.value
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> SharedStorageEntry:
+ return cls(
+ key=str(json['key']),
+ value=str(json['value']),
+ )
+
+
+@dataclass
+class SharedStorageMetadata:
+ '''
+ Details for an origin's shared storage.
+ '''
+ creation_time: network.TimeSinceEpoch
+
+ length: int
+
+ remaining_budget: float
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['creationTime'] = self.creation_time.to_json()
+ json['length'] = self.length
+ json['remainingBudget'] = self.remaining_budget
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> SharedStorageMetadata:
+ return cls(
+ creation_time=network.TimeSinceEpoch.from_json(json['creationTime']),
+ length=int(json['length']),
+ remaining_budget=float(json['remainingBudget']),
+ )
+
+
+@dataclass
+class SharedStorageReportingMetadata:
+ '''
+ Pair of reporting metadata details for a candidate URL for ``selectURL()``.
+ '''
+ event_type: str
+
+ reporting_url: str
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['eventType'] = self.event_type
+ json['reportingUrl'] = self.reporting_url
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> SharedStorageReportingMetadata:
+ return cls(
+ event_type=str(json['eventType']),
+ reporting_url=str(json['reportingUrl']),
+ )
+
+
+@dataclass
+class SharedStorageUrlWithMetadata:
+ '''
+ Bundles a candidate URL with its reporting metadata.
+ '''
+ #: Spec of candidate URL.
+ url: str
+
+ #: Any associated reporting metadata.
+ reporting_metadata: typing.List[SharedStorageReportingMetadata]
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['url'] = self.url
+ json['reportingMetadata'] = [i.to_json() for i in self.reporting_metadata]
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> SharedStorageUrlWithMetadata:
+ return cls(
+ url=str(json['url']),
+ reporting_metadata=[SharedStorageReportingMetadata.from_json(i) for i in json['reportingMetadata']],
+ )
+
+
+@dataclass
+class SharedStorageAccessParams:
+ '''
+ Bundles the parameters for shared storage access events whose
+ presence/absence can vary according to SharedStorageAccessType.
+ '''
+ #: Spec of the module script URL.
+ #: Present only for SharedStorageAccessType.documentAddModule.
+ script_source_url: typing.Optional[str] = None
+
+ #: Name of the registered operation to be run.
+ #: Present only for SharedStorageAccessType.documentRun and
+ #: SharedStorageAccessType.documentSelectURL.
+ operation_name: typing.Optional[str] = None
+
+ #: The operation's serialized data in bytes (converted to a string).
+ #: Present only for SharedStorageAccessType.documentRun and
+ #: SharedStorageAccessType.documentSelectURL.
+ serialized_data: typing.Optional[str] = None
+
+ #: Array of candidate URLs' specs, along with any associated metadata.
+ #: Present only for SharedStorageAccessType.documentSelectURL.
+ urls_with_metadata: typing.Optional[typing.List[SharedStorageUrlWithMetadata]] = None
+
+ #: Key for a specific entry in an origin's shared storage.
+ #: Present only for SharedStorageAccessType.documentSet,
+ #: SharedStorageAccessType.documentAppend,
+ #: SharedStorageAccessType.documentDelete,
+ #: SharedStorageAccessType.workletSet,
+ #: SharedStorageAccessType.workletAppend,
+ #: SharedStorageAccessType.workletDelete, and
+ #: SharedStorageAccessType.workletGet.
+ key: typing.Optional[str] = None
+
+ #: Value for a specific entry in an origin's shared storage.
+ #: Present only for SharedStorageAccessType.documentSet,
+ #: SharedStorageAccessType.documentAppend,
+ #: SharedStorageAccessType.workletSet, and
+ #: SharedStorageAccessType.workletAppend.
+ value: typing.Optional[str] = None
+
+ #: Whether or not to set an entry for a key if that key is already present.
+ #: Present only for SharedStorageAccessType.documentSet and
+ #: SharedStorageAccessType.workletSet.
+ ignore_if_present: typing.Optional[bool] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ if self.script_source_url is not None:
+ json['scriptSourceUrl'] = self.script_source_url
+ if self.operation_name is not None:
+ json['operationName'] = self.operation_name
+ if self.serialized_data is not None:
+ json['serializedData'] = self.serialized_data
+ if self.urls_with_metadata is not None:
+ json['urlsWithMetadata'] = [i.to_json() for i in self.urls_with_metadata]
+ if self.key is not None:
+ json['key'] = self.key
+ if self.value is not None:
+ json['value'] = self.value
+ if self.ignore_if_present is not None:
+ json['ignoreIfPresent'] = self.ignore_if_present
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> SharedStorageAccessParams:
+ return cls(
+ script_source_url=str(json['scriptSourceUrl']) if json.get('scriptSourceUrl', None) is not None else None,
+ operation_name=str(json['operationName']) if json.get('operationName', None) is not None else None,
+ serialized_data=str(json['serializedData']) if json.get('serializedData', None) is not None else None,
+ urls_with_metadata=[SharedStorageUrlWithMetadata.from_json(i) for i in json['urlsWithMetadata']] if json.get('urlsWithMetadata', None) is not None else None,
+ key=str(json['key']) if json.get('key', None) is not None else None,
+ value=str(json['value']) if json.get('value', None) is not None else None,
+ ignore_if_present=bool(json['ignoreIfPresent']) if json.get('ignoreIfPresent', None) is not None else None,
+ )
+
+
+class StorageBucketsDurability(enum.Enum):
+ RELAXED = "relaxed"
+ STRICT = "strict"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> StorageBucketsDurability:
+ return cls(json)
+
+
+@dataclass
+class StorageBucketInfo:
+ storage_key: SerializedStorageKey
+
+ id_: str
+
+ name: str
+
+ is_default: bool
+
+ expiration: network.TimeSinceEpoch
+
+ #: Storage quota (bytes).
+ quota: float
+
+ persistent: bool
+
+ durability: StorageBucketsDurability
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['storageKey'] = self.storage_key.to_json()
+ json['id'] = self.id_
+ json['name'] = self.name
+ json['isDefault'] = self.is_default
+ json['expiration'] = self.expiration.to_json()
+ json['quota'] = self.quota
+ json['persistent'] = self.persistent
+ json['durability'] = self.durability.to_json()
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> StorageBucketInfo:
+ return cls(
+ storage_key=SerializedStorageKey.from_json(json['storageKey']),
+ id_=str(json['id']),
+ name=str(json['name']),
+ is_default=bool(json['isDefault']),
+ expiration=network.TimeSinceEpoch.from_json(json['expiration']),
+ quota=float(json['quota']),
+ persistent=bool(json['persistent']),
+ durability=StorageBucketsDurability.from_json(json['durability']),
)
+def get_storage_key_for_frame(
+ frame_id: page.FrameId
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,SerializedStorageKey]:
+ '''
+ Returns a storage key given a frame id.
+
+ :param frame_id:
+ :returns:
+ '''
+ params: T_JSON_DICT = dict()
+ params['frameId'] = frame_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.getStorageKeyForFrame',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return SerializedStorageKey.from_json(json['storageKey'])
+
+
def clear_data_for_origin(
origin: str,
storage_types: str
@@ -219,6 +515,26 @@ def clear_data_for_origin(
json = yield cmd_dict
+def clear_data_for_storage_key(
+ storage_key: str,
+ storage_types: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Clears storage for storage key.
+
+ :param storage_key: Storage key.
+ :param storage_types: Comma separated list of StorageType to clear.
+ '''
+ params: T_JSON_DICT = dict()
+ params['storageKey'] = storage_key
+ params['storageTypes'] = storage_types
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.clearDataForStorageKey',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def get_cookies(
browser_context_id: typing.Optional[browser.BrowserContextID] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[network.Cookie]]:
@@ -347,6 +663,23 @@ def track_cache_storage_for_origin(
json = yield cmd_dict
+def track_cache_storage_for_storage_key(
+ storage_key: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Registers storage key to be notified when an update occurs to its cache storage list.
+
+ :param storage_key: Storage key.
+ '''
+ params: T_JSON_DICT = dict()
+ params['storageKey'] = storage_key
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.trackCacheStorageForStorageKey',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def track_indexed_db_for_origin(
origin: str
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
@@ -364,6 +697,23 @@ def track_indexed_db_for_origin(
json = yield cmd_dict
+def track_indexed_db_for_storage_key(
+ storage_key: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Registers storage key to be notified when an update occurs to its IndexedDB.
+
+ :param storage_key: Storage key.
+ '''
+ params: T_JSON_DICT = dict()
+ params['storageKey'] = storage_key
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.trackIndexedDBForStorageKey',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def untrack_cache_storage_for_origin(
origin: str
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
@@ -381,6 +731,23 @@ def untrack_cache_storage_for_origin(
json = yield cmd_dict
+def untrack_cache_storage_for_storage_key(
+ storage_key: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Unregisters storage key from receiving notifications for cache storage.
+
+ :param storage_key: Storage key.
+ '''
+ params: T_JSON_DICT = dict()
+ params['storageKey'] = storage_key
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.untrackCacheStorageForStorageKey',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def untrack_indexed_db_for_origin(
origin: str
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
@@ -398,6 +765,23 @@ def untrack_indexed_db_for_origin(
json = yield cmd_dict
+def untrack_indexed_db_for_storage_key(
+ storage_key: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Unregisters storage key from receiving notifications for IndexedDB.
+
+ :param storage_key: Storage key.
+ '''
+ params: T_JSON_DICT = dict()
+ params['storageKey'] = storage_key
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.untrackIndexedDBForStorageKey',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def get_trust_tokens() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[TrustTokens]]:
'''
Returns the number of stored Trust Tokens per issuer for the
@@ -479,6 +863,200 @@ def set_interest_group_tracking(
json = yield cmd_dict
+def get_shared_storage_metadata(
+ owner_origin: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,SharedStorageMetadata]:
+ '''
+ Gets metadata for an origin's shared storage.
+
+ **EXPERIMENTAL**
+
+ :param owner_origin:
+ :returns:
+ '''
+ params: T_JSON_DICT = dict()
+ params['ownerOrigin'] = owner_origin
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.getSharedStorageMetadata',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return SharedStorageMetadata.from_json(json['metadata'])
+
+
+def get_shared_storage_entries(
+ owner_origin: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[SharedStorageEntry]]:
+ '''
+    Gets the entries in a given origin's shared storage.
+
+ **EXPERIMENTAL**
+
+ :param owner_origin:
+ :returns:
+ '''
+ params: T_JSON_DICT = dict()
+ params['ownerOrigin'] = owner_origin
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.getSharedStorageEntries',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return [SharedStorageEntry.from_json(i) for i in json['entries']]
+
+
+def set_shared_storage_entry(
+ owner_origin: str,
+ key: str,
+ value: str,
+ ignore_if_present: typing.Optional[bool] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Sets entry with ``key`` and ``value`` for a given origin's shared storage.
+
+ **EXPERIMENTAL**
+
+ :param owner_origin:
+ :param key:
+ :param value:
+    :param ignore_if_present: *(Optional)* If ``ignoreIfPresent`` is included and true, then only sets the entry if ``key`` doesn't already exist.
+ '''
+ params: T_JSON_DICT = dict()
+ params['ownerOrigin'] = owner_origin
+ params['key'] = key
+ params['value'] = value
+ if ignore_if_present is not None:
+ params['ignoreIfPresent'] = ignore_if_present
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.setSharedStorageEntry',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def delete_shared_storage_entry(
+ owner_origin: str,
+ key: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Deletes entry for ``key`` (if it exists) for a given origin's shared storage.
+
+ **EXPERIMENTAL**
+
+ :param owner_origin:
+ :param key:
+ '''
+ params: T_JSON_DICT = dict()
+ params['ownerOrigin'] = owner_origin
+ params['key'] = key
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.deleteSharedStorageEntry',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def clear_shared_storage_entries(
+ owner_origin: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Clears all entries for a given origin's shared storage.
+
+ **EXPERIMENTAL**
+
+ :param owner_origin:
+ '''
+ params: T_JSON_DICT = dict()
+ params['ownerOrigin'] = owner_origin
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.clearSharedStorageEntries',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def reset_shared_storage_budget(
+ owner_origin: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Resets the budget for ``ownerOrigin`` by clearing all budget withdrawals.
+
+ **EXPERIMENTAL**
+
+ :param owner_origin:
+ '''
+ params: T_JSON_DICT = dict()
+ params['ownerOrigin'] = owner_origin
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.resetSharedStorageBudget',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def set_shared_storage_tracking(
+ enable: bool
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Enables/disables issuing of sharedStorageAccessed events.
+
+ **EXPERIMENTAL**
+
+ :param enable:
+ '''
+ params: T_JSON_DICT = dict()
+ params['enable'] = enable
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.setSharedStorageTracking',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def set_storage_bucket_tracking(
+ storage_key: str,
+ enable: bool
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Set tracking for a storage key's buckets.
+
+ **EXPERIMENTAL**
+
+ :param storage_key:
+ :param enable:
+ '''
+ params: T_JSON_DICT = dict()
+ params['storageKey'] = storage_key
+ params['enable'] = enable
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.setStorageBucketTracking',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def delete_storage_bucket(
+ storage_key: str,
+ bucket_name: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Deletes the Storage Bucket with the given storage key and bucket name.
+
+ **EXPERIMENTAL**
+
+ :param storage_key:
+ :param bucket_name:
+ '''
+ params: T_JSON_DICT = dict()
+ params['storageKey'] = storage_key
+ params['bucketName'] = bucket_name
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'Storage.deleteStorageBucket',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
@event_class('Storage.cacheStorageContentUpdated')
@dataclass
class CacheStorageContentUpdated:
@@ -487,6 +1065,8 @@ class CacheStorageContentUpdated:
'''
#: Origin to update.
origin: str
+ #: Storage key to update.
+ storage_key: str
#: Name of cache in origin.
cache_name: str
@@ -494,6 +1074,7 @@ class CacheStorageContentUpdated:
def from_json(cls, json: T_JSON_DICT) -> CacheStorageContentUpdated:
return cls(
origin=str(json['origin']),
+ storage_key=str(json['storageKey']),
cache_name=str(json['cacheName'])
)
@@ -506,11 +1087,14 @@ class CacheStorageListUpdated:
'''
#: Origin to update.
origin: str
+ #: Storage key to update.
+ storage_key: str
@classmethod
def from_json(cls, json: T_JSON_DICT) -> CacheStorageListUpdated:
return cls(
- origin=str(json['origin'])
+ origin=str(json['origin']),
+ storage_key=str(json['storageKey'])
)
@@ -522,6 +1106,8 @@ class IndexedDBContentUpdated:
'''
#: Origin to update.
origin: str
+ #: Storage key to update.
+ storage_key: str
#: Database to update.
database_name: str
#: ObjectStore to update.
@@ -531,6 +1117,7 @@ class IndexedDBContentUpdated:
def from_json(cls, json: T_JSON_DICT) -> IndexedDBContentUpdated:
return cls(
origin=str(json['origin']),
+ storage_key=str(json['storageKey']),
database_name=str(json['databaseName']),
object_store_name=str(json['objectStoreName'])
)
@@ -544,11 +1131,14 @@ class IndexedDBListUpdated:
'''
#: Origin to update.
origin: str
+ #: Storage key to update.
+ storage_key: str
@classmethod
def from_json(cls, json: T_JSON_DICT) -> IndexedDBListUpdated:
return cls(
- origin=str(json['origin'])
+ origin=str(json['origin']),
+ storage_key=str(json['storageKey'])
)
@@ -571,3 +1161,57 @@ def from_json(cls, json: T_JSON_DICT) -> InterestGroupAccessed:
owner_origin=str(json['ownerOrigin']),
name=str(json['name'])
)
+
+
+@event_class('Storage.sharedStorageAccessed')
+@dataclass
+class SharedStorageAccessed:
+ '''
+ Shared storage was accessed by the associated page.
+ The following parameters are included in all events.
+ '''
+ #: Time of the access.
+ access_time: network.TimeSinceEpoch
+ #: Enum value indicating the Shared Storage API method invoked.
+ type_: SharedStorageAccessType
+ #: DevTools Frame Token for the primary frame tree's root.
+ main_frame_id: page.FrameId
+ #: Serialized origin for the context that invoked the Shared Storage API.
+ owner_origin: str
+    #: The sub-parameters wrapped by ``params`` are all optional and their
+ #: presence/absence depends on ``type``.
+ params: SharedStorageAccessParams
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> SharedStorageAccessed:
+ return cls(
+ access_time=network.TimeSinceEpoch.from_json(json['accessTime']),
+ type_=SharedStorageAccessType.from_json(json['type']),
+ main_frame_id=page.FrameId.from_json(json['mainFrameId']),
+ owner_origin=str(json['ownerOrigin']),
+ params=SharedStorageAccessParams.from_json(json['params'])
+ )
+
+
+@event_class('Storage.storageBucketCreatedOrUpdated')
+@dataclass
+class StorageBucketCreatedOrUpdated:
+ bucket: StorageBucketInfo
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> StorageBucketCreatedOrUpdated:
+ return cls(
+ bucket=StorageBucketInfo.from_json(json['bucket'])
+ )
+
+
+@event_class('Storage.storageBucketDeleted')
+@dataclass
+class StorageBucketDeleted:
+ bucket_id: str
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> StorageBucketDeleted:
+ return cls(
+ bucket_id=str(json['bucketId'])
+ )
diff --git a/pycdp/cdp/system_info.py b/pycdp/cdp/system_info.py
index 4785d98..45fffd2 100644
--- a/pycdp/cdp/system_info.py
+++ b/pycdp/cdp/system_info.py
@@ -64,8 +64,8 @@ def from_json(cls, json: T_JSON_DICT) -> GPUDevice:
device_string=str(json['deviceString']),
driver_vendor=str(json['driverVendor']),
driver_version=str(json['driverVersion']),
- sub_sys_id=float(json['subSysId']) if 'subSysId' in json else None,
- revision=float(json['revision']) if 'revision' in json else None,
+ sub_sys_id=float(json['subSysId']) if json.get('subSysId', None) is not None else None,
+ revision=float(json['revision']) if json.get('revision', None) is not None else None,
)
@@ -277,8 +277,8 @@ def from_json(cls, json: T_JSON_DICT) -> GPUInfo:
video_decoding=[VideoDecodeAcceleratorCapability.from_json(i) for i in json['videoDecoding']],
video_encoding=[VideoEncodeAcceleratorCapability.from_json(i) for i in json['videoEncoding']],
image_decoding=[ImageDecodeAcceleratorCapability.from_json(i) for i in json['imageDecoding']],
- aux_attributes=dict(json['auxAttributes']) if 'auxAttributes' in json else None,
- feature_status=dict(json['featureStatus']) if 'featureStatus' in json else None,
+ aux_attributes=dict(json['auxAttributes']) if json.get('auxAttributes', None) is not None else None,
+ feature_status=dict(json['featureStatus']) if json.get('featureStatus', None) is not None else None,
)
@@ -336,6 +336,25 @@ def get_info() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.Tuple[GPUInfo,
)
+def get_feature_state(
+ feature_state: str
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,bool]:
+ '''
+ Returns information about the feature state.
+
+ :param feature_state:
+ :returns:
+ '''
+ params: T_JSON_DICT = dict()
+ params['featureState'] = feature_state
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'SystemInfo.getFeatureState',
+ 'params': params,
+ }
+ json = yield cmd_dict
+ return bool(json['featureEnabled'])
+
+
def get_process_info() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[ProcessInfo]]:
'''
Returns information about all running processes.
diff --git a/pycdp/cdp/target.py b/pycdp/cdp/target.py
index cb0b707..13bb14c 100644
--- a/pycdp/cdp/target.py
+++ b/pycdp/cdp/target.py
@@ -67,6 +67,10 @@ class TargetInfo:
browser_context_id: typing.Optional[browser.BrowserContextID] = None
+ #: Provides additional details for specific target types. For example, for
+ #: the type of "page", this may be set to "portal" or "prerender".
+ subtype: typing.Optional[str] = None
+
def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
json['targetId'] = self.target_id.to_json()
@@ -81,6 +85,8 @@ def to_json(self) -> T_JSON_DICT:
json['openerFrameId'] = self.opener_frame_id.to_json()
if self.browser_context_id is not None:
json['browserContextId'] = self.browser_context_id.to_json()
+ if self.subtype is not None:
+ json['subtype'] = self.subtype
return json
@classmethod
@@ -92,12 +98,60 @@ def from_json(cls, json: T_JSON_DICT) -> TargetInfo:
url=str(json['url']),
attached=bool(json['attached']),
can_access_opener=bool(json['canAccessOpener']),
- opener_id=TargetID.from_json(json['openerId']) if 'openerId' in json else None,
- opener_frame_id=page.FrameId.from_json(json['openerFrameId']) if 'openerFrameId' in json else None,
- browser_context_id=browser.BrowserContextID.from_json(json['browserContextId']) if 'browserContextId' in json else None,
+ opener_id=TargetID.from_json(json['openerId']) if json.get('openerId', None) is not None else None,
+ opener_frame_id=page.FrameId.from_json(json['openerFrameId']) if json.get('openerFrameId', None) is not None else None,
+ browser_context_id=browser.BrowserContextID.from_json(json['browserContextId']) if json.get('browserContextId', None) is not None else None,
+ subtype=str(json['subtype']) if json.get('subtype', None) is not None else None,
)
+@dataclass
+class FilterEntry:
+ '''
+ A filter used by target query/discovery/auto-attach operations.
+ '''
+    #: If set, causes exclusion of matching targets from the list.
+ exclude: typing.Optional[bool] = None
+
+ #: If not present, matches any type.
+ type_: typing.Optional[str] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ if self.exclude is not None:
+ json['exclude'] = self.exclude
+ if self.type_ is not None:
+ json['type'] = self.type_
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> FilterEntry:
+ return cls(
+ exclude=bool(json['exclude']) if json.get('exclude', None) is not None else None,
+ type_=str(json['type']) if json.get('type', None) is not None else None,
+ )
+
+
+class TargetFilter(list):
+ '''
+ The entries in TargetFilter are matched sequentially against targets and
+ the first entry that matches determines if the target is included or not,
+ depending on the value of ``exclude`` field in the entry.
+ If filter is not specified, the one assumed is
+ [{type: "browser", exclude: true}, {type: "tab", exclude: true}, {}]
+ (i.e. include everything but ``browser`` and ``tab``).
+ '''
+ def to_json(self) -> typing.List[FilterEntry]:
+ return self
+
+ @classmethod
+ def from_json(cls, json: typing.List[FilterEntry]) -> TargetFilter:
+ return cls(json)
+
+ def __repr__(self):
+ return 'TargetFilter({})'.format(super().__repr__())
+
+
@dataclass
class RemoteLocation:
host: str
@@ -279,7 +333,8 @@ def create_target(
browser_context_id: typing.Optional[browser.BrowserContextID] = None,
enable_begin_frame_control: typing.Optional[bool] = None,
new_window: typing.Optional[bool] = None,
- background: typing.Optional[bool] = None
+ background: typing.Optional[bool] = None,
+ for_tab: typing.Optional[bool] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,TargetID]:
'''
Creates a new page.
@@ -291,6 +346,7 @@ def create_target(
:param enable_begin_frame_control: **(EXPERIMENTAL)** *(Optional)* Whether BeginFrames for this target will be controlled via DevTools (headless chrome only, not supported on MacOS yet, false by default).
:param new_window: *(Optional)* Whether to create a new Window or Tab (chrome-only, false by default).
:param background: *(Optional)* Whether to create the target in background or foreground (chrome-only, false by default).
+ :param for_tab: **(EXPERIMENTAL)** *(Optional)* Whether to create the target of type "tab".
:returns: The id of the page opened.
'''
params: T_JSON_DICT = dict()
@@ -307,6 +363,8 @@ def create_target(
params['newWindow'] = new_window
if background is not None:
params['background'] = background
+ if for_tab is not None:
+ params['forTab'] = for_tab
cmd_dict: T_JSON_DICT = {
'method': 'Target.createTarget',
'params': params,
@@ -379,14 +437,21 @@ def get_target_info(
return TargetInfo.from_json(json['targetInfo'])
-def get_targets() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[TargetInfo]]:
+def get_targets(
+ filter_: typing.Optional[TargetFilter] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,typing.List[TargetInfo]]:
'''
Retrieves a list of available targets.
+ :param filter_: **(EXPERIMENTAL)** *(Optional)* Only targets matching filter will be reported. If filter is not specified and target discovery is currently enabled, a filter used for target discovery is used for consistency.
:returns: The list of targets.
'''
+ params: T_JSON_DICT = dict()
+ if filter_ is not None:
+ params['filter'] = filter_.to_json()
cmd_dict: T_JSON_DICT = {
'method': 'Target.getTargets',
+ 'params': params,
}
json = yield cmd_dict
return [TargetInfo.from_json(i) for i in json['targetInfos']]
@@ -425,7 +490,8 @@ def send_message_to_target(
def set_auto_attach(
auto_attach: bool,
wait_for_debugger_on_start: bool,
- flatten: typing.Optional[bool] = None
+ flatten: typing.Optional[bool] = None,
+ filter_: typing.Optional[TargetFilter] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Controls whether to automatically attach to new targets which are considered to be related to
@@ -439,12 +505,15 @@ def set_auto_attach(
:param auto_attach: Whether to auto-attach to related targets.
:param wait_for_debugger_on_start: Whether to pause new targets when attaching to them. Use ```Runtime.runIfWaitingForDebugger``` to run paused targets.
:param flatten: *(Optional)* Enables "flat" access to the session via specifying sessionId attribute in the commands. We plan to make this the default, deprecate non-flattened mode, and eventually retire it. See crbug.com/991325.
+ :param filter_: **(EXPERIMENTAL)** *(Optional)* Only targets matching filter will be attached.
'''
params: T_JSON_DICT = dict()
params['autoAttach'] = auto_attach
params['waitForDebuggerOnStart'] = wait_for_debugger_on_start
if flatten is not None:
params['flatten'] = flatten
+ if filter_ is not None:
+ params['filter'] = filter_.to_json()
cmd_dict: T_JSON_DICT = {
'method': 'Target.setAutoAttach',
'params': params,
@@ -454,7 +523,8 @@ def set_auto_attach(
def auto_attach_related(
target_id: TargetID,
- wait_for_debugger_on_start: bool
+ wait_for_debugger_on_start: bool,
+ filter_: typing.Optional[TargetFilter] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Adds the specified target to the list of targets that will be monitored for any related target
@@ -467,10 +537,13 @@ def auto_attach_related(
:param target_id:
:param wait_for_debugger_on_start: Whether to pause new targets when attaching to them. Use ```Runtime.runIfWaitingForDebugger``` to run paused targets.
+ :param filter_: **(EXPERIMENTAL)** *(Optional)* Only targets matching filter will be attached.
'''
params: T_JSON_DICT = dict()
params['targetId'] = target_id.to_json()
params['waitForDebuggerOnStart'] = wait_for_debugger_on_start
+ if filter_ is not None:
+ params['filter'] = filter_.to_json()
cmd_dict: T_JSON_DICT = {
'method': 'Target.autoAttachRelated',
'params': params,
@@ -479,16 +552,20 @@ def auto_attach_related(
def set_discover_targets(
- discover: bool
+ discover: bool,
+ filter_: typing.Optional[TargetFilter] = None
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Controls whether to discover available targets and notify via
``targetCreated/targetInfoChanged/targetDestroyed`` events.
:param discover: Whether to discover available targets.
+    :param filter_: **(EXPERIMENTAL)** *(Optional)* Only targets matching filter will be attached. If ``discover`` is false, ``filter`` must be omitted or empty.
'''
params: T_JSON_DICT = dict()
params['discover'] = discover
+ if filter_ is not None:
+ params['filter'] = filter_.to_json()
cmd_dict: T_JSON_DICT = {
'method': 'Target.setDiscoverTargets',
'params': params,
@@ -556,7 +633,7 @@ class DetachedFromTarget:
def from_json(cls, json: T_JSON_DICT) -> DetachedFromTarget:
return cls(
session_id=SessionID.from_json(json['sessionId']),
- target_id=TargetID.from_json(json['targetId']) if 'targetId' in json else None
+ target_id=TargetID.from_json(json['targetId']) if json.get('targetId', None) is not None else None
)
@@ -578,7 +655,7 @@ def from_json(cls, json: T_JSON_DICT) -> ReceivedMessageFromTarget:
return cls(
session_id=SessionID.from_json(json['sessionId']),
message=str(json['message']),
- target_id=TargetID.from_json(json['targetId']) if 'targetId' in json else None
+ target_id=TargetID.from_json(json['targetId']) if json.get('targetId', None) is not None else None
)
diff --git a/pycdp/cdp/tracing.py b/pycdp/cdp/tracing.py
index 84eeb4b..d69a605 100644
--- a/pycdp/cdp/tracing.py
+++ b/pycdp/cdp/tracing.py
@@ -34,6 +34,10 @@ class TraceConfig:
#: Controls how the trace buffer stores data.
record_mode: typing.Optional[str] = None
+ #: Size of the trace buffer in kilobytes. If not specified or zero is passed, a default value
+ #: of 200 MB would be used.
+ trace_buffer_size_in_kb: typing.Optional[float] = None
+
#: Turns on JavaScript stack sampling.
enable_sampling: typing.Optional[bool] = None
@@ -59,6 +63,8 @@ def to_json(self) -> T_JSON_DICT:
json: T_JSON_DICT = dict()
if self.record_mode is not None:
json['recordMode'] = self.record_mode
+ if self.trace_buffer_size_in_kb is not None:
+ json['traceBufferSizeInKb'] = self.trace_buffer_size_in_kb
if self.enable_sampling is not None:
json['enableSampling'] = self.enable_sampling
if self.enable_systrace is not None:
@@ -78,14 +84,15 @@ def to_json(self) -> T_JSON_DICT:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> TraceConfig:
return cls(
- record_mode=str(json['recordMode']) if 'recordMode' in json else None,
- enable_sampling=bool(json['enableSampling']) if 'enableSampling' in json else None,
- enable_systrace=bool(json['enableSystrace']) if 'enableSystrace' in json else None,
- enable_argument_filter=bool(json['enableArgumentFilter']) if 'enableArgumentFilter' in json else None,
- included_categories=[str(i) for i in json['includedCategories']] if 'includedCategories' in json else None,
- excluded_categories=[str(i) for i in json['excludedCategories']] if 'excludedCategories' in json else None,
- synthetic_delays=[str(i) for i in json['syntheticDelays']] if 'syntheticDelays' in json else None,
- memory_dump_config=MemoryDumpConfig.from_json(json['memoryDumpConfig']) if 'memoryDumpConfig' in json else None,
+ record_mode=str(json['recordMode']) if json.get('recordMode', None) is not None else None,
+ trace_buffer_size_in_kb=float(json['traceBufferSizeInKb']) if json.get('traceBufferSizeInKb', None) is not None else None,
+ enable_sampling=bool(json['enableSampling']) if json.get('enableSampling', None) is not None else None,
+ enable_systrace=bool(json['enableSystrace']) if json.get('enableSystrace', None) is not None else None,
+ enable_argument_filter=bool(json['enableArgumentFilter']) if json.get('enableArgumentFilter', None) is not None else None,
+ included_categories=[str(i) for i in json['includedCategories']] if json.get('includedCategories', None) is not None else None,
+ excluded_categories=[str(i) for i in json['excludedCategories']] if json.get('excludedCategories', None) is not None else None,
+ synthetic_delays=[str(i) for i in json['syntheticDelays']] if json.get('syntheticDelays', None) is not None else None,
+ memory_dump_config=MemoryDumpConfig.from_json(json['memoryDumpConfig']) if json.get('memoryDumpConfig', None) is not None else None,
)
@@ -293,9 +300,9 @@ class BufferUsage:
@classmethod
def from_json(cls, json: T_JSON_DICT) -> BufferUsage:
return cls(
- percent_full=float(json['percentFull']) if 'percentFull' in json else None,
- event_count=float(json['eventCount']) if 'eventCount' in json else None,
- value=float(json['value']) if 'value' in json else None
+ percent_full=float(json['percentFull']) if json.get('percentFull', None) is not None else None,
+ event_count=float(json['eventCount']) if json.get('eventCount', None) is not None else None,
+ value=float(json['value']) if json.get('value', None) is not None else None
)
@@ -303,8 +310,8 @@ def from_json(cls, json: T_JSON_DICT) -> BufferUsage:
@dataclass
class DataCollected:
'''
- Contains an bucket of collected trace events. When tracing is stopped collected events will be
- send as a sequence of dataCollected events followed by tracingComplete event.
+ Contains a bucket of collected trace events. When tracing is stopped collected events will be
+ sent as a sequence of dataCollected events followed by tracingComplete event.
'''
value: typing.List[dict]
@@ -336,7 +343,7 @@ class TracingComplete:
def from_json(cls, json: T_JSON_DICT) -> TracingComplete:
return cls(
data_loss_occurred=bool(json['dataLossOccurred']),
- stream=io.StreamHandle.from_json(json['stream']) if 'stream' in json else None,
- trace_format=StreamFormat.from_json(json['traceFormat']) if 'traceFormat' in json else None,
- stream_compression=StreamCompression.from_json(json['streamCompression']) if 'streamCompression' in json else None
+ stream=io.StreamHandle.from_json(json['stream']) if json.get('stream', None) is not None else None,
+ trace_format=StreamFormat.from_json(json['traceFormat']) if json.get('traceFormat', None) is not None else None,
+ stream_compression=StreamCompression.from_json(json['streamCompression']) if json.get('streamCompression', None) is not None else None
)
diff --git a/pycdp/cdp/web_audio.py b/pycdp/cdp/web_audio.py
index 71dc4ce..011f52c 100644
--- a/pycdp/cdp/web_audio.py
+++ b/pycdp/cdp/web_audio.py
@@ -214,7 +214,7 @@ def from_json(cls, json: T_JSON_DICT) -> BaseAudioContext:
callback_buffer_size=float(json['callbackBufferSize']),
max_output_channel_count=float(json['maxOutputChannelCount']),
sample_rate=float(json['sampleRate']),
- realtime_data=ContextRealtimeData.from_json(json['realtimeData']) if 'realtimeData' in json else None,
+ realtime_data=ContextRealtimeData.from_json(json['realtimeData']) if json.get('realtimeData', None) is not None else None,
)
@@ -535,8 +535,8 @@ def from_json(cls, json: T_JSON_DICT) -> NodesConnected:
context_id=GraphObjectId.from_json(json['contextId']),
source_id=GraphObjectId.from_json(json['sourceId']),
destination_id=GraphObjectId.from_json(json['destinationId']),
- source_output_index=float(json['sourceOutputIndex']) if 'sourceOutputIndex' in json else None,
- destination_input_index=float(json['destinationInputIndex']) if 'destinationInputIndex' in json else None
+ source_output_index=float(json['sourceOutputIndex']) if json.get('sourceOutputIndex', None) is not None else None,
+ destination_input_index=float(json['destinationInputIndex']) if json.get('destinationInputIndex', None) is not None else None
)
@@ -558,8 +558,8 @@ def from_json(cls, json: T_JSON_DICT) -> NodesDisconnected:
context_id=GraphObjectId.from_json(json['contextId']),
source_id=GraphObjectId.from_json(json['sourceId']),
destination_id=GraphObjectId.from_json(json['destinationId']),
- source_output_index=float(json['sourceOutputIndex']) if 'sourceOutputIndex' in json else None,
- destination_input_index=float(json['destinationInputIndex']) if 'destinationInputIndex' in json else None
+ source_output_index=float(json['sourceOutputIndex']) if json.get('sourceOutputIndex', None) is not None else None,
+ destination_input_index=float(json['destinationInputIndex']) if json.get('destinationInputIndex', None) is not None else None
)
@@ -580,7 +580,7 @@ def from_json(cls, json: T_JSON_DICT) -> NodeParamConnected:
context_id=GraphObjectId.from_json(json['contextId']),
source_id=GraphObjectId.from_json(json['sourceId']),
destination_id=GraphObjectId.from_json(json['destinationId']),
- source_output_index=float(json['sourceOutputIndex']) if 'sourceOutputIndex' in json else None
+ source_output_index=float(json['sourceOutputIndex']) if json.get('sourceOutputIndex', None) is not None else None
)
@@ -601,5 +601,5 @@ def from_json(cls, json: T_JSON_DICT) -> NodeParamDisconnected:
context_id=GraphObjectId.from_json(json['contextId']),
source_id=GraphObjectId.from_json(json['sourceId']),
destination_id=GraphObjectId.from_json(json['destinationId']),
- source_output_index=float(json['sourceOutputIndex']) if 'sourceOutputIndex' in json else None
+ source_output_index=float(json['sourceOutputIndex']) if json.get('sourceOutputIndex', None) is not None else None
)
diff --git a/pycdp/cdp/web_authn.py b/pycdp/cdp/web_authn.py
index 4cec9c2..2e2f5a1 100644
--- a/pycdp/cdp/web_authn.py
+++ b/pycdp/cdp/web_authn.py
@@ -93,6 +93,11 @@ class VirtualAuthenticatorOptions:
#: Defaults to false.
has_min_pin_length: typing.Optional[bool] = None
+ #: If set to true, the authenticator will support the prf extension.
+ #: https://w3c.github.io/webauthn/#prf-extension
+ #: Defaults to false.
+ has_prf: typing.Optional[bool] = None
+
#: If set to true, tests of user presence will succeed immediately.
#: Otherwise, they will not be resolved. Defaults to true.
automatic_presence_simulation: typing.Optional[bool] = None
@@ -117,6 +122,8 @@ def to_json(self) -> T_JSON_DICT:
json['hasCredBlob'] = self.has_cred_blob
if self.has_min_pin_length is not None:
json['hasMinPinLength'] = self.has_min_pin_length
+ if self.has_prf is not None:
+ json['hasPrf'] = self.has_prf
if self.automatic_presence_simulation is not None:
json['automaticPresenceSimulation'] = self.automatic_presence_simulation
if self.is_user_verified is not None:
@@ -128,14 +135,15 @@ def from_json(cls, json: T_JSON_DICT) -> VirtualAuthenticatorOptions:
return cls(
protocol=AuthenticatorProtocol.from_json(json['protocol']),
transport=AuthenticatorTransport.from_json(json['transport']),
- ctap2_version=Ctap2Version.from_json(json['ctap2Version']) if 'ctap2Version' in json else None,
- has_resident_key=bool(json['hasResidentKey']) if 'hasResidentKey' in json else None,
- has_user_verification=bool(json['hasUserVerification']) if 'hasUserVerification' in json else None,
- has_large_blob=bool(json['hasLargeBlob']) if 'hasLargeBlob' in json else None,
- has_cred_blob=bool(json['hasCredBlob']) if 'hasCredBlob' in json else None,
- has_min_pin_length=bool(json['hasMinPinLength']) if 'hasMinPinLength' in json else None,
- automatic_presence_simulation=bool(json['automaticPresenceSimulation']) if 'automaticPresenceSimulation' in json else None,
- is_user_verified=bool(json['isUserVerified']) if 'isUserVerified' in json else None,
+ ctap2_version=Ctap2Version.from_json(json['ctap2Version']) if json.get('ctap2Version', None) is not None else None,
+ has_resident_key=bool(json['hasResidentKey']) if json.get('hasResidentKey', None) is not None else None,
+ has_user_verification=bool(json['hasUserVerification']) if json.get('hasUserVerification', None) is not None else None,
+ has_large_blob=bool(json['hasLargeBlob']) if json.get('hasLargeBlob', None) is not None else None,
+ has_cred_blob=bool(json['hasCredBlob']) if json.get('hasCredBlob', None) is not None else None,
+ has_min_pin_length=bool(json['hasMinPinLength']) if json.get('hasMinPinLength', None) is not None else None,
+ has_prf=bool(json['hasPrf']) if json.get('hasPrf', None) is not None else None,
+ automatic_presence_simulation=bool(json['automaticPresenceSimulation']) if json.get('automaticPresenceSimulation', None) is not None else None,
+ is_user_verified=bool(json['isUserVerified']) if json.get('isUserVerified', None) is not None else None,
)
@@ -186,19 +194,27 @@ def from_json(cls, json: T_JSON_DICT) -> Credential:
is_resident_credential=bool(json['isResidentCredential']),
private_key=str(json['privateKey']),
sign_count=int(json['signCount']),
- rp_id=str(json['rpId']) if 'rpId' in json else None,
- user_handle=str(json['userHandle']) if 'userHandle' in json else None,
- large_blob=str(json['largeBlob']) if 'largeBlob' in json else None,
+ rp_id=str(json['rpId']) if json.get('rpId', None) is not None else None,
+ user_handle=str(json['userHandle']) if json.get('userHandle', None) is not None else None,
+ large_blob=str(json['largeBlob']) if json.get('largeBlob', None) is not None else None,
)
-def enable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+def enable(
+ enable_ui: typing.Optional[bool] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
'''
Enable the WebAuthn domain and start intercepting credential storage and
retrieval with a virtual authenticator.
+
+ :param enable_ui: *(Optional)* Whether to enable the WebAuthn user interface. Enabling the UI is recommended for debugging and demo purposes, as it is closer to the real experience. Disabling the UI is recommended for automated testing. Supported at the embedder's discretion if UI is available. Defaults to false.
'''
+ params: T_JSON_DICT = dict()
+ if enable_ui is not None:
+ params['enableUI'] = enable_ui
cmd_dict: T_JSON_DICT = {
'method': 'WebAuthn.enable',
+ 'params': params,
}
json = yield cmd_dict
@@ -232,6 +248,35 @@ def add_virtual_authenticator(
return AuthenticatorId.from_json(json['authenticatorId'])
+def set_response_override_bits(
+ authenticator_id: AuthenticatorId,
+ is_bogus_signature: typing.Optional[bool] = None,
+ is_bad_uv: typing.Optional[bool] = None,
+ is_bad_up: typing.Optional[bool] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Resets parameters isBogusSignature, isBadUV, isBadUP to false if they are not present.
+
+ :param authenticator_id:
+ :param is_bogus_signature: *(Optional)* If isBogusSignature is set, overrides the signature in the authenticator response to be zero. Defaults to false.
+ :param is_bad_uv: *(Optional)* If isBadUV is set, overrides the UV bit in the flags in the authenticator response to be zero. Defaults to false.
+ :param is_bad_up: *(Optional)* If isBadUP is set, overrides the UP bit in the flags in the authenticator response to be zero. Defaults to false.
+ '''
+ params: T_JSON_DICT = dict()
+ params['authenticatorId'] = authenticator_id.to_json()
+ if is_bogus_signature is not None:
+ params['isBogusSignature'] = is_bogus_signature
+ if is_bad_uv is not None:
+ params['isBadUV'] = is_bad_uv
+ if is_bad_up is not None:
+ params['isBadUP'] = is_bad_up
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'WebAuthn.setResponseOverrideBits',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
def remove_virtual_authenticator(
authenticator_id: AuthenticatorId
) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
@@ -388,3 +433,37 @@ def set_automatic_presence_simulation(
'params': params,
}
json = yield cmd_dict
+
+
+@event_class('WebAuthn.credentialAdded')
+@dataclass
+class CredentialAdded:
+ '''
+ Triggered when a credential is added to an authenticator.
+ '''
+ authenticator_id: AuthenticatorId
+ credential: Credential
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> CredentialAdded:
+ return cls(
+ authenticator_id=AuthenticatorId.from_json(json['authenticatorId']),
+ credential=Credential.from_json(json['credential'])
+ )
+
+
+@event_class('WebAuthn.credentialAsserted')
+@dataclass
+class CredentialAsserted:
+ '''
+ Triggered when a credential is used in a webauthn assertion.
+ '''
+ authenticator_id: AuthenticatorId
+ credential: Credential
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> CredentialAsserted:
+ return cls(
+ authenticator_id=AuthenticatorId.from_json(json['authenticatorId']),
+ credential=Credential.from_json(json['credential'])
+ )
diff --git a/pycdp/gen/generate.py b/pycdp/gen/generate.py
index 5544490..baab7c3 100644
--- a/pycdp/gen/generate.py
+++ b/pycdp/gen/generate.py
@@ -1071,7 +1071,10 @@ def file_type(path: str):
for domain in domains:
logger.info('Generating module: %s → %s/%s.py', domain.domain, output, domain.module)
(output / f'{domain.module}.py').write_text(domain.generate_code())
- shutil.copyfile(Path(__file__).parent.parent / 'cdp' / 'util.py', output / 'util.py')
+ try:
+ shutil.copyfile(Path(__file__).parent.parent / 'cdp' / 'util.py', output / 'util.py')
+ except shutil.SameFileError:
+ pass
generate_init(output / '__init__.py', domains)
(output / 'README.md').write_text(GENERATED_PACKAGE_NOTICE)
(output / 'py.typed').touch()
diff --git a/pyproject.toml b/pyproject.toml
index dbc1529..60f853f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -21,7 +21,7 @@ authors = [
]
[tool.poetry.dependencies]
-python = "^3.8"
+python = "^3.8, <3.11"
deprecated = "1.2.9"
inflection = "0.4.0"
aiohttp = "3.8.1"
diff --git a/update-cdp.sh b/update-cdp.sh
index 2b58e7f..2c60c08 100755
--- a/update-cdp.sh
+++ b/update-cdp.sh
@@ -1,17 +1,29 @@
#!/bin/bash
-if [ -d "devtools-protocol" ] && { [ -f "devtools-protocol/browser_protocol.json" ] || [ -f "devtools-protocol/js_protocol.json" ]; }; then
- rm -f devtools-protocol/*
-fi
+clean_devtools_directory() {
+ if [ -d "devtools-protocol" ] && { [ -f "devtools-protocol/browser_protocol.json" ] || [ -f "devtools-protocol/js_protocol.json" ]; }; then
+ rm -f devtools-protocol/*
+ fi
+}
-wget -P devtools-protocol/ https://raw.githubusercontent.com/ChromeDevTools/devtools-protocol/master/json/browser_protocol.json https://raw.githubusercontent.com/ChromeDevTools/devtools-protocol/master/json/js_protocol.json
-if [ $? -ne 0 ]; then
- echo "Error: Failed to download files"
- exit 1
-fi
+download_protocol_files() {
+ if ! wget -P devtools-protocol/ https://raw.githubusercontent.com/ChromeDevTools/devtools-protocol/master/json/browser_protocol.json https://raw.githubusercontent.com/ChromeDevTools/devtools-protocol/master/json/js_protocol.json; then
+ echo "Error: Failed to download files"
+ exit 1
+ fi
+}
-cdpgen --browser-protocol devtools-protocol/browser_protocol.json --js-protocol devtools-protocol/js_protocol.json --output cdp/
-if [ $? -ne 0 ]; then
- echo "Error: Failed to execute cdpgen"
- exit 1
-fi
\ No newline at end of file
+generate_cdp_classes() {
+ if ! cdpgen --browser-protocol devtools-protocol/browser_protocol.json --js-protocol devtools-protocol/js_protocol.json --output pycdp/cdp/; then
+ echo "Error: Failed to execute cdpgen"
+ exit 1
+ fi
+}
+
+main() {
+ clean_devtools_directory
+ download_protocol_files
+ generate_cdp_classes
+}
+
+main
From d380c3a1f4a139631538f32e366976925afa8db5 Mon Sep 17 00:00:00 2001
From: TurboKach
Date: Sat, 22 Apr 2023 02:29:31 +0800
Subject: [PATCH 65/81] Delete temp folder after update
---
pycdp/cdp/README.md | 5 +
pycdp/cdp/device_access.py | 141 ++++++++++++
pycdp/cdp/fed_cm.py | 178 +++++++++++++++
pycdp/cdp/preload.py | 443 +++++++++++++++++++++++++++++++++++++
update-cdp.sh | 8 +
5 files changed, 775 insertions(+)
create mode 100644 pycdp/cdp/README.md
create mode 100644 pycdp/cdp/device_access.py
create mode 100644 pycdp/cdp/fed_cm.py
create mode 100644 pycdp/cdp/preload.py
diff --git a/pycdp/cdp/README.md b/pycdp/cdp/README.md
new file mode 100644
index 0000000..ec8abef
--- /dev/null
+++ b/pycdp/cdp/README.md
@@ -0,0 +1,5 @@
+## Generated by PyCDP
+The modules of this package were generated by [pycdp][1], do not modify their contents because the
+changes will be overwritten in next generations.
+
+[1]: https://github.com/HMaker/python-chrome-devtools-protocol
diff --git a/pycdp/cdp/device_access.py b/pycdp/cdp/device_access.py
new file mode 100644
index 0000000..2b1c60b
--- /dev/null
+++ b/pycdp/cdp/device_access.py
@@ -0,0 +1,141 @@
+# DO NOT EDIT THIS FILE!
+#
+# This file is generated from the CDP specification. If you need to make
+# changes, edit the generator and regenerate all of the modules.
+#
+# CDP domain: DeviceAccess (experimental)
+
+from __future__ import annotations
+import enum
+import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
+
+
+class RequestId(str):
+ '''
+ Device request id.
+ '''
+ def to_json(self) -> str:
+ return self
+
+ @classmethod
+ def from_json(cls, json: str) -> RequestId:
+ return cls(json)
+
+ def __repr__(self):
+ return 'RequestId({})'.format(super().__repr__())
+
+
+class DeviceId(str):
+ '''
+ A device id.
+ '''
+ def to_json(self) -> str:
+ return self
+
+ @classmethod
+ def from_json(cls, json: str) -> DeviceId:
+ return cls(json)
+
+ def __repr__(self):
+ return 'DeviceId({})'.format(super().__repr__())
+
+
+@dataclass
+class PromptDevice:
+ '''
+ Device information displayed in a user prompt to select a device.
+ '''
+ id_: DeviceId
+
+ #: Display name as it appears in a device request user prompt.
+ name: str
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['id'] = self.id_.to_json()
+ json['name'] = self.name
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> PromptDevice:
+ return cls(
+ id_=DeviceId.from_json(json['id']),
+ name=str(json['name']),
+ )
+
+
+def enable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Enable events in this domain.
+ '''
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'DeviceAccess.enable',
+ }
+ json = yield cmd_dict
+
+
+def disable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Disable events in this domain.
+ '''
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'DeviceAccess.disable',
+ }
+ json = yield cmd_dict
+
+
+def select_prompt(
+ id_: RequestId,
+ device_id: DeviceId
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Select a device in response to a DeviceAccess.deviceRequestPrompted event.
+
+ :param id_:
+ :param device_id:
+ '''
+ params: T_JSON_DICT = dict()
+ params['id'] = id_.to_json()
+ params['deviceId'] = device_id.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'DeviceAccess.selectPrompt',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def cancel_prompt(
+ id_: RequestId
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Cancel a prompt in response to a DeviceAccess.deviceRequestPrompted event.
+
+ :param id_:
+ '''
+ params: T_JSON_DICT = dict()
+ params['id'] = id_.to_json()
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'DeviceAccess.cancelPrompt',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+@event_class('DeviceAccess.deviceRequestPrompted')
+@dataclass
+class DeviceRequestPrompted:
+ '''
+ A device request opened a user prompt to select a device. Respond with the
+ selectPrompt or cancelPrompt command.
+ '''
+ id_: RequestId
+ devices: typing.List[PromptDevice]
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> DeviceRequestPrompted:
+ return cls(
+ id_=RequestId.from_json(json['id']),
+ devices=[PromptDevice.from_json(i) for i in json['devices']]
+ )
diff --git a/pycdp/cdp/fed_cm.py b/pycdp/cdp/fed_cm.py
new file mode 100644
index 0000000..823542b
--- /dev/null
+++ b/pycdp/cdp/fed_cm.py
@@ -0,0 +1,178 @@
+# DO NOT EDIT THIS FILE!
+#
+# This file is generated from the CDP specification. If you need to make
+# changes, edit the generator and regenerate all of the modules.
+#
+# CDP domain: FedCm (experimental)
+
+from __future__ import annotations
+import enum
+import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
+
+
+class LoginState(enum.Enum):
+ '''
+ Whether this is a sign-up or sign-in action for this account, i.e.
+ whether this account has ever been used to sign in to this RP before.
+ '''
+ SIGN_IN = "SignIn"
+ SIGN_UP = "SignUp"
+
+ def to_json(self) -> str:
+ return self.value
+
+ @classmethod
+ def from_json(cls, json: str) -> LoginState:
+ return cls(json)
+
+
+@dataclass
+class Account:
+ '''
+ Corresponds to IdentityRequestAccount
+ '''
+ account_id: str
+
+ email: str
+
+ name: str
+
+ given_name: str
+
+ picture_url: str
+
+ idp_config_url: str
+
+ idp_signin_url: str
+
+ login_state: LoginState
+
+ #: These two are only set if the loginState is signUp
+ terms_of_service_url: typing.Optional[str] = None
+
+ privacy_policy_url: typing.Optional[str] = None
+
+ def to_json(self) -> T_JSON_DICT:
+ json: T_JSON_DICT = dict()
+ json['accountId'] = self.account_id
+ json['email'] = self.email
+ json['name'] = self.name
+ json['givenName'] = self.given_name
+ json['pictureUrl'] = self.picture_url
+ json['idpConfigUrl'] = self.idp_config_url
+ json['idpSigninUrl'] = self.idp_signin_url
+ json['loginState'] = self.login_state.to_json()
+ if self.terms_of_service_url is not None:
+ json['termsOfServiceUrl'] = self.terms_of_service_url
+ if self.privacy_policy_url is not None:
+ json['privacyPolicyUrl'] = self.privacy_policy_url
+ return json
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> Account:
+ return cls(
+ account_id=str(json['accountId']),
+ email=str(json['email']),
+ name=str(json['name']),
+ given_name=str(json['givenName']),
+ picture_url=str(json['pictureUrl']),
+ idp_config_url=str(json['idpConfigUrl']),
+ idp_signin_url=str(json['idpSigninUrl']),
+ login_state=LoginState.from_json(json['loginState']),
+ terms_of_service_url=str(json['termsOfServiceUrl']) if json.get('termsOfServiceUrl', None) is not None else None,
+ privacy_policy_url=str(json['privacyPolicyUrl']) if json.get('privacyPolicyUrl', None) is not None else None,
+ )
+
+
+def enable(
+ disable_rejection_delay: typing.Optional[bool] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ :param disable_rejection_delay: *(Optional)* Allows callers to disable the promise rejection delay that would normally happen, if this is unimportant to what's being tested. (step 4 of https://fedidcg.github.io/FedCM/#browser-api-rp-sign-in)
+ '''
+ params: T_JSON_DICT = dict()
+ if disable_rejection_delay is not None:
+ params['disableRejectionDelay'] = disable_rejection_delay
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'FedCm.enable',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def disable() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'FedCm.disable',
+ }
+ json = yield cmd_dict
+
+
+def select_account(
+ dialog_id: str,
+ account_index: int
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ :param dialog_id:
+ :param account_index:
+ '''
+ params: T_JSON_DICT = dict()
+ params['dialogId'] = dialog_id
+ params['accountIndex'] = account_index
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'FedCm.selectAccount',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def dismiss_dialog(
+ dialog_id: str,
+ trigger_cooldown: typing.Optional[bool] = None
+ ) -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ :param dialog_id:
+ :param trigger_cooldown: *(Optional)*
+ '''
+ params: T_JSON_DICT = dict()
+ params['dialogId'] = dialog_id
+ if trigger_cooldown is not None:
+ params['triggerCooldown'] = trigger_cooldown
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'FedCm.dismissDialog',
+ 'params': params,
+ }
+ json = yield cmd_dict
+
+
+def reset_cooldown() -> typing.Generator[T_JSON_DICT,T_JSON_DICT,None]:
+ '''
+ Resets the cooldown time, if any, to allow the next FedCM call to show
+ a dialog even if one was recently dismissed by the user.
+ '''
+ cmd_dict: T_JSON_DICT = {
+ 'method': 'FedCm.resetCooldown',
+ }
+ json = yield cmd_dict
+
+
+@event_class('FedCm.dialogShown')
+@dataclass
+class DialogShown:
+ dialog_id: str
+ accounts: typing.List[Account]
+ #: These exist primarily so that the caller can verify the
+ #: RP context was used appropriately.
+ title: str
+ subtitle: typing.Optional[str]
+
+ @classmethod
+ def from_json(cls, json: T_JSON_DICT) -> DialogShown:
+ return cls(
+ dialog_id=str(json['dialogId']),
+ accounts=[Account.from_json(i) for i in json['accounts']],
+ title=str(json['title']),
+ subtitle=str(json['subtitle']) if json.get('subtitle', None) is not None else None
+ )
diff --git a/pycdp/cdp/preload.py b/pycdp/cdp/preload.py
new file mode 100644
index 0000000..a36043d
--- /dev/null
+++ b/pycdp/cdp/preload.py
@@ -0,0 +1,443 @@
+# DO NOT EDIT THIS FILE!
+#
+# This file is generated from the CDP specification. If you need to make
+# changes, edit the generator and regenerate all of the modules.
+#
+# CDP domain: Preload (experimental)
+
+from __future__ import annotations
+import enum
+import typing
+from dataclasses import dataclass
+from .util import event_class, T_JSON_DICT
+
+from . import dom
+from . import network
+from . import page
+
+
+class RuleSetId(str):
+ '''
+ Unique id
+ '''
+ def to_json(self) -> str:
+ return self
+
+ @classmethod
+ def from_json(cls, json: str) -> RuleSetId:
+ return cls(json)
+
+ def __repr__(self):
+ return 'RuleSetId({})'.format(super().__repr__())
+
+
+@dataclass
+class RuleSet:
+ '''
+ Corresponds to SpeculationRuleSet
+ '''
+ id_: RuleSetId
+
+ #: Identifies a document which the rule set is associated with.
+ loader_id: network.LoaderId
+
+ #: Source text of JSON representing the rule set. If it comes from
+ #: