From 9d6fe04da62b35368de4ec047ab940e3a4e06b77 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Thu, 1 Nov 2018 16:28:28 +0100 Subject: [PATCH 001/281] Make the client and server examples compatible. Fix #484. --- README.rst | 5 +++-- example/hello.py | 1 + 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index adc3a9210..863829c33 100644 --- a/README.rst +++ b/README.rst @@ -36,7 +36,7 @@ Python with a focus on correctness and simplicity. Built on top of ``asyncio``, Python's standard asynchronous I/O framework, it provides an elegant coroutine-based API. -Here's a client that says "Hello world!": +Here's how a client sends and receives messages (Python ≥ 3.6): .. copy-pasted because GitHub doesn't support the include directive @@ -50,11 +50,12 @@ Here's a client that says "Hello world!": async def hello(uri): async with websockets.connect(uri) as websocket: await websocket.send("Hello world!") + await websocket.recv() asyncio.get_event_loop().run_until_complete( hello('ws://localhost:8765')) -And here's an echo server (for Python ≥ 3.6): +And here's an echo server (Python ≥ 3.6): .. code:: python diff --git a/example/hello.py b/example/hello.py index bbb3d9a0e..f90c0de55 100755 --- a/example/hello.py +++ b/example/hello.py @@ -6,6 +6,7 @@ async def hello(uri): async with websockets.connect(uri) as websocket: await websocket.send("Hello world!") + await websocket.recv() asyncio.get_event_loop().run_until_complete( hello('ws://localhost:8765')) From 82b575bdcc98e9f9702c1f53d0b7414297383bca Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Thu, 1 Nov 2018 16:34:16 +0100 Subject: [PATCH 002/281] Make link to docs more prominent. Ref #484. --- README.rst | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index 863829c33..8c6fe1f03 100644 --- a/README.rst +++ b/README.rst @@ -72,9 +72,11 @@ And here's an echo server (Python ≥ 3.6): websockets.serve(echo, 'localhost', 8765)) asyncio.get_event_loop().run_forever() -Does that look good? `Start here`_. +Does that look good? -.. _Start here: https://websockets.readthedocs.io/en/stable/intro.html +`Start here!`_ + +.. _Start here!: https://websockets.readthedocs.io/en/stable/intro.html Why should I use ``websockets``? -------------------------------- From 59d4c2c7648c6a143923703f97a8af6c41e39e1f Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Thu, 1 Nov 2018 16:46:44 +0100 Subject: [PATCH 003/281] Point to the CoC in the README. --- README.rst | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index 8c6fe1f03..b64a32abd 100644 --- a/README.rst +++ b/README.rst @@ -129,12 +129,17 @@ Why shouldn't I use ``websockets``? What else? ---------- -Bug reports, patches and suggestions welcome! Just open an issue_ or send a -`pull request`_. +Bug reports, patches and suggestions are welcome! + +Please open an issue_ or send a `pull request`_. .. _issue: https://github.com/aaugustin/websockets/issues/new .. _pull request: https://github.com/aaugustin/websockets/compare/ +Participants must uphold the `Contributor Covenant code of conduct`_. + +.. _Contributor Covenant code of conduct: https://github.com/aaugustin/websockets/blob/master/CODE_OF_CONDUCT.md + ``websockets`` is released under the `BSD license`_. .. 
_BSD license: https://websockets.readthedocs.io/en/stable/license.html From 2f357dbeaa6513ced67d70142d947caef294a62f Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Thu, 1 Nov 2018 16:47:51 +0100 Subject: [PATCH 004/281] Link to the LICENSE on GitHub. --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index b64a32abd..b57317d19 100644 --- a/README.rst +++ b/README.rst @@ -142,4 +142,4 @@ Participants must uphold the `Contributor Covenant code of conduct`_. ``websockets`` is released under the `BSD license`_. -.. _BSD license: https://websockets.readthedocs.io/en/stable/license.html +.. _BSD license: https://github.com/aaugustin/websockets/blob/master/LICENSE From 391aa13091869cb1073e967be4295e83bd4649cc Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Thu, 1 Nov 2018 16:49:21 +0100 Subject: [PATCH 005/281] Better two months early than ten months late. --- LICENSE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LICENSE b/LICENSE index 7101662c8..b2962adba 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2013-2017 Aymeric Augustin and contributors. +Copyright (c) 2013-2019 Aymeric Augustin and contributors. All rights reserved. Redistribution and use in source and binary forms, with or without From 9668b5bb93a7ffb738125ae8f6c1e9002bc57c13 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Thu, 1 Nov 2018 16:50:56 +0100 Subject: [PATCH 006/281] Fix typo. --- docs/contributing.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index 21e2152c1..00a529243 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -14,7 +14,7 @@ report inappropriate behavior to aymeric DOT augustin AT fractalideas DOT com. *(If I'm the person with the inappropriate behavior, please accept my apologies. I know I can mess up. I can't expect you to tell me, but if you -chose to do so, I'll do my best to handle criticism constructively. +choose to do so, I'll do my best to handle criticism constructively. -- Aymeric)* Contributions From 5a92a1124f47b5d439ac38a607c5a47a2115d6d7 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Thu, 1 Nov 2018 17:12:23 +0100 Subject: [PATCH 007/281] Factor out CRLF stripping. --- src/websockets/http.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/websockets/http.py b/src/websockets/http.py index e56a4a2c5..507be0555 100644 --- a/src/websockets/http.py +++ b/src/websockets/http.py @@ -76,11 +76,10 @@ def read_request(stream): # version and because path isn't checked. Since WebSocket software tends # to implement HTTP/1.1 strictly, there's little need for lenient parsing. - # Given the implementation of read_line(), request_line ends with CRLF. request_line = yield from read_line(stream) # This may raise "ValueError: not enough values to unpack" - method, path, version = request_line[:-2].split(b' ', 2) + method, path, version = request_line.split(b' ', 2) if method != b'GET': raise ValueError("Unsupported HTTP method: %r" % method) @@ -118,11 +117,10 @@ def read_response(stream): # As in read_request, parsing is simple because a fixed value is expected # for version, status_code is a 3-digit number, and reason can be ignored. - # Given the implementation of read_line(), status_line ends with CRLF. 
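To illustrate the convention this refactor establishes: ``read_line()`` now returns the line with the trailing CRLF already stripped, so callers split it directly instead of slicing off the last two bytes. A minimal sketch (the sample request line is made up):

.. code:: python

    # What read_line() hands back after this change: no trailing CRLF.
    request_line = b'GET /chat HTTP/1.1'
    method, path, version = request_line.split(b' ', 2)
    assert (method, path, version) == (b'GET', b'/chat', b'HTTP/1.1')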
status_line = yield from read_line(stream) # This may raise "ValueError: not enough values to unpack" - version, status_code, reason = status_line[:-2].split(b' ', 2) + version, status_code, reason = status_line.split(b' ', 2) if version != b'HTTP/1.1': raise ValueError("Unsupported HTTP version: %r" % version) @@ -157,11 +155,11 @@ def read_headers(stream): headers = Headers() for _ in range(MAX_HEADERS + 1): line = yield from read_line(stream) - if line == b'\r\n': + if line == b'': break # This may raise "ValueError: not enough values to unpack" - name, value = line[:-2].split(b':', 1) + name, value = line.split(b':', 1) if not _token_re.fullmatch(name): raise ValueError("Invalid HTTP header name: %r" % name) value = value.strip(b' \t') @@ -185,6 +183,8 @@ def read_line(stream): ``stream`` is an :class:`~asyncio.StreamReader`. + Return :class:`bytes` without CRLF. + """ # Security: this is bounded by the StreamReader's limit (default = 32kB). line = yield from stream.readline() @@ -194,7 +194,7 @@ def read_line(stream): # Not mandatory but safe - https://tools.ietf.org/html/rfc7230#section-3.5 if not line.endswith(b'\r\n'): raise ValueError("Line without CRLF") - return line + return line[:-2] class MultipleValuesError(LookupError): From 82baae15dba99dc4b6d7a476be501a643c1ae1bf Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Thu, 1 Nov 2018 18:04:07 +0100 Subject: [PATCH 008/281] Add debug logs of HTTP requests and responses. Fix #493. --- docs/changelog.rst | 7 ++++++- src/websockets/client.py | 11 ++++++++++- src/websockets/http.py | 9 +++++---- src/websockets/protocol.py | 6 +++--- src/websockets/server.py | 9 ++++++++- tests/test_client_server.py | 4 ++-- tests/test_http.py | 5 ++++- 7 files changed, 38 insertions(+), 13 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index eea0693e0..4b2521d05 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -3,11 +3,16 @@ Changelog .. currentmodule:: websockets -7.1 +8.0 ... *In development* +.. warning:: + + **Version 8.0 adds the reason phrase to the return type of the low-level + API** :func:`~http.read_response` **.** + 7.0 ... diff --git a/src/websockets/client.py b/src/websockets/client.py index 9f92f18e8..2ee654ec0 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -5,6 +5,7 @@ import asyncio import collections.abc +import logging import sys from .exceptions import ( @@ -29,6 +30,8 @@ __all__ = ['connect', 'WebSocketClientProtocol'] +logger = logging.getLogger(__name__) + class WebSocketClientProtocol(WebSocketCommonProtocol): """ @@ -66,6 +69,9 @@ def write_http_request(self, path, headers): self.path = path self.request_headers = headers + logger.debug("%s > GET %s HTTP/1.1", self.side, path) + logger.debug("%s > %r", self.side, headers) + # Since the path and headers only contain ASCII characters, # we can keep this simple. 
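These handshake traces go through the standard ``logging`` module; a minimal sketch for surfacing them in an application (the format string is only an example):

.. code:: python

    import logging

    # The loggers are named after the modules (websockets.client,
    # websockets.server, ...), so configuring the parent logger is enough.
    logging.basicConfig(format='%(asctime)s %(name)s %(message)s')
    logging.getLogger('websockets').setLevel(logging.DEBUG)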
request = 'GET {path} HTTP/1.1\r\n'.format(path=path) @@ -87,10 +93,13 @@ def read_http_response(self): """ try: - status_code, headers = yield from read_response(self.reader) + status_code, reason, headers = yield from read_response(self.reader) except ValueError as exc: raise InvalidMessage("Malformed HTTP message") from exc + logger.debug("%s < HTTP/1.1 %d %s", self.side, status_code, reason) + logger.debug("%s < %r", self.side, headers) + self.response_headers = headers return status_code, self.response_headers diff --git a/src/websockets/http.py b/src/websockets/http.py index 507be0555..5062c03d7 100644 --- a/src/websockets/http.py +++ b/src/websockets/http.py @@ -85,7 +85,6 @@ def read_request(stream): raise ValueError("Unsupported HTTP method: %r" % method) if version != b'HTTP/1.1': raise ValueError("Unsupported HTTP version: %r" % version) - path = path.decode('ascii', 'surrogateescape') headers = yield from read_headers(stream) @@ -100,8 +99,9 @@ def read_response(stream): ``stream`` is an :class:`~asyncio.StreamReader`. - Return ``(status_code, headers)`` where ``status_code`` is a :class:`int` - and ``headers`` is a :class:`Headers` instance. + Return ``(status_code, reason, headers)`` where ``status_code`` is an + :class:`int`, ``reason`` is a :class:`str`, and ``headers`` is a + :class:`Headers` instance. Non-ASCII characters are represented with surrogate escapes. @@ -130,10 +130,11 @@ def read_response(stream): raise ValueError("Unsupported HTTP status code: %d" % status_code) if not _value_re.fullmatch(reason): raise ValueError("Invalid HTTP reason phrase: %r" % reason) + reason = reason.decode() headers = yield from read_headers(stream) - return status_code, headers + return status_code, reason, headers @asyncio.coroutine diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index ae87c450b..ebbf95530 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -883,7 +883,7 @@ def read_frame(self, max_size): max_size=max_size, extensions=self.extensions, ) - logger.debug("%s < %s", self.side, frame) + logger.debug("%s < %r", self.side, frame) return frame @asyncio.coroutine @@ -895,7 +895,7 @@ def write_frame(self, fin, opcode, data, *, _expected_state=State.OPEN): ) frame = Frame(fin, opcode, data) - logger.debug("%s > %s", self.side, frame) + logger.debug("%s > %r", self.side, frame) frame.write(self.writer.write, mask=self.is_client, extensions=self.extensions) # Backport of https://github.com/python/asyncio/pull/280. 
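Because the return type of the low-level ``read_response()`` changes here, callers now unpack three values. A hedged sketch of the updated call; ``check_upgrade`` is a hypothetical helper, not part of the library:

.. code:: python

    import asyncio

    from websockets.http import read_response

    @asyncio.coroutine
    def check_upgrade(stream):
        # stream is an asyncio.StreamReader; the reason phrase is the new
        # element in the returned tuple.
        status_code, reason, headers = yield from read_response(stream)
        return status_code == 101 and headers['Upgrade'].lower() == 'websocket'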
@@ -1139,7 +1139,7 @@ def fail_connection(self, code=1006, reason=''): logger.debug("%s - state = CLOSING", self.side) frame = Frame(True, OP_CLOSE, frame_data) - logger.debug("%s > %s", self.side, frame) + logger.debug("%s > %r", self.side, frame) frame.write( self.writer.write, mask=self.is_client, extensions=self.extensions ) diff --git a/src/websockets/server.py b/src/websockets/server.py index 556c270d4..5465ccd7e 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -214,10 +214,13 @@ def read_http_request(self): except ValueError as exc: raise InvalidMessage("Malformed HTTP message") from exc + logger.debug("%s < GET %s HTTP/1.1", self.side, path) + logger.debug("%s < %r", self.side, headers) + self.path = path self.request_headers = headers - return path, self.request_headers + return path, headers @asyncio.coroutine def write_http_response(self, status, headers, body=None): @@ -229,6 +232,9 @@ def write_http_response(self, status, headers, body=None): """ self.response_headers = headers + logger.debug("%s > HTTP/1.1 %d %s", self.side, status.value, status.phrase) + logger.debug("%s > %r", self.side, headers) + # Since the status line and headers only contain ASCII characters, # we can keep this simple. response = 'HTTP/1.1 {status.value} {status.phrase}\r\n'.format(status=status) @@ -237,6 +243,7 @@ def write_http_response(self, status, headers, body=None): self.writer.write(response.encode()) if body is not None: + logger.debug("%s > Body (%d bytes)", self.side, len(body)) self.writer.write(body) @asyncio.coroutine diff --git a/tests/test_client_server.py b/tests/test_client_server.py index dee44a662..0d6ee144d 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -879,8 +879,8 @@ def wrong_build_response(headers, key): def test_server_does_not_switch_protocols(self, _read_response): @asyncio.coroutine def wrong_read_response(stream): - status_code, headers = yield from read_response(stream) - return 400, headers + status_code, reason, headers = yield from read_response(stream) + return 400, 'Bad Request', headers _read_response.side_effect = wrong_read_response diff --git a/tests/test_http.py b/tests/test_http.py index b18e24a26..c222b370f 100644 --- a/tests/test_http.py +++ b/tests/test_http.py @@ -43,8 +43,11 @@ def test_read_response(self): b'Sec-WebSocket-Protocol: chat\r\n' b'\r\n' ) - status_code, headers = self.loop.run_until_complete(read_response(self.stream)) + status_code, reason, headers = self.loop.run_until_complete( + read_response(self.stream) + ) self.assertEqual(status_code, 101) + self.assertEqual(reason, 'Switching Protocols') self.assertEqual(headers['Upgrade'], 'websocket') def test_request_method(self): From 10b16ab82cccb7651a22597e3b8be2c61705889b Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Thu, 1 Nov 2018 19:22:08 +0100 Subject: [PATCH 009/281] Shorten debug logs a bit. --- src/websockets/protocol.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index ebbf95530..eb34c9174 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -1106,11 +1106,10 @@ def fail_connection(self, code=1006, reason=''): """ logger.debug( - "%s ! failing WebSocket connection in the %s state: %d %s", + "%s ! failing %s WebSocket connection with code %d", self.side, self.state.name, code, - reason or '[no reason]', ) # Cancel transfer_data_task if the opening handshake succeeded. 
From 6e315128b575de9240a4e3603ba3c9c095e0edd0 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Thu, 1 Nov 2018 20:42:33 +0100 Subject: [PATCH 010/281] Make write_http_request/response synchronous. They make small writes early in the lifetime of the connection so they're extremely unlikely to require draining the write buffer. --- src/websockets/client.py | 3 +-- src/websockets/server.py | 5 ++--- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/src/websockets/client.py b/src/websockets/client.py index 2ee654ec0..2de160e9c 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -60,7 +60,6 @@ def __init__( self.extra_headers = extra_headers super().__init__(**kwds) - @asyncio.coroutine def write_http_request(self, path, headers): """ Write request line and headers to the HTTP request. @@ -287,7 +286,7 @@ def handshake( request_headers.setdefault('User-Agent', USER_AGENT) - yield from self.write_http_request(wsuri.resource_name, request_headers) + self.write_http_request(wsuri.resource_name, request_headers) status_code, response_headers = yield from self.read_http_response() diff --git a/src/websockets/server.py b/src/websockets/server.py index 5465ccd7e..fd3ecf30e 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -160,7 +160,7 @@ def handler(self): headers.setdefault('Content-Type', 'text/plain') headers.setdefault('Connection', 'close') - yield from self.write_http_response(status, headers, body) + self.write_http_response(status, headers, body) self.fail_connection() yield from self.wait_closed() return @@ -222,7 +222,6 @@ def read_http_request(self): return path, headers - @asyncio.coroutine def write_http_response(self, status, headers, body=None): """ Write status line and headers to the HTTP response. @@ -524,7 +523,7 @@ def handshake( response_headers.setdefault('Date', email.utils.formatdate(usegmt=True)) response_headers.setdefault('Server', USER_AGENT) - yield from self.write_http_response(SWITCHING_PROTOCOLS, response_headers) + self.write_http_response(SWITCHING_PROTOCOLS, response_headers) self.connection_open() From 4f1a14c341df27338460db97ea6376571dc3ada7 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Thu, 1 Nov 2018 21:33:26 +0100 Subject: [PATCH 011/281] Declare process_request as function by default. This keeps things simple. --- src/websockets/server.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/src/websockets/server.py b/src/websockets/server.py index fd3ecf30e..b42068764 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -245,14 +245,13 @@ def write_http_response(self, status, headers, body=None): logger.debug("%s > Body (%d bytes)", self.side, len(body)) self.writer.write(body) - @asyncio.coroutine def process_request(self, path, request_headers): """ Intercept the HTTP request and return an HTTP response if needed. ``request_headers`` is a :class:`~websockets.http.Headers` instance. - If this coroutine returns ``None``, the WebSocket handshake continues. + If this method returns ``None``, the WebSocket handshake continues. If it returns a status code, headers and a response body, that HTTP response is sent and the connection is closed. @@ -271,12 +270,12 @@ def process_request(self, path, request_headers): different status, for example to authenticate the request and return ``HTTPStatus.UNAUTHORIZED`` or ``HTTPStatus.FORBIDDEN``. - It is declared as a coroutine because such authentication checks are - likely to require network requests. 
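With ``process_request`` accepted as a plain function, simple HTTP interception no longer requires a coroutine. A sketch under assumed names (the ``/healthz`` path and the ``health_check`` helper are made up):

.. code:: python

    import http

    def health_check(path, request_headers):
        # Returning (status, headers, body) short-circuits the handshake;
        # returning None lets the WebSocket handshake continue.
        if path == '/healthz':
            return http.HTTPStatus.OK, {'Content-Type': 'text/plain'}, b'OK\n'

    # websockets.serve(handler, 'localhost', 8765, process_request=health_check)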
+ It can be declared as a function or as a coroutine because such + authentication checks are likely to require network requests. - This coroutine may be overridden by passing a ``process_request`` - argument to the :class:`WebSocketServerProtocol` constructor or the - :func:`serve` function. + It may also be overridden by passing a ``process_request`` argument to + the :class:`WebSocketServerProtocol` constructor or the :func:`serve` + function. """ From 771a5f2d1f1c873ea09a7a1191529a93f3f21846 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Thu, 1 Nov 2018 21:38:33 +0100 Subject: [PATCH 012/281] Return 503 on server shutdown during handshake. That was a regression in 71c4db9c. Fix #499. Ref #483. --- src/websockets/exceptions.py | 8 -------- src/websockets/server.py | 33 +++++++++++++++++++++------------ tests/test_client_server.py | 5 ++++- 3 files changed, 25 insertions(+), 21 deletions(-) diff --git a/src/websockets/exceptions.py b/src/websockets/exceptions.py index b1618fa73..b34a2c0dc 100644 --- a/src/websockets/exceptions.py +++ b/src/websockets/exceptions.py @@ -1,6 +1,5 @@ __all__ = [ 'AbortHandshake', - 'CancelHandshake', 'ConnectionClosed', 'DuplicateParameter', 'InvalidHandshake', @@ -44,13 +43,6 @@ def __init__(self, status, headers, body=b''): super().__init__(message) -class CancelHandshake(InvalidHandshake): - """ - Exception raised to cancel a handshake when the connection is closed. - - """ - - class InvalidMessage(InvalidHandshake): """ Exception raised when the HTTP message in a handshake request is malformed. diff --git a/src/websockets/server.py b/src/websockets/server.py index b42068764..1d88e73a1 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -14,13 +14,13 @@ BAD_REQUEST, FORBIDDEN, INTERNAL_SERVER_ERROR, + SERVICE_UNAVAILABLE, SWITCHING_PROTOCOLS, UPGRADE_REQUIRED, asyncio_ensure_future, ) from .exceptions import ( AbortHandshake, - CancelHandshake, InvalidHandshake, InvalidHeader, InvalidMessage, @@ -119,10 +119,6 @@ def handler(self): except ConnectionError: logger.debug("Connection error in opening handshake", exc_info=True) raise - except CancelHandshake: - self.fail_connection() - yield from self.wait_closed() - return except Exception as exc: if isinstance(exc, AbortHandshake): status, headers, body = exc.status, exc.headers, exc.body @@ -478,11 +474,9 @@ def handshake( else: early_response = self.process_request(path, request_headers) - # Give up immediately and don't attempt to write a HTTP response if - # the TCP connection was closed while process_request() was running. - # This happens if the server shuts down and calls fail_connection(). - if self.state != State.CONNECTING: - raise CancelHandshake() + # Change the response to a 503 error if the server is shutting down. + if not self.ws_server.is_serving(): + early_response = SERVICE_UNAVAILABLE, [], b"Server is shutting down.\n" if early_response is not None: raise AbortHandshake(*early_response) @@ -593,6 +587,16 @@ def unregister(self, protocol): """ self.websockets.remove(protocol) + def is_serving(self): + """ + Tell whether the server is accepting new connections or shutting down. + + """ + try: + return self.server.is_serving() # Python ≥ 3.7 + except AttributeError: # pragma: no cover + return self.server.sockets is not None # Python < 3.7 + def close(self): """ Close the server and terminate connections with close code 1001. @@ -626,7 +630,8 @@ def _close(self): # Close open connections. 
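From the client's perspective, a handshake that races with server shutdown now fails cleanly with a 503 instead of an aborted connection. A sketch of handling it; ``connect_with_retry`` is a hypothetical wrapper:

.. code:: python

    import asyncio

    import websockets
    from websockets.exceptions import InvalidStatusCode

    @asyncio.coroutine
    def connect_with_retry(uri, delay=1):
        try:
            return (yield from websockets.connect(uri))
        except InvalidStatusCode as exc:
            if exc.status_code != 503:      # 503: server is shutting down
                raise
            yield from asyncio.sleep(delay)
            return (yield from websockets.connect(uri))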
fail_connection() will cancel the transfer # data task, which is expected to cause the handler task to terminate. for websocket in self.websockets: - websocket.fail_connection(1001) + if websocket.state is State.OPEN: + websocket.fail_connection(1001) # asyncio.wait doesn't accept an empty first argument. if self.websockets: @@ -637,7 +642,11 @@ def _close(self): # and let the handler wait for the connection to close. yield from asyncio.wait( [websocket.handler_task for websocket in self.websockets] - + [websocket.close_connection_task for websocket in self.websockets], + + [ + websocket.close_connection_task + for websocket in self.websockets + if websocket.state is State.OPEN + ], loop=self.loop, ) diff --git a/tests/test_client_server.py b/tests/test_client_server.py index 0d6ee144d..73866ff63 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -935,8 +935,11 @@ def test_client_closes_connection_before_handshake(self, handshake): @with_server(create_protocol=SlowServerProtocol) def test_server_shuts_down_during_opening_handshake(self): self.loop.call_later(5 * MS, self.server.close) - with self.assertRaises(InvalidHandshake): + with self.assertRaises(InvalidStatusCode) as raised: self.start_client() + exception = raised.exception + self.assertEqual(str(exception), "Status code not 101: 503") + self.assertEqual(exception.status_code, 503) @with_server() def test_server_shuts_down_during_connection_handling(self): From 9329ef30f4af2c6720ca17aa3980fe59ac52efce Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 4 Nov 2018 21:25:39 +0100 Subject: [PATCH 013/281] Fix formatting in changelog. --- docs/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 4b2521d05..393abf1f8 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -29,7 +29,7 @@ Changelog .. warning:: **Version 7.0 changes how a server terminates connections when it's - closed with :meth:`~websockets.server.WebSocketServer.close`.** + closed with** :meth:`~websockets.server.WebSocketServer.close` **.** Previously, connections handlers were canceled. Now, connections are closed with close code 1001 (going away). From the perspective of the From 0d3c7411f62af0d407426fa028011ad65a845e8d Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 4 Nov 2018 10:38:38 +0100 Subject: [PATCH 014/281] Handle bytearray like bytes. Ref #478. --- src/websockets/framing.py | 16 ++++++---- src/websockets/protocol.py | 4 +-- src/websockets/speedups.c | 61 +++++++++++++++++++++++++++++++++++--- src/websockets/utils.py | 7 ++++- tests/test_framing.py | 9 +++++- tests/test_protocol.py | 20 +++++++++++++ tests/test_utils.py | 19 +++++++----- 7 files changed, 114 insertions(+), 22 deletions(-) diff --git a/src/websockets/framing.py b/src/websockets/framing.py index 00a24d807..850e7e7e2 100644 --- a/src/websockets/framing.py +++ b/src/websockets/framing.py @@ -237,18 +237,22 @@ def check(frame): def encode_data(data): """ - Helper that converts :class:`str` or :class:`bytes` to :class:`bytes`. + Convert a string or byte-like object to bytes. - :class:`str` are encoded with UTF-8. + If ``data`` is a :class:`str`, return a :class:`bytes` object encoding + ``data`` in UTF-8. + + If ``data`` is a bytes-like object, return a :class:`bytes` object. + + Raise :exc:`TypeError` for other inputs. """ - # Expect str or bytes, return bytes. 
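The reworked ``encode_data()`` keeps a small contract: strings become UTF-8 bytes, bytes-like objects become ``bytes``, anything else raises ``TypeError``. A sketch mirroring its unit tests:

.. code:: python

    from websockets.framing import encode_data

    assert encode_data('café') == b'caf\xc3\xa9'
    assert encode_data(bytearray(b'tea')) == b'tea'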
if isinstance(data, str): return data.encode('utf-8') - elif isinstance(data, bytes): - return data + elif isinstance(data, collections.abc.ByteString): + return bytes(data) else: - raise TypeError("data must be bytes or str") + raise TypeError("data must be bytes-like or str") def parse_close(data): diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index eb34c9174..7af86133f 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -466,7 +466,7 @@ def send(self, data): if isinstance(data, str): yield from self.write_frame(True, OP_TEXT, data.encode('utf-8')) - elif isinstance(data, bytes): + elif isinstance(data, collections.abc.ByteString): yield from self.write_frame(True, OP_BINARY, data) # Fragmented message -- regular iterator. @@ -483,7 +483,7 @@ def send(self, data): if isinstance(data, str): yield from self.write_frame(False, OP_TEXT, data.encode('utf-8')) encode_data = True - elif isinstance(data, bytes): + elif isinstance(data, collections.abc.ByteString): yield from self.write_frame(False, OP_BINARY, data) encode_data = False else: diff --git a/src/websockets/speedups.c b/src/websockets/speedups.c index 4d7622231..bb9c7053f 100644 --- a/src/websockets/speedups.c +++ b/src/websockets/speedups.c @@ -10,16 +10,50 @@ static const Py_ssize_t MASK_LEN = 4; +/* Similar to PyBytes_AsStringAndSize, but accepts more types */ + +static int +_PyBytesLike_AsStringAndSize(PyObject *obj, char **buffer, Py_ssize_t *length) +{ + if (PyBytes_Check(obj)) + { + *buffer = PyBytes_AS_STRING(obj); + *length = PyBytes_GET_SIZE(obj); + } + else if (PyByteArray_Check(obj)) + { + *buffer = PyByteArray_AS_STRING(obj); + *length = PyByteArray_GET_SIZE(obj); + } + else + { + PyErr_Format( + PyExc_TypeError, + "expected a bytes-like object, %.200s found", + Py_TYPE(obj)->tp_name); + return -1; + } + + return 0; +} + +/* C implementation of websockets.utils.apply_mask */ + static PyObject * apply_mask(PyObject *self, PyObject *args, PyObject *kwds) { - // Inputs are treated as immutable, which causes an extra memory copy. + // In order to support bytes and bytearray, accept any Python object. static char *kwlist[] = {"data", "mask", NULL}; - const char *input; + PyObject *input_obj; + PyObject *mask_obj; + + // A pointer to the underlying char * will be extracted from these inputs. + + char *input; Py_ssize_t input_len; - const char *mask; + char *mask; Py_ssize_t mask_len; // Initialize a PyBytesObject then get a pointer to the underlying char * @@ -27,10 +61,25 @@ apply_mask(PyObject *self, PyObject *args, PyObject *kwds) PyObject *result; char *output; + + // Other variables. + Py_ssize_t i = 0; + // Parse inputs. + if (!PyArg_ParseTupleAndKeywords( - args, kwds, "y#y#", kwlist, &input, &input_len, &mask, &mask_len)) + args, kwds, "OO", kwlist, &input_obj, &mask_obj)) + { + return NULL; + } + + if (_PyBytesLike_AsStringAndSize(input_obj, &input, &input_len) == -1) + { + return NULL; + } + + if (_PyBytesLike_AsStringAndSize(mask_obj, &mask, &mask_len) == -1) { return NULL; } @@ -41,6 +90,8 @@ apply_mask(PyObject *self, PyObject *args, PyObject *kwds) return NULL; } + // Create output. + result = PyBytes_FromStringAndSize(NULL, input_len); if (result == NULL) { @@ -50,6 +101,8 @@ apply_mask(PyObject *self, PyObject *args, PyObject *kwds) // Since we juste created result, we don't need error checks. output = PyBytes_AS_STRING(result); + // Perform the masking operation. + // Apparently GCC cannot figure out the following optimizations by itself. 
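The C extension and the pure Python fallback must agree on the masking primitive. A small sketch using the same vectors as the test suite; masking is a plain XOR, so applying it twice restores the input:

.. code:: python

    from websockets.utils import apply_mask

    masked = apply_mask(b'abcdABCD', b'1234')
    assert masked == b'PPPPpppp'
    assert apply_mask(masked, b'1234') == b'abcdABCD'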
// We need a new scope for MSVC 2010 (non C99 friendly) diff --git a/src/websockets/utils.py b/src/websockets/utils.py index b4083dff4..def997841 100644 --- a/src/websockets/utils.py +++ b/src/websockets/utils.py @@ -6,9 +6,14 @@ def apply_mask(data, mask): """ - Apply masking to websocket message. + Apply masking to the data of a WebSocket message. + + ``data`` and ``mask`` are bytes-like objects. + + Return :class:`bytes`. """ if len(mask) != 4: raise ValueError("mask must contain 4 bytes") + return bytes(b ^ m for b, m in zip(data, itertools.cycle(mask))) diff --git a/tests/test_framing.py b/tests/test_framing.py index 9da64f14c..ae5acc1a6 100644 --- a/tests/test_framing.py +++ b/tests/test_framing.py @@ -158,7 +158,14 @@ def test_encode_data_str(self): def test_encode_data_bytes(self): self.assertEqual(encode_data(b'tea'), b'tea') - def test_encode_data_other(self): + def test_encode_data_bytearray(self): + self.assertEqual(encode_data(bytearray(b'tea')), b'tea') + + def test_encode_data_list(self): + with self.assertRaises(TypeError): + encode_data([]) + + def test_encode_data_none(self): with self.assertRaises(TypeError): encode_data(None) diff --git a/tests/test_protocol.py b/tests/test_protocol.py index aee3289ea..c546e4e48 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -535,6 +535,10 @@ def test_send_binary(self): self.loop.run_until_complete(self.protocol.send(b'tea')) self.assertOneFrameSent(True, OP_BINARY, b'tea') + def test_send_binary_from_bytearray(self): + self.loop.run_until_complete(self.protocol.send(bytearray(b'tea'))) + self.assertOneFrameSent(True, OP_BINARY, b'tea') + def test_send_type_error(self): with self.assertRaises(TypeError): self.loop.run_until_complete(self.protocol.send(42)) @@ -554,6 +558,14 @@ def test_send_iterable_binary(self): (False, OP_BINARY, b'te'), (False, OP_CONT, b'a'), (True, OP_CONT, b'') ) + def test_send_iterable_binary_from_bytearray(self): + self.loop.run_until_complete( + self.protocol.send([bytearray(b'te'), bytearray(b'a')]) + ) + self.assertFramesSent( + (False, OP_BINARY, b'te'), (False, OP_CONT, b'a'), (True, OP_CONT, b'') + ) + def test_send_empty_iterable(self): self.loop.run_until_complete(self.protocol.send([])) self.assertNoFrameSent() @@ -616,6 +628,10 @@ def test_ping_binary(self): self.loop.run_until_complete(self.protocol.ping(b'tea')) self.assertOneFrameSent(True, OP_PING, b'tea') + def test_ping_binary_from_bytearray(self): + self.loop.run_until_complete(self.protocol.ping(bytearray(b'tea'))) + self.assertOneFrameSent(True, OP_PING, b'tea') + def test_ping_type_error(self): with self.assertRaises(TypeError): self.loop.run_until_complete(self.protocol.ping(42)) @@ -661,6 +677,10 @@ def test_pong_binary(self): self.loop.run_until_complete(self.protocol.pong(b'tea')) self.assertOneFrameSent(True, OP_PONG, b'tea') + def test_pong_binary_from_bytearray(self): + self.loop.run_until_complete(self.protocol.pong(bytearray(b'tea'))) + self.assertOneFrameSent(True, OP_PONG, b'tea') + def test_pong_type_error(self): with self.assertRaises(TypeError): self.loop.run_until_complete(self.protocol.pong(42)) diff --git a/tests/test_utils.py b/tests/test_utils.py index c7699232e..d2573e235 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,3 +1,4 @@ +import itertools import unittest from websockets.utils import apply_mask as py_apply_mask @@ -9,14 +10,16 @@ def apply_mask(*args, **kwargs): return py_apply_mask(*args, **kwargs) def test_apply_mask(self): - for data_in, mask, data_out in [ - (b'', b'1234', 
b''), - (b'aBcDe', b'\x00\x00\x00\x00', b'aBcDe'), - (b'abcdABCD', b'1234', b'PPPPpppp'), - (b'abcdABCD' * 10, b'1234', b'PPPPpppp' * 10), - ]: - with self.subTest(data_in=data_in, mask=mask): - self.assertEqual(self.apply_mask(data_in, mask), data_out) + for data_type, mask_type in itertools.product([bytes, bytearray], repeat=2): + for data_in, mask, data_out in [ + (b'', b'1234', b''), + (b'aBcDe', b'\x00\x00\x00\x00', b'aBcDe'), + (b'abcdABCD', b'1234', b'PPPPpppp'), + (b'abcdABCD' * 10, b'1234', b'PPPPpppp' * 10), + ]: + data_in, mask = data_type(data_in), mask_type(mask) + with self.subTest(data_in=data_in, mask=mask): + self.assertEqual(self.apply_mask(data_in, mask), data_out) def test_apply_mask_check_input_types(self): for data_in, mask in [(None, None), (b'abcd', None), (None, b'abcd')]: From 5897ee913650efdcdcf2c2c98b9f74c9b605e83a Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 4 Nov 2018 11:35:34 +0100 Subject: [PATCH 015/281] Factor out logic for encoding data. This is slightly different for data frames and control frames. --- src/websockets/framing.py | 26 ++++++++++++++++++++++++++ src/websockets/protocol.py | 36 ++++++++++++++++-------------------- tests/test_framing.py | 19 +++++++++++++++++++ 3 files changed, 61 insertions(+), 20 deletions(-) diff --git a/src/websockets/framing.py b/src/websockets/framing.py index 850e7e7e2..3e3f9386d 100644 --- a/src/websockets/framing.py +++ b/src/websockets/framing.py @@ -34,6 +34,7 @@ 'OP_PING', 'OP_PONG', 'Frame', + 'prepare_data', 'encode_data', 'parse_close', 'serialize_close', @@ -235,10 +236,35 @@ def check(frame): raise WebSocketProtocolError("Invalid opcode: {}".format(frame.opcode)) +def prepare_data(data): + """ + Convert a string or byte-like object to an opcode and a bytes-like object. + + This function is designed for data frames. + + If ``data`` is a :class:`str`, return ``OP_TEXT`` and a :class:`bytes` + object encoding ``data`` in UTF-8. + + If ``data`` is a bytes-like object, return ``OP_BINARY`` and a bytes-like + object. + + Raise :exc:`TypeError` for other inputs. + + """ + if isinstance(data, str): + return OP_TEXT, data.encode('utf-8') + elif isinstance(data, collections.abc.ByteString): + return OP_BINARY, data + else: + raise TypeError("data must be bytes-like or str") + + def encode_data(data): """ Convert a string or byte-like object to bytes. + This function is designed for ping and pong frames. + If ``data`` is a :class:`str`, return a :class:`bytes` object encoding ``data`` in UTF-8. diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 7af86133f..13a370aca 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -461,17 +461,21 @@ def send(self, data): """ yield from self.ensure_open() - # Unfragmented message (first because str and bytes are iterable). + # Unfragmented message -- this case must be handled first because + # strings and bytes-like objects are iterable. - if isinstance(data, str): - yield from self.write_frame(True, OP_TEXT, data.encode('utf-8')) - - elif isinstance(data, collections.abc.ByteString): - yield from self.write_frame(True, OP_BINARY, data) + try: + opcode, data = prepare_data(data) + except TypeError: + # Perhaps data is an iterator, see below. + pass + else: + yield from self.write_frame(True, opcode, data) + return # Fragmented message -- regular iterator. - elif isinstance(data, collections.abc.Iterable): + if isinstance(data, collections.abc.Iterable): iter_data = iter(data) # First fragment. 
@@ -479,29 +483,21 @@ def send(self, data): data = next(iter_data) except StopIteration: return - data_type = type(data) - if isinstance(data, str): - yield from self.write_frame(False, OP_TEXT, data.encode('utf-8')) - encode_data = True - elif isinstance(data, collections.abc.ByteString): - yield from self.write_frame(False, OP_BINARY, data) - encode_data = False - else: - raise TypeError("data must be an iterable of bytes or str") + opcode, data = prepare_data(data) + yield from self.write_frame(False, opcode, data) # Other fragments. for data in iter_data: - if type(data) != data_type: + confirm_opcode, data = prepare_data(data) + if confirm_opcode != opcode: # We're half-way through a fragmented message and we can't # complete it. This makes the connection unusable. self.fail_connection(1011) raise TypeError("data contains inconsistent types") - if encode_data: - data = data.encode('utf-8') yield from self.write_frame(False, OP_CONT, data) # Final fragment. - yield from self.write_frame(True, OP_CONT, type(data)()) + yield from self.write_frame(True, OP_CONT, b'') # Fragmented message -- asynchronous iterator diff --git a/tests/test_framing.py b/tests/test_framing.py index ae5acc1a6..570fe3bdf 100644 --- a/tests/test_framing.py +++ b/tests/test_framing.py @@ -152,6 +152,25 @@ def test_control_frame_max_length(self): with self.assertRaises(WebSocketProtocolError): self.decode(b'\x88\x7e\x00\x7e' + 126 * b'a') + def test_prepare_data_str(self): + self.assertEqual(prepare_data('café'), (OP_TEXT, b'caf\xc3\xa9')) + + def test_prepare_data_bytes(self): + self.assertEqual(prepare_data(b'tea'), (OP_BINARY, b'tea')) + + def test_prepare_data_bytearray(self): + self.assertEqual( + prepare_data(bytearray(b'tea')), (OP_BINARY, bytearray(b'tea')) + ) + + def test_prepare_data_list(self): + with self.assertRaises(TypeError): + prepare_data([]) + + def test_prepare_data_none(self): + with self.assertRaises(TypeError): + prepare_data(None) + def test_encode_data_str(self): self.assertEqual(encode_data('café'), b'caf\xc3\xa9') From 6a8c8332838ef9814b773a4752aa80f7bca42d96 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 4 Nov 2018 17:45:47 +0100 Subject: [PATCH 016/281] Support memoryview objects like bytes. Minimize memory copies when they're C-contiguous. Fix #478. 
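A hedged sketch of what memoryview support enables: slicing a large outgoing buffer into fragments without copying each one. ``send_in_chunks`` and the chunk size are made up for illustration:

.. code:: python

    import asyncio

    @asyncio.coroutine
    def send_in_chunks(websocket, payload, chunk_size=65536):
        # 1-D slices of a contiguous buffer stay C-contiguous, so no copy
        # is made before each binary frame is written.
        view = memoryview(payload)
        for offset in range(0, len(view), chunk_size):
            yield from websocket.send(view[offset:offset + chunk_size])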
--- src/websockets/framing.py | 7 +++++ src/websockets/speedups.c | 27 +++++++++++++++++-- tests/test_framing.py | 14 ++++++++++ tests/test_protocol.py | 40 +++++++++++++++++++++++++++ tests/test_utils.py | 57 +++++++++++++++++++++++++++++++++------ 5 files changed, 135 insertions(+), 10 deletions(-) diff --git a/src/websockets/framing.py b/src/websockets/framing.py index 3e3f9386d..feebd3983 100644 --- a/src/websockets/framing.py +++ b/src/websockets/framing.py @@ -255,6 +255,11 @@ def prepare_data(data): return OP_TEXT, data.encode('utf-8') elif isinstance(data, collections.abc.ByteString): return OP_BINARY, data + elif isinstance(data, memoryview): + if data.c_contiguous: + return OP_BINARY, data + else: + return OP_BINARY, data.tobytes() else: raise TypeError("data must be bytes-like or str") @@ -277,6 +282,8 @@ def encode_data(data): return data.encode('utf-8') elif isinstance(data, collections.abc.ByteString): return bytes(data) + elif isinstance(data, memoryview): + return data.tobytes() else: raise TypeError("data must be bytes-like or str") diff --git a/src/websockets/speedups.c b/src/websockets/speedups.c index bb9c7053f..d1c2b37e6 100644 --- a/src/websockets/speedups.c +++ b/src/websockets/speedups.c @@ -15,6 +15,11 @@ static const Py_ssize_t MASK_LEN = 4; static int _PyBytesLike_AsStringAndSize(PyObject *obj, char **buffer, Py_ssize_t *length) { + // This supports bytes, bytearrays, and C-contiguous memoryview objects, + // which are the most useful data structures for handling byte streams. + // websockets.framing.prepare_data() returns only values of these types. + // Any object implementing the buffer protocol could be supported, however + // that would require allocation or copying memory, which is expensive. if (PyBytes_Check(obj)) { *buffer = PyBytes_AS_STRING(obj); @@ -25,6 +30,23 @@ _PyBytesLike_AsStringAndSize(PyObject *obj, char **buffer, Py_ssize_t *length) *buffer = PyByteArray_AS_STRING(obj); *length = PyByteArray_GET_SIZE(obj); } + else if (PyMemoryView_Check(obj)) + { + Py_buffer *mv_buf; + mv_buf = PyMemoryView_GET_BUFFER(obj); + if (PyBuffer_IsContiguous(mv_buf, 'C')) + { + *buffer = mv_buf->buf; + *length = mv_buf->len; + } + else + { + PyErr_Format( + PyExc_TypeError, + "expected a contiguous memoryview"); + return -1; + } + } else { PyErr_Format( @@ -43,13 +65,14 @@ static PyObject * apply_mask(PyObject *self, PyObject *args, PyObject *kwds) { - // In order to support bytes and bytearray, accept any Python object. + // In order to support various bytes-like types, accept any Python object. static char *kwlist[] = {"data", "mask", NULL}; PyObject *input_obj; PyObject *mask_obj; - // A pointer to the underlying char * will be extracted from these inputs. + // A pointer to a char * + length will be extracted from the data and mask + // arguments, possibly via a Py_buffer. 
char *input; Py_ssize_t input_len; diff --git a/tests/test_framing.py b/tests/test_framing.py index 570fe3bdf..ab11f6bdc 100644 --- a/tests/test_framing.py +++ b/tests/test_framing.py @@ -163,6 +163,14 @@ def test_prepare_data_bytearray(self): prepare_data(bytearray(b'tea')), (OP_BINARY, bytearray(b'tea')) ) + def test_prepare_data_memoryview(self): + self.assertEqual( + prepare_data(memoryview(b'tea')), (OP_BINARY, memoryview(b'tea')) + ) + + def test_prepare_data_non_contiguous_memoryview(self): + self.assertEqual(prepare_data(memoryview(b'tteeaa')[::2]), (OP_BINARY, b'tea')) + def test_prepare_data_list(self): with self.assertRaises(TypeError): prepare_data([]) @@ -180,6 +188,12 @@ def test_encode_data_bytes(self): def test_encode_data_bytearray(self): self.assertEqual(encode_data(bytearray(b'tea')), b'tea') + def test_encode_data_memoryview(self): + self.assertEqual(encode_data(memoryview(b'tea')), b'tea') + + def test_encode_data_non_contiguous_memoryview(self): + self.assertEqual(encode_data(memoryview(b'tteeaa')[::2]), b'tea') + def test_encode_data_list(self): with self.assertRaises(TypeError): encode_data([]) diff --git a/tests/test_protocol.py b/tests/test_protocol.py index c546e4e48..a5eb251c9 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -539,6 +539,14 @@ def test_send_binary_from_bytearray(self): self.loop.run_until_complete(self.protocol.send(bytearray(b'tea'))) self.assertOneFrameSent(True, OP_BINARY, b'tea') + def test_send_binary_from_memoryview(self): + self.loop.run_until_complete(self.protocol.send(memoryview(b'tea'))) + self.assertOneFrameSent(True, OP_BINARY, b'tea') + + def test_send_binary_from_non_contiguous_memoryview(self): + self.loop.run_until_complete(self.protocol.send(memoryview(b'tteeaa')[::2])) + self.assertOneFrameSent(True, OP_BINARY, b'tea') + def test_send_type_error(self): with self.assertRaises(TypeError): self.loop.run_until_complete(self.protocol.send(42)) @@ -566,6 +574,22 @@ def test_send_iterable_binary_from_bytearray(self): (False, OP_BINARY, b'te'), (False, OP_CONT, b'a'), (True, OP_CONT, b'') ) + def test_send_iterable_binary_from_memoryview(self): + self.loop.run_until_complete( + self.protocol.send([memoryview(b'te'), memoryview(b'a')]) + ) + self.assertFramesSent( + (False, OP_BINARY, b'te'), (False, OP_CONT, b'a'), (True, OP_CONT, b'') + ) + + def test_send_iterable_binary_from_non_contiguous_memoryview(self): + self.loop.run_until_complete( + self.protocol.send([memoryview(b'ttee')[::2], memoryview(b'aa')[::2]]) + ) + self.assertFramesSent( + (False, OP_BINARY, b'te'), (False, OP_CONT, b'a'), (True, OP_CONT, b'') + ) + def test_send_empty_iterable(self): self.loop.run_until_complete(self.protocol.send([])) self.assertNoFrameSent() @@ -632,6 +656,14 @@ def test_ping_binary_from_bytearray(self): self.loop.run_until_complete(self.protocol.ping(bytearray(b'tea'))) self.assertOneFrameSent(True, OP_PING, b'tea') + def test_ping_binary_from_memoryview(self): + self.loop.run_until_complete(self.protocol.ping(memoryview(b'tea'))) + self.assertOneFrameSent(True, OP_PING, b'tea') + + def test_ping_binary_from_non_contiguous_memoryview(self): + self.loop.run_until_complete(self.protocol.ping(memoryview(b'tteeaa')[::2])) + self.assertOneFrameSent(True, OP_PING, b'tea') + def test_ping_type_error(self): with self.assertRaises(TypeError): self.loop.run_until_complete(self.protocol.ping(42)) @@ -681,6 +713,14 @@ def test_pong_binary_from_bytearray(self): self.loop.run_until_complete(self.protocol.pong(bytearray(b'tea'))) 
self.assertOneFrameSent(True, OP_PONG, b'tea') + def test_pong_binary_from_memoryview(self): + self.loop.run_until_complete(self.protocol.pong(memoryview(b'tea'))) + self.assertOneFrameSent(True, OP_PONG, b'tea') + + def test_pong_binary_from_non_contiguous_memoryview(self): + self.loop.run_until_complete(self.protocol.pong(memoryview(b'tteeaa')[::2])) + self.assertOneFrameSent(True, OP_PONG, b'tea') + def test_pong_type_error(self): with self.assertRaises(TypeError): self.loop.run_until_complete(self.protocol.pong(42)) diff --git a/tests/test_utils.py b/tests/test_utils.py index d2573e235..1b913fe7f 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -9,17 +9,45 @@ class UtilsTests(unittest.TestCase): def apply_mask(*args, **kwargs): return py_apply_mask(*args, **kwargs) + apply_mask_type_combos = list(itertools.product([bytes, bytearray], repeat=2)) + + apply_mask_test_values = [ + (b'', b'1234', b''), + (b'aBcDe', b'\x00\x00\x00\x00', b'aBcDe'), + (b'abcdABCD', b'1234', b'PPPPpppp'), + (b'abcdABCD' * 10, b'1234', b'PPPPpppp' * 10), + ] + def test_apply_mask(self): - for data_type, mask_type in itertools.product([bytes, bytearray], repeat=2): - for data_in, mask, data_out in [ - (b'', b'1234', b''), - (b'aBcDe', b'\x00\x00\x00\x00', b'aBcDe'), - (b'abcdABCD', b'1234', b'PPPPpppp'), - (b'abcdABCD' * 10, b'1234', b'PPPPpppp' * 10), - ]: + for data_type, mask_type in self.apply_mask_type_combos: + for data_in, mask, data_out in self.apply_mask_test_values: data_in, mask = data_type(data_in), mask_type(mask) + + with self.subTest(data_in=data_in, mask=mask): + result = self.apply_mask(data_in, mask) + self.assertEqual(result, data_out) + + def test_apply_mask_memoryview(self): + for data_type, mask_type in self.apply_mask_type_combos: + for data_in, mask, data_out in self.apply_mask_test_values: + data_in, mask = data_type(data_in), mask_type(mask) + data_in, mask = memoryview(data_in), memoryview(mask) + with self.subTest(data_in=data_in, mask=mask): - self.assertEqual(self.apply_mask(data_in, mask), data_out) + result = self.apply_mask(data_in, mask) + self.assertEqual(result, data_out) + + def test_apply_mask_non_contiguous_memoryview(self): + for data_type, mask_type in self.apply_mask_type_combos: + for data_in, mask, data_out in self.apply_mask_test_values: + data_in, mask = data_type(data_in), mask_type(mask) + data_in, mask = memoryview(data_in), memoryview(mask) + data_in, mask = data_in[::-1], mask[::-1] + data_out = data_out[::-1] + + with self.subTest(data_in=data_in, mask=mask): + result = self.apply_mask(data_in, mask) + self.assertEqual(result, data_out) def test_apply_mask_check_input_types(self): for data_in, mask in [(None, None), (b'abcd', None), (None, b'abcd')]: @@ -49,3 +77,16 @@ class SpeedupsTests(UtilsTests): @staticmethod def apply_mask(*args, **kwargs): return c_apply_mask(*args, **kwargs) + + def test_apply_mask_non_contiguous_memoryview(self): + for data_type, mask_type in self.apply_mask_type_combos: + for data_in, mask, data_out in self.apply_mask_test_values: + data_in, mask = data_type(data_in), mask_type(mask) + data_in, mask = memoryview(data_in), memoryview(mask) + data_in, mask = data_in[::-1], mask[::-1] + data_out = data_out[::-1] + + with self.subTest(data_in=data_in, mask=mask): + # The C extension only supports contiguous memoryviews. 
+ with self.assertRaises(TypeError): + self.apply_mask(data_in, mask) From dff6cfc1a285c0a36ad440290fe81b46269bbba8 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 4 Nov 2018 18:12:57 +0100 Subject: [PATCH 017/281] Add documentation. Ref #478. --- docs/changelog.rst | 7 +++++++ src/websockets/protocol.py | 15 ++++++++------- 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 393abf1f8..3ec35445d 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -13,6 +13,13 @@ Changelog **Version 8.0 adds the reason phrase to the return type of the low-level API** :func:`~http.read_response` **.** +Also: + +* :meth:`~protocol.WebSocketCommonProtocol.send`, + :meth:`~protocol.WebSocketCommonProtocol.ping`, and + :meth:`~protocol.WebSocketCommonProtocol.pong` support bytes-like types + :class:`bytearray` and :class:`memoryview` in addition to :class:`bytes`. + 7.0 ... diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 13a370aca..e154a62cf 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -448,10 +448,11 @@ def send(self, data): """ This coroutine sends a message. - It sends :class:`str` as a text frame and :class:`bytes` as a binary - frame. + It sends a string (:class:`str`) as a text frame and a bytes-like + object (:class:`bytes`, :class:`bytearray`, or :class:`memoryview`) + as a binary frame. - It also accepts an iterable of :class:`str` or :class:`bytes`. Each + It also accepts an iterable of strings or bytes-like objects. Each item is treated as a message fragment and sent in its own frame. All items must be of the same type, or else :meth:`send` will raise a :exc:`TypeError` and the connection will be closed. @@ -572,8 +573,8 @@ def ping(self, data=None): await pong_waiter # only if you want to wait for the pong By default, the ping contains four random bytes. The content may be - overridden with the optional ``data`` argument which must be of type - :class:`str` (which will be encoded to UTF-8) or :class:`bytes`. + overridden with the optional ``data`` argument which must be a string + (which will be encoded to UTF-8) or a bytes-like object. """ yield from self.ensure_open() @@ -603,8 +604,8 @@ def pong(self, data=b''): An unsolicited pong may serve as a unidirectional heartbeat. The content may be overridden with the optional ``data`` argument - which must be of type :class:`str` (which will be encoded to UTF-8) or - :class:`bytes`. + which must be a string (which will be encoded to UTF-8) or a + bytes-like object. """ yield from self.ensure_open() From a4dbe6ccb22fd9a591e7557de8ed9b6aa7202741 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Tue, 6 Nov 2018 22:50:49 +0100 Subject: [PATCH 018/281] Fix wait_closed signature in docs. Fix #512. --- docs/api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/api.rst b/docs/api.rst index 3971ff8b4..80d64e254 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -68,7 +68,7 @@ Shared .. autoclass:: WebSocketCommonProtocol(*, host=None, port=None, secure=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None) .. automethod:: close(code=1000, reason='') - .. automethod:: wait_closed(code=1000, reason='') + .. automethod:: wait_closed() .. automethod:: recv() .. 
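As the changelog entry notes, ``ping()`` and ``pong()`` accept the same bytes-like payloads as ``send()``. A sketch of the documented ping flow; ``heartbeat`` is a hypothetical helper:

.. code:: python

    import asyncio

    @asyncio.coroutine
    def heartbeat(websocket):
        pong_waiter = yield from websocket.ping(bytearray(b'keepalive'))
        yield from pong_waiter    # resolves when the matching pong arrives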
automethod:: send(data) From b6fc5c06d91fbbd76c5db5293adb9b3269116557 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 11 Nov 2018 10:22:18 +0100 Subject: [PATCH 019/281] Fix side effect of automatic code formatting.` --- src/websockets/protocol.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index e154a62cf..52e39a2af 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -888,7 +888,7 @@ def write_frame(self, fin, opcode, data, *, _expected_state=State.OPEN): # Defensive assertion for protocol compliance. if self.state is not _expected_state: # pragma: no cover raise InvalidState( - "Cannot write to a WebSocket " "in the {} state".format(self.state.name) + "Cannot write to a WebSocket in the {} state".format(self.state.name) ) frame = Frame(fin, opcode, data) From 00458f2749bbaeb36280c3129af74f00dab26b3d Mon Sep 17 00:00:00 2001 From: Cory Johns Date: Wed, 12 Dec 2018 17:47:32 -0500 Subject: [PATCH 020/281] Handle redirects in client when connecting Per https://tools.ietf.org/html/rfc6455.html#section-4.2.2 the server may redirect the client during the handshake. This allows the client to handle redirects properly instead of raising an InvalidStatusCode error. --- src/websockets/client.py | 119 ++++++++++++++++++++++------------ src/websockets/exceptions.py | 10 +++ src/websockets/py35/client.py | 41 ++++++++---- tests/test_client_server.py | 87 ++++++++++++++++++++++++- 4 files changed, 202 insertions(+), 55 deletions(-) diff --git a/src/websockets/client.py b/src/websockets/client.py index 2de160e9c..7b0421a44 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -13,6 +13,7 @@ InvalidMessage, InvalidStatusCode, NegotiationError, + RedirectHandshake, ) from .extensions.permessage_deflate import ClientPerMessageDeflateFactory from .handshake import build_request, check_response @@ -289,8 +290,11 @@ def handshake( self.write_http_request(wsuri.resource_name, request_headers) status_code, response_headers = yield from self.read_http_response() - - if status_code != 101: + if status_code in (301, 302, 303, 307, 308): + if 'Location' not in response_headers: + raise InvalidMessage('Redirect response missing Location') + raise RedirectHandshake(parse_uri(response_headers['Location'])) + elif status_code != 101: raise InvalidStatusCode(status_code) check_response(response_headers, key) @@ -358,6 +362,8 @@ class Connect: """ + MAX_REDIRECTS_ALLOWED = 10 + def __init__( self, uri, @@ -394,8 +400,8 @@ def __init__( if create_protocol is None: create_protocol = klass - wsuri = parse_uri(uri) - if wsuri.secure: + self._wsuri = parse_uri(uri) + if self._wsuri.secure: kwds.setdefault('ssl', True) elif kwds.get('ssl') is not None: raise ValueError( @@ -416,53 +422,86 @@ def __init__( elif compression is not None: raise ValueError("Unsupported compression: {}".format(compression)) - factory = lambda: create_protocol( - host=wsuri.host, - port=wsuri.port, - secure=wsuri.secure, - ping_interval=ping_interval, - ping_timeout=ping_timeout, - close_timeout=close_timeout, - max_size=max_size, - max_queue=max_queue, - read_limit=read_limit, - write_limit=write_limit, - loop=loop, - legacy_recv=legacy_recv, - origin=origin, - extensions=extensions, - subprotocols=subprotocols, - extra_headers=extra_headers, + self._create_protocol = create_protocol + self._ping_interval = ping_interval + self._ping_timeout = ping_timeout + self._close_timeout = close_timeout + self._max_size = max_size 
+ self._max_queue = max_queue + self._read_limit = read_limit + self._write_limit = write_limit + self._loop = loop + self._legacy_recv = legacy_recv + self._klass = klass + self._timeout = timeout + self._compression = compression + self._origin = origin + self._extensions = extensions + self._subprotocols = subprotocols + self._extra_headers = extra_headers + self._kwds = kwds + + def _creating_connection(self): + if self._wsuri.secure: + self._kwds.setdefault('ssl', True) + + factory = lambda: self._create_protocol( + host=self._wsuri.host, + port=self._wsuri.port, + secure=self._wsuri.secure, + ping_interval=self._ping_interval, + ping_timeout=self._ping_timeout, + close_timeout=self._close_timeout, + max_size=self._max_size, + max_queue=self._max_queue, + read_limit=self._read_limit, + write_limit=self._write_limit, + loop=self._loop, + legacy_recv=self._legacy_recv, + origin=self._origin, + extensions=self._extensions, + subprotocols=self._subprotocols, + extra_headers=self._extra_headers, ) - if kwds.get('sock') is None: - host, port = wsuri.host, wsuri.port + if self._kwds.get('sock') is None: + host, port = self._wsuri.host, self._wsuri.port else: # If sock is given, host and port mustn't be specified. host, port = None, None - self._wsuri = wsuri - self._origin = origin + self._wsuri = self._wsuri + self._origin = self._origin # This is a coroutine object. - self._creating_connection = loop.create_connection(factory, host, port, **kwds) + return self._loop.create_connection(factory, host, port, **self._kwds) @asyncio.coroutine def __iter__(self): # pragma: no cover - transport, protocol = yield from self._creating_connection - - try: - yield from protocol.handshake( - self._wsuri, - origin=self._origin, - available_extensions=protocol.available_extensions, - available_subprotocols=protocol.available_subprotocols, - extra_headers=protocol.extra_headers, - ) - except Exception: - protocol.fail_connection() - yield from protocol.wait_closed() - raise + for redirects in range(self.MAX_REDIRECTS_ALLOWED): + transport, protocol = yield from self._creating_connection() + + try: + try: + yield from protocol.handshake( + self._wsuri, + origin=self._origin, + available_extensions=protocol.available_extensions, + available_subprotocols=protocol.available_subprotocols, + extra_headers=protocol.extra_headers, + ) + break # redirection chain ended + except Exception: + protocol.fail_connection() + yield from protocol.wait_closed() + raise + except RedirectHandshake as e: + if self._wsuri.secure and not e.wsuri.secure: + raise InvalidHandshake('Redirect dropped TLS') + self._wsuri = e.wsuri + continue # redirection chain continues + else: + raise InvalidHandshake('Maximum redirects exceeded') self.ws_client = protocol return protocol diff --git a/src/websockets/exceptions.py b/src/websockets/exceptions.py index b34a2c0dc..39fa093ee 100644 --- a/src/websockets/exceptions.py +++ b/src/websockets/exceptions.py @@ -43,6 +43,16 @@ def __init__(self, status, headers, body=b''): super().__init__(message) +class RedirectHandshake(InvalidHandshake): + """ + Exception raised when a handshake gets redirected. + + """ + + def __init__(self, wsuri): + self.wsuri = wsuri + + class InvalidMessage(InvalidHandshake): """ Exception raised when the HTTP message in a handshake request is malformed. 
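A sketch of the redirect flow this patch adds, modeled on the test fixture further down; the addresses and the ``redirect`` helper are placeholders:

.. code:: python

    import http

    def redirect(path, request_headers):
        # Any of 301, 302, 303, 307 or 308 with a Location header works.
        return http.HTTPStatus.FOUND, {'Location': 'ws://localhost:8765/'}, b''

    # websockets.serve(handler, 'localhost', 8080, process_request=redirect)
    # connect('ws://localhost:8080/') then follows the redirect, up to
    # Connect.MAX_REDIRECTS_ALLOWED hops, instead of raising InvalidStatusCode.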
diff --git a/src/websockets/py35/client.py b/src/websockets/py35/client.py index a016ba437..bd902841a 100644 --- a/src/websockets/py35/client.py +++ b/src/websockets/py35/client.py @@ -1,3 +1,6 @@ +from ..exceptions import InvalidHandshake, RedirectHandshake + + async def __aenter__(self): return await self @@ -9,20 +12,30 @@ async def __aexit__(self, exc_type, exc_value, traceback): async def __await_impl__(self): # Duplicated with __iter__ because Python 3.7 requires an async function # (as explained in __await__ below) which Python 3.4 doesn't support. - transport, protocol = await self._creating_connection - - try: - await protocol.handshake( - self._wsuri, - origin=self._origin, - available_extensions=protocol.available_extensions, - available_subprotocols=protocol.available_subprotocols, - extra_headers=protocol.extra_headers, - ) - except Exception: - protocol.fail_connection() - await protocol.wait_closed() - raise + for redirects in range(self.MAX_REDIRECTS_ALLOWED): + transport, protocol = await self._creating_connection() + + try: + try: + await protocol.handshake( + self._wsuri, + origin=self._origin, + available_extensions=protocol.available_extensions, + available_subprotocols=protocol.available_subprotocols, + extra_headers=protocol.extra_headers, + ) + break # redirection chain ended + except Exception: + protocol.fail_connection() + await protocol.wait_closed() + raise + except RedirectHandshake as e: + if self._wsuri.secure and not e.wsuri.secure: + raise InvalidHandshake('Redirect dropped TLS') + self._wsuri = e.wsuri + continue # redirection chain continues + else: + raise InvalidHandshake('Maximum redirects exceeded') self.ws_client = protocol return protocol diff --git a/tests/test_client_server.py b/tests/test_client_server.py index 73866ff63..394d090a7 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -1,6 +1,7 @@ import asyncio import contextlib import functools +import http import logging import pathlib import random @@ -19,6 +20,7 @@ from websockets.exceptions import ( ConnectionClosed, InvalidHandshake, + InvalidMessage, InvalidStatusCode, NegotiationError, ) @@ -79,6 +81,16 @@ def temp_test_server(test, **kwds): test.stop_server() +@contextlib.contextmanager +def temp_test_redirecting_server(test, status, + include_location=True, force_insecure=False): + test.start_redirecting_server(status, include_location, force_insecure) + try: + yield + finally: + test.stop_redirecting_server() + + @contextlib.contextmanager def temp_test_client(test, *args, **kwds): test.start_client(*args, **kwds) @@ -227,6 +239,8 @@ class ClientServerTests(unittest.TestCase): def setUp(self): self.loop = asyncio.new_event_loop() asyncio.set_event_loop(self.loop) + self.server = None + self.redirecting_server = None def tearDown(self): self.loop.close() @@ -237,6 +251,10 @@ def run_loop_once(self): self.loop.call_soon(self.loop.stop) self.loop.run_forever() + @property + def server_context(self): + return None + def start_server(self, **kwds): # Disable compression by default in tests. 
kwds.setdefault('compression', None) @@ -245,13 +263,30 @@ def start_server(self, **kwds): start_server = serve(handler, 'localhost', 0, **kwds) self.server = self.loop.run_until_complete(start_server) + def start_redirecting_server(self, status, + include_location=True, force_insecure=False): + def _process_request(path, headers): + server_uri = get_server_uri(self.server, self.secure, path) + if force_insecure: + server_uri = server_uri.replace('wss:', 'ws:') + headers = {'Location': server_uri} if include_location else [] + return status, headers, b"" + + start_server = serve(handler, 'localhost', 0, + compression=None, + ping_interval=None, + process_request=_process_request, + ssl=self.server_context) + self.redirecting_server = self.loop.run_until_complete(start_server) + def start_client(self, resource_name='/', user_info=None, **kwds): # Disable compression by default in tests. kwds.setdefault('compression', None) # Disable pings by default in tests. kwds.setdefault('ping_interval', None) secure = kwds.get('ssl') is not None - server_uri = get_server_uri(self.server, secure, resource_name, user_info) + server = self.redirecting_server if self.redirecting_server else self.server + server_uri = get_server_uri(server, secure, resource_name, user_info) start_client = connect(server_uri, **kwds) self.client = self.loop.run_until_complete(start_client) @@ -272,6 +307,17 @@ def stop_server(self): except asyncio.TimeoutError: # pragma: no cover self.fail("Server failed to stop") + def stop_redirecting_server(self): + self.redirecting_server.close() + try: + self.loop.run_until_complete( + asyncio.wait_for(self.redirecting_server.wait_closed(), timeout=1) + ) + except asyncio.TimeoutError: # pragma: no cover + self.fail("Redirecting server failed to stop") + finally: + self.redirecting_server = None + @contextlib.contextmanager def temp_server(self, **kwds): with temp_test_server(self, **kwds): @@ -289,6 +335,37 @@ def test_basic(self): reply = self.loop.run_until_complete(self.client.recv()) self.assertEqual(reply, "Hello!") + @with_server() + def test_redirect(self): + redirect_statuses = [ + http.HTTPStatus.MOVED_PERMANENTLY, + http.HTTPStatus.FOUND, + http.HTTPStatus.SEE_OTHER, + http.HTTPStatus.TEMPORARY_REDIRECT, + http.HTTPStatus.PERMANENT_REDIRECT, + ] + for status in redirect_statuses: + with temp_test_redirecting_server(self, status): + with temp_test_client(self): + self.loop.run_until_complete(self.client.send("Hello!")) + reply = self.loop.run_until_complete(self.client.recv()) + self.assertEqual(reply, "Hello!") + + def test_infinite_redirect(self): + with temp_test_redirecting_server(self, http.HTTPStatus.FOUND): + self.server = self.redirecting_server + with self.assertRaises(InvalidHandshake): + with temp_test_client(self): + self.fail('Did not raise') # pragma: no cover + + @with_server() + def test_redirect_missing_location(self): + with temp_test_redirecting_server(self, http.HTTPStatus.FOUND, + include_location=False): + with self.assertRaises(InvalidMessage): + with temp_test_client(self): + self.fail('Did not raise') # pragma: no cover + def test_explicit_event_loop(self): with self.temp_server(loop=self.loop): with self.temp_client(loop=self.loop): @@ -1070,6 +1147,14 @@ def test_ws_uri_is_rejected(self): # raised only when awaiting. 
self.loop.run_until_complete(client) # pragma: no cover + @with_server() + def test_redirect_insecure(self): + with temp_test_redirecting_server(self, http.HTTPStatus.FOUND, + force_insecure=True): + with self.assertRaises(InvalidHandshake): + with temp_test_client(self): + self.fail('Did not raise') # pragma: no cover + class ClientServerOriginTests(unittest.TestCase): def setUp(self): From ee92bc490bd762ef575cd2eee8883561d4066f15 Mon Sep 17 00:00:00 2001 From: Cory Johns Date: Wed, 12 Dec 2018 19:57:19 -0500 Subject: [PATCH 021/281] Run black to normalize formatting --- tests/test_client_server.py | 34 +++++++++++++++++++++------------- 1 file changed, 21 insertions(+), 13 deletions(-) diff --git a/tests/test_client_server.py b/tests/test_client_server.py index 394d090a7..86f3ff277 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -82,8 +82,9 @@ def temp_test_server(test, **kwds): @contextlib.contextmanager -def temp_test_redirecting_server(test, status, - include_location=True, force_insecure=False): +def temp_test_redirecting_server( + test, status, include_location=True, force_insecure=False +): test.start_redirecting_server(status, include_location, force_insecure) try: yield @@ -263,8 +264,9 @@ def start_server(self, **kwds): start_server = serve(handler, 'localhost', 0, **kwds) self.server = self.loop.run_until_complete(start_server) - def start_redirecting_server(self, status, - include_location=True, force_insecure=False): + def start_redirecting_server( + self, status, include_location=True, force_insecure=False + ): def _process_request(path, headers): server_uri = get_server_uri(self.server, self.secure, path) if force_insecure: @@ -272,11 +274,15 @@ def _process_request(path, headers): headers = {'Location': server_uri} if include_location else [] return status, headers, b"" - start_server = serve(handler, 'localhost', 0, - compression=None, - ping_interval=None, - process_request=_process_request, - ssl=self.server_context) + start_server = serve( + handler, + 'localhost', + 0, + compression=None, + ping_interval=None, + process_request=_process_request, + ssl=self.server_context, + ) self.redirecting_server = self.loop.run_until_complete(start_server) def start_client(self, resource_name='/', user_info=None, **kwds): @@ -360,8 +366,9 @@ def test_infinite_redirect(self): @with_server() def test_redirect_missing_location(self): - with temp_test_redirecting_server(self, http.HTTPStatus.FOUND, - include_location=False): + with temp_test_redirecting_server( + self, http.HTTPStatus.FOUND, include_location=False + ): with self.assertRaises(InvalidMessage): with temp_test_client(self): self.fail('Did not raise') # pragma: no cover @@ -1149,8 +1156,9 @@ def test_ws_uri_is_rejected(self): @with_server() def test_redirect_insecure(self): - with temp_test_redirecting_server(self, http.HTTPStatus.FOUND, - force_insecure=True): + with temp_test_redirecting_server( + self, http.HTTPStatus.FOUND, force_insecure=True + ): with self.assertRaises(InvalidHandshake): with temp_test_client(self): self.fail('Did not raise') # pragma: no cover From e4b49877e8ffdc618360f5ed85fc1212056a6ded Mon Sep 17 00:00:00 2001 From: Cory Johns Date: Wed, 12 Dec 2018 20:13:01 -0500 Subject: [PATCH 022/281] Fix references to HTTPStatus --- src/websockets/compatibility.py | 25 +++++++++++++++++++++++++ tests/test_client_server.py | 32 ++++++++++++++++++-------------- 2 files changed, 43 insertions(+), 14 deletions(-) diff --git a/src/websockets/compatibility.py 
b/src/websockets/compatibility.py index b6506b70c..369c63e32 100644 --- a/src/websockets/compatibility.py +++ b/src/websockets/compatibility.py @@ -24,6 +24,11 @@ UPGRADE_REQUIRED = http.HTTPStatus.UPGRADE_REQUIRED INTERNAL_SERVER_ERROR = http.HTTPStatus.INTERNAL_SERVER_ERROR SERVICE_UNAVAILABLE = http.HTTPStatus.SERVICE_UNAVAILABLE + MOVED_PERMANENTLY = http.HTTPStatus.MOVED_PERMANENTLY + FOUND = http.HTTPStatus.FOUND + SEE_OTHER = http.HTTPStatus.SEE_OTHER + TEMPORARY_REDIRECT = http.HTTPStatus.TEMPORARY_REDIRECT + PERMANENT_REDIRECT = http.HTTPStatus.PERMANENT_REDIRECT except AttributeError: # pragma: no cover # Python < 3.5 class SWITCHING_PROTOCOLS: @@ -57,3 +62,23 @@ class INTERNAL_SERVER_ERROR: class SERVICE_UNAVAILABLE: value = 503 phrase = "Service Unavailable" + + class MOVED_PERMANENTLY: + value = 301 + phrase = "Moved Permanently" + + class FOUND: + value = 302 + phrase = "Found" + + class SEE_OTHER: + value = 303 + phrase = "See Other" + + class TEMPORARY_REDIRECT: + value = 307 + phrase = "Temporary Redirect" + + class PERMANENT_REDIRECT: + value = 308 + phrase = "Permanent Redirect" diff --git a/tests/test_client_server.py b/tests/test_client_server.py index 86f3ff277..eade7e066 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -1,7 +1,6 @@ import asyncio import contextlib import functools -import http import logging import pathlib import random @@ -16,7 +15,16 @@ import warnings from websockets.client import * -from websockets.compatibility import FORBIDDEN, OK, UNAUTHORIZED +from websockets.compatibility import ( + FORBIDDEN, + FOUND, + MOVED_PERMANENTLY, + OK, + PERMANENT_REDIRECT, + SEE_OTHER, + TEMPORARY_REDIRECT, + UNAUTHORIZED, +) from websockets.exceptions import ( ConnectionClosed, InvalidHandshake, @@ -344,11 +352,11 @@ def test_basic(self): @with_server() def test_redirect(self): redirect_statuses = [ - http.HTTPStatus.MOVED_PERMANENTLY, - http.HTTPStatus.FOUND, - http.HTTPStatus.SEE_OTHER, - http.HTTPStatus.TEMPORARY_REDIRECT, - http.HTTPStatus.PERMANENT_REDIRECT, + MOVED_PERMANENTLY, + FOUND, + SEE_OTHER, + TEMPORARY_REDIRECT, + PERMANENT_REDIRECT, ] for status in redirect_statuses: with temp_test_redirecting_server(self, status): @@ -358,7 +366,7 @@ def test_redirect(self): self.assertEqual(reply, "Hello!") def test_infinite_redirect(self): - with temp_test_redirecting_server(self, http.HTTPStatus.FOUND): + with temp_test_redirecting_server(self, FOUND): self.server = self.redirecting_server with self.assertRaises(InvalidHandshake): with temp_test_client(self): @@ -366,9 +374,7 @@ def test_infinite_redirect(self): @with_server() def test_redirect_missing_location(self): - with temp_test_redirecting_server( - self, http.HTTPStatus.FOUND, include_location=False - ): + with temp_test_redirecting_server(self, FOUND, include_location=False): with self.assertRaises(InvalidMessage): with temp_test_client(self): self.fail('Did not raise') # pragma: no cover @@ -1156,9 +1162,7 @@ def test_ws_uri_is_rejected(self): @with_server() def test_redirect_insecure(self): - with temp_test_redirecting_server( - self, http.HTTPStatus.FOUND, force_insecure=True - ): + with temp_test_redirecting_server(self, FOUND, force_insecure=True): with self.assertRaises(InvalidHandshake): with temp_test_client(self): self.fail('Did not raise') # pragma: no cover From 170088dcf478b661097556e97ad60d0dba410408 Mon Sep 17 00:00:00 2001 From: Cory Johns Date: Fri, 21 Dec 2018 15:23:05 -0500 Subject: [PATCH 023/281] Add line to changelog for redirect handling --- 
docs/changelog.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 3ec35445d..a76e1212e 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -20,6 +20,9 @@ Also: :meth:`~protocol.WebSocketCommonProtocol.pong` support bytes-like types :class:`bytearray` and :class:`memoryview` in addition to :class:`bytes`. +* :func:`~client.connect()` handles redirects from the server during the + handshake. + 7.0 ... From 5931865413bfe7afa2be6e6e947870668b729e15 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 29 Dec 2018 14:33:07 +0100 Subject: [PATCH 024/281] Normalize string style with black. --- Makefile | 2 +- src/websockets/__main__.py | 30 +- src/websockets/client.py | 48 +-- src/websockets/compatibility.py | 2 +- src/websockets/exceptions.py | 42 +- .../extensions/permessage_deflate.py | 48 +-- src/websockets/framing.py | 54 +-- src/websockets/handshake.py | 70 ++-- src/websockets/headers.py | 54 +-- src/websockets/http.py | 48 +-- src/websockets/protocol.py | 50 +-- src/websockets/py35/client.py | 4 +- src/websockets/server.py | 44 +-- src/websockets/uri.py | 16 +- src/websockets/utils.py | 2 +- src/websockets/version.py | 2 +- tests/extensions/test_permessage_deflate.py | 308 +++++++-------- tests/py35/_test_client_server.py | 18 +- tests/py36/_test_client_server.py | 8 +- tests/test_client_server.py | 328 ++++++++-------- tests/test_framing.py | 88 ++--- tests/test_handshake.py | 68 ++-- tests/test_headers.py | 72 ++-- tests/test_http.py | 118 +++--- tests/test_protocol.py | 358 +++++++++--------- tests/test_uri.py | 16 +- tests/test_utils.py | 18 +- tox.ini | 2 +- 28 files changed, 959 insertions(+), 959 deletions(-) diff --git a/Makefile b/Makefile index 2d77dcfc7..0863f8578 100644 --- a/Makefile +++ b/Makefile @@ -3,7 +3,7 @@ export PYTHONPATH=src style: isort --recursive src tests - black --skip-string-normalization src tests + black src tests flake8 src tests test: diff --git a/src/websockets/__main__.py b/src/websockets/__main__.py index af9286637..4c880c24c 100644 --- a/src/websockets/__main__.py +++ b/src/websockets/__main__.py @@ -56,19 +56,19 @@ def print_during_input(string): sys.stdout.write( ( # Save cursor position - '\N{ESC}7' + "\N{ESC}7" # Add a new line - '\N{LINE FEED}' + "\N{LINE FEED}" # Move cursor up - '\N{ESC}[A' + "\N{ESC}[A" # Insert blank line, scroll last line down - '\N{ESC}[L' + "\N{ESC}[L" # Print string in the inserted blank line - '{string}\N{LINE FEED}' + "{string}\N{LINE FEED}" # Restore cursor position - '\N{ESC}8' + "\N{ESC}8" # Move cursor down - '\N{ESC}[B' + "\N{ESC}[B" ).format(string=string) ) sys.stdout.flush() @@ -78,11 +78,11 @@ def print_over_input(string): sys.stdout.write( ( # Move cursor to beginning of line - '\N{CARRIAGE RETURN}' + "\N{CARRIAGE RETURN}" # Delete current line - '\N{ESC}[K' + "\N{ESC}[K" # Print string - '{string}\N{LINE FEED}' + "{string}\N{LINE FEED}" ).format(string=string) ) sys.stdout.flush() @@ -119,7 +119,7 @@ def run_client(uri, loop, inputs, stop): except websockets.ConnectionClosed: break else: - print_during_input('< ' + message) + print_during_input("< " + message) if outgoing in done: message = outgoing.result() @@ -141,7 +141,7 @@ def run_client(uri, loop, inputs, stop): def main(): # If we're on Windows, enable VT100 terminal support. 
- if os.name == 'nt': + if os.name == "nt": try: win_enable_vt100() except RuntimeError as exc: @@ -160,7 +160,7 @@ def main(): description="Interactive WebSocket client.", add_help=False, ) - parser.add_argument('uri', metavar='') + parser.add_argument("uri", metavar="") args = parser.parse_args() # Create an event loop that will run in a background thread. @@ -183,7 +183,7 @@ def main(): try: while True: # Since there's no size limit, put_nowait is identical to put. - message = input('> ') + message = input("> ") loop.call_soon_threadsafe(inputs.put_nowait, message) except (KeyboardInterrupt, EOFError): # ^C, ^D loop.call_soon_threadsafe(stop.set_result, None) @@ -192,5 +192,5 @@ def main(): thread.join() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/src/websockets/client.py b/src/websockets/client.py index 7b0421a44..66034ce25 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -29,7 +29,7 @@ from .uri import parse_uri -__all__ = ['connect', 'WebSocketClientProtocol'] +__all__ = ["connect", "WebSocketClientProtocol"] logger = logging.getLogger(__name__) @@ -44,7 +44,7 @@ class WebSocketClientProtocol(WebSocketCommonProtocol): """ is_client = True - side = 'client' + side = "client" def __init__( self, @@ -74,7 +74,7 @@ def write_http_request(self, path, headers): # Since the path and headers only contain ASCII characters, # we can keep this simple. - request = 'GET {path} HTTP/1.1\r\n'.format(path=path) + request = "GET {path} HTTP/1.1\r\n".format(path=path) request += str(headers) self.writer.write(request.encode()) @@ -134,7 +134,7 @@ def process_extensions(headers, available_extensions): """ accepted_extensions = [] - header_values = headers.get_all('Sec-WebSocket-Extensions') + header_values = headers.get_all("Sec-WebSocket-Extensions") if header_values: @@ -191,7 +191,7 @@ def process_subprotocol(headers, available_subprotocols): """ subprotocol = None - header_values = headers.get_all('Sec-WebSocket-Protocol') + header_values = headers.get_all("Sec-WebSocket-Protocol") if header_values: @@ -208,7 +208,7 @@ def process_subprotocol(headers, available_subprotocols): if len(parsed_header_values) > 1: raise InvalidHandshake( - "Multiple subprotocols: {}".format(', '.join(parsed_header_values)) + "Multiple subprotocols: {}".format(", ".join(parsed_header_values)) ) subprotocol = parsed_header_values[0] @@ -252,15 +252,15 @@ def handshake( request_headers = Headers() if wsuri.port == (443 if wsuri.secure else 80): # pragma: no cover - request_headers['Host'] = wsuri.host + request_headers["Host"] = wsuri.host else: - request_headers['Host'] = '{}:{}'.format(wsuri.host, wsuri.port) + request_headers["Host"] = "{}:{}".format(wsuri.host, wsuri.port) if wsuri.user_info: - request_headers['Authorization'] = build_basic_auth(*wsuri.user_info) + request_headers["Authorization"] = build_basic_auth(*wsuri.user_info) if origin is not None: - request_headers['Origin'] = origin + request_headers["Origin"] = origin key = build_request(request_headers) @@ -271,11 +271,11 @@ def handshake( for extension_factory in available_extensions ] ) - request_headers['Sec-WebSocket-Extensions'] = extensions_header + request_headers["Sec-WebSocket-Extensions"] = extensions_header if available_subprotocols is not None: protocol_header = build_subprotocol_list(available_subprotocols) - request_headers['Sec-WebSocket-Protocol'] = protocol_header + request_headers["Sec-WebSocket-Protocol"] = protocol_header if extra_headers is not None: if isinstance(extra_headers, 
Headers): @@ -285,15 +285,15 @@ def handshake( for name, value in extra_headers: request_headers[name] = value - request_headers.setdefault('User-Agent', USER_AGENT) + request_headers.setdefault("User-Agent", USER_AGENT) self.write_http_request(wsuri.resource_name, request_headers) status_code, response_headers = yield from self.read_http_response() if status_code in (301, 302, 303, 307, 308): - if 'Location' not in response_headers: - raise InvalidMessage('Redirect response missing Location') - raise RedirectHandshake(parse_uri(response_headers['Location'])) + if "Location" not in response_headers: + raise InvalidMessage("Redirect response missing Location") + raise RedirectHandshake(parse_uri(response_headers["Location"])) elif status_code != 101: raise InvalidStatusCode(status_code) @@ -380,7 +380,7 @@ def __init__( legacy_recv=False, klass=WebSocketClientProtocol, timeout=10, - compression='deflate', + compression="deflate", origin=None, extensions=None, subprotocols=None, @@ -402,14 +402,14 @@ def __init__( self._wsuri = parse_uri(uri) if self._wsuri.secure: - kwds.setdefault('ssl', True) - elif kwds.get('ssl') is not None: + kwds.setdefault("ssl", True) + elif kwds.get("ssl") is not None: raise ValueError( "connect() received a SSL context for a ws:// URI, " "use a wss:// URI to enable TLS" ) - if compression == 'deflate': + if compression == "deflate": if extensions is None: extensions = [] if not any( @@ -443,7 +443,7 @@ def __init__( def _creating_connection(self): if self._wsuri.secure: - self._kwds.setdefault('ssl', True) + self._kwds.setdefault("ssl", True) factory = lambda: self._create_protocol( host=self._wsuri.host, @@ -464,7 +464,7 @@ def _creating_connection(self): extra_headers=self._extra_headers, ) - if self._kwds.get('sock') is None: + if self._kwds.get("sock") is None: host, port = self._wsuri.host, self._wsuri.port else: # If sock is given, host and port mustn't be specified. 
@@ -497,11 +497,11 @@ def __iter__(self): # pragma: no cover raise except RedirectHandshake as e: if self._wsuri.secure and not e.wsuri.secure: - raise InvalidHandshake('Redirect dropped TLS') + raise InvalidHandshake("Redirect dropped TLS") self._wsuri = e.wsuri continue # redirection chain continues else: - raise InvalidHandshake('Maximum redirects exceeded') + raise InvalidHandshake("Maximum redirects exceeded") self.ws_client = protocol return protocol diff --git a/src/websockets/compatibility.py b/src/websockets/compatibility.py index 369c63e32..8b7a21a5c 100644 --- a/src/websockets/compatibility.py +++ b/src/websockets/compatibility.py @@ -12,7 +12,7 @@ try: # pragma: no cover asyncio_ensure_future = asyncio.ensure_future # Python ≥ 3.5 except AttributeError: # pragma: no cover - asyncio_ensure_future = getattr(asyncio, 'async') # Python < 3.5 + asyncio_ensure_future = getattr(asyncio, "async") # Python < 3.5 try: # pragma: no cover # Python ≥ 3.5 diff --git a/src/websockets/exceptions.py b/src/websockets/exceptions.py index 39fa093ee..611e68188 100644 --- a/src/websockets/exceptions.py +++ b/src/websockets/exceptions.py @@ -1,22 +1,22 @@ __all__ = [ - 'AbortHandshake', - 'ConnectionClosed', - 'DuplicateParameter', - 'InvalidHandshake', - 'InvalidHeader', - 'InvalidHeaderFormat', - 'InvalidHeaderValue', - 'InvalidMessage', - 'InvalidOrigin', - 'InvalidParameterName', - 'InvalidParameterValue', - 'InvalidState', - 'InvalidStatusCode', - 'InvalidUpgrade', - 'InvalidURI', - 'NegotiationError', - 'PayloadTooBig', - 'WebSocketProtocolError', + "AbortHandshake", + "ConnectionClosed", + "DuplicateParameter", + "InvalidHandshake", + "InvalidHeader", + "InvalidHeaderFormat", + "InvalidHeaderValue", + "InvalidMessage", + "InvalidOrigin", + "InvalidParameterName", + "InvalidParameterValue", + "InvalidState", + "InvalidStatusCode", + "InvalidUpgrade", + "InvalidURI", + "NegotiationError", + "PayloadTooBig", + "WebSocketProtocolError", ] @@ -33,7 +33,7 @@ class AbortHandshake(InvalidHandshake): """ - def __init__(self, status, headers, body=b''): + def __init__(self, status, headers, body=b""): self.status = status self.headers = headers self.body = body @@ -69,7 +69,7 @@ class InvalidHeader(InvalidHandshake): def __init__(self, name, value=None): if value is None: message = "Missing {} header".format(name) - elif value == '': + elif value == "": message = "Empty {} header".format(name) else: message = "Invalid {} header: {}".format(name, value) @@ -108,7 +108,7 @@ class InvalidOrigin(InvalidHeader): """ def __init__(self, origin): - super().__init__('Origin', origin) + super().__init__("Origin", origin) class InvalidStatusCode(InvalidHandshake): diff --git a/src/websockets/extensions/permessage_deflate.py b/src/websockets/extensions/permessage_deflate.py index 19f340734..dad6f1ec1 100644 --- a/src/websockets/extensions/permessage_deflate.py +++ b/src/websockets/extensions/permessage_deflate.py @@ -17,12 +17,12 @@ __all__ = [ - 'ClientPerMessageDeflateFactory', - 'ServerPerMessageDeflateFactory', - 'PerMessageDeflate', + "ClientPerMessageDeflateFactory", + "ServerPerMessageDeflateFactory", + "PerMessageDeflate", ] -_EMPTY_UNCOMPRESSED_BLOCK = b'\x00\x00\xff\xff' +_EMPTY_UNCOMPRESSED_BLOCK = b"\x00\x00\xff\xff" _MAX_WINDOW_BITS_VALUES = [str(bits) for bits in range(8, 16)] @@ -39,15 +39,15 @@ def _build_parameters( """ params = [] if server_no_context_takeover: - params.append(('server_no_context_takeover', None)) + params.append(("server_no_context_takeover", None)) if client_no_context_takeover: 
- params.append(('client_no_context_takeover', None)) + params.append(("client_no_context_takeover", None)) if server_max_window_bits: - params.append(('server_max_window_bits', str(server_max_window_bits))) + params.append(("server_max_window_bits", str(server_max_window_bits))) if client_max_window_bits is True: # only in handshake requests - params.append(('client_max_window_bits', None)) + params.append(("client_max_window_bits", None)) elif client_max_window_bits: - params.append(('client_max_window_bits', str(client_max_window_bits))) + params.append(("client_max_window_bits", str(client_max_window_bits))) return params @@ -66,7 +66,7 @@ def _extract_parameters(params, *, is_server): for name, value in params: - if name == 'server_no_context_takeover': + if name == "server_no_context_takeover": if server_no_context_takeover: raise DuplicateParameter(name) if value is None: @@ -74,7 +74,7 @@ def _extract_parameters(params, *, is_server): else: raise InvalidParameterValue(name, value) - elif name == 'client_no_context_takeover': + elif name == "client_no_context_takeover": if client_no_context_takeover: raise DuplicateParameter(name) if value is None: @@ -82,7 +82,7 @@ def _extract_parameters(params, *, is_server): else: raise InvalidParameterValue(name, value) - elif name == 'server_max_window_bits': + elif name == "server_max_window_bits": if server_max_window_bits is not None: raise DuplicateParameter(name) if value in _MAX_WINDOW_BITS_VALUES: @@ -90,7 +90,7 @@ def _extract_parameters(params, *, is_server): else: raise InvalidParameterValue(name, value) - elif name == 'client_max_window_bits': + elif name == "client_max_window_bits": if client_max_window_bits is not None: raise DuplicateParameter(name) if is_server and value is None: # only in handshake requests @@ -117,7 +117,7 @@ class ClientPerMessageDeflateFactory: """ - name = 'permessage-deflate' + name = "permessage-deflate" def __init__( self, @@ -141,7 +141,7 @@ def __init__( or 8 <= client_max_window_bits <= 15 ): raise ValueError("client_max_window_bits must be between 8 and 15") - if compress_settings is not None and 'wbits' in compress_settings: + if compress_settings is not None and "wbits" in compress_settings: raise ValueError( "compress_settings must not include wbits, " "set client_max_window_bits instead" @@ -273,7 +273,7 @@ class ServerPerMessageDeflateFactory: """ - name = 'permessage-deflate' + name = "permessage-deflate" def __init__( self, @@ -293,7 +293,7 @@ def __init__( raise ValueError("server_max_window_bits must be between 8 and 15") if not (client_max_window_bits is None or 8 <= client_max_window_bits <= 15): raise ValueError("client_max_window_bits must be between 8 and 15") - if compress_settings is not None and 'wbits' in compress_settings: + if compress_settings is not None and "wbits" in compress_settings: raise ValueError( "compress_settings must not include wbits, " "set server_max_window_bits instead" @@ -420,7 +420,7 @@ class PerMessageDeflate: """ - name = 'permessage-deflate' + name = "permessage-deflate" def __init__( self, @@ -441,7 +441,7 @@ def __init__( assert local_no_context_takeover in [False, True] assert 8 <= remote_max_window_bits <= 15 assert 8 <= local_max_window_bits <= 15 - assert 'wbits' not in compress_settings + assert "wbits" not in compress_settings self.remote_no_context_takeover = remote_no_context_takeover self.local_no_context_takeover = local_no_context_takeover @@ -465,11 +465,11 @@ def __init__( def __repr__(self): return ( - 'PerMessageDeflate(' - 
'remote_no_context_takeover={}, ' - 'local_no_context_takeover={}, ' - 'remote_max_window_bits={}, ' - 'local_max_window_bits={})' + "PerMessageDeflate(" + "remote_no_context_takeover={}, " + "local_no_context_takeover={}, " + "remote_max_window_bits={}, " + "local_max_window_bits={})" ).format( self.remote_no_context_takeover, self.local_no_context_takeover, diff --git a/src/websockets/framing.py b/src/websockets/framing.py index feebd3983..8b0242715 100644 --- a/src/websockets/framing.py +++ b/src/websockets/framing.py @@ -25,19 +25,19 @@ __all__ = [ - 'DATA_OPCODES', - 'CTRL_OPCODES', - 'OP_CONT', - 'OP_TEXT', - 'OP_BINARY', - 'OP_CLOSE', - 'OP_PING', - 'OP_PONG', - 'Frame', - 'prepare_data', - 'encode_data', - 'parse_close', - 'serialize_close', + "DATA_OPCODES", + "CTRL_OPCODES", + "OP_CONT", + "OP_TEXT", + "OP_BINARY", + "OP_CLOSE", + "OP_PING", + "OP_PONG", + "Frame", + "prepare_data", + "encode_data", + "parse_close", + "serialize_close", ] DATA_OPCODES = OP_CONT, OP_TEXT, OP_BINARY = 0x00, 0x01, 0x02 @@ -48,7 +48,7 @@ EXTERNAL_CLOSE_CODES = [1000, 1001, 1002, 1003, 1007, 1008, 1009, 1010, 1011] FrameData = collections.namedtuple( - 'FrameData', ['fin', 'opcode', 'data', 'rsv1', 'rsv2', 'rsv3'] + "FrameData", ["fin", "opcode", "data", "rsv1", "rsv2", "rsv3"] ) @@ -98,7 +98,7 @@ def read(cls, reader, *, mask, max_size=None, extensions=None): """ # Read the header. data = yield from reader(2) - head1, head2 = struct.unpack('!BB', data) + head1, head2 = struct.unpack("!BB", data) # While not Pythonic, this is marginally faster than calling bool(). fin = True if head1 & 0b10000000 else False @@ -113,10 +113,10 @@ def read(cls, reader, *, mask, max_size=None, extensions=None): length = head2 & 0b01111111 if length == 126: data = yield from reader(2) - length, = struct.unpack('!H', data) + length, = struct.unpack("!H", data) elif length == 127: data = yield from reader(8) - length, = struct.unpack('!Q', data) + length, = struct.unpack("!Q", data) if max_size is not None and length > max_size: raise PayloadTooBig( "Payload length exceeds size limit ({} > {} bytes)".format( @@ -187,14 +187,14 @@ def write(frame, writer, *, mask, extensions=None): length = len(frame.data) if length < 126: - output.write(struct.pack('!BB', head1, head2 | length)) + output.write(struct.pack("!BB", head1, head2 | length)) elif length < 65536: - output.write(struct.pack('!BBH', head1, head2 | 126, length)) + output.write(struct.pack("!BBH", head1, head2 | 126, length)) else: - output.write(struct.pack('!BBQ', head1, head2 | 127, length)) + output.write(struct.pack("!BBQ", head1, head2 | 127, length)) if mask: - mask_bits = struct.pack('!I', random.getrandbits(32)) + mask_bits = struct.pack("!I", random.getrandbits(32)) output.write(mask_bits) # Prepare the data. 
@@ -252,7 +252,7 @@ def prepare_data(data): """ if isinstance(data, str): - return OP_TEXT, data.encode('utf-8') + return OP_TEXT, data.encode("utf-8") elif isinstance(data, collections.abc.ByteString): return OP_BINARY, data elif isinstance(data, memoryview): @@ -279,7 +279,7 @@ def encode_data(data): """ if isinstance(data, str): - return data.encode('utf-8') + return data.encode("utf-8") elif isinstance(data, collections.abc.ByteString): return bytes(data) elif isinstance(data, memoryview): @@ -301,12 +301,12 @@ def parse_close(data): """ length = len(data) if length >= 2: - code, = struct.unpack('!H', data[:2]) + code, = struct.unpack("!H", data[:2]) check_close(code) - reason = data[2:].decode('utf-8') + reason = data[2:].decode("utf-8") return code, reason elif length == 0: - return 1005, '' + return 1005, "" else: assert length == 1 raise WebSocketProtocolError("Close frame too short") @@ -320,7 +320,7 @@ def serialize_close(code, reason): """ check_close(code) - return struct.pack('!H', code) + reason.encode('utf-8') + return struct.pack("!H", code) + reason.encode("utf-8") def check_close(code): diff --git a/src/websockets/handshake.py b/src/websockets/handshake.py index cc4248974..e6bd61fab 100644 --- a/src/websockets/handshake.py +++ b/src/websockets/handshake.py @@ -41,9 +41,9 @@ from .http import MultipleValuesError -__all__ = ['build_request', 'check_request', 'build_response', 'check_response'] +__all__ = ["build_request", "check_request", "build_response", "check_response"] -GUID = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11' +GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" def build_request(headers): @@ -55,10 +55,10 @@ def build_request(headers): """ raw_key = bytes(random.getrandbits(8) for _ in range(16)) key = base64.b64encode(raw_key).decode() - headers['Upgrade'] = 'websocket' - headers['Connection'] = 'Upgrade' - headers['Sec-WebSocket-Key'] = key - headers['Sec-WebSocket-Version'] = '13' + headers["Upgrade"] = "websocket" + headers["Connection"] = "Upgrade" + headers["Sec-WebSocket-Key"] = key + headers["Sec-WebSocket-Version"] = "13" return key @@ -79,46 +79,46 @@ def check_request(headers): """ connection = sum( - [parse_connection(value) for value in headers.get_all('Connection')], [] + [parse_connection(value) for value in headers.get_all("Connection")], [] ) - if not any(value.lower() == 'upgrade' for value in connection): - raise InvalidUpgrade('Connection', connection) + if not any(value.lower() == "upgrade" for value in connection): + raise InvalidUpgrade("Connection", connection) - upgrade = sum([parse_upgrade(value) for value in headers.get_all('Upgrade')], []) + upgrade = sum([parse_upgrade(value) for value in headers.get_all("Upgrade")], []) # For compatibility with non-strict implementations, ignore case when # checking the Upgrade header. It's supposed to be 'WebSocket'. 
- if not (len(upgrade) == 1 and upgrade[0].lower() == 'websocket'): - raise InvalidUpgrade('Upgrade', upgrade) + if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"): + raise InvalidUpgrade("Upgrade", upgrade) try: - s_w_key = headers['Sec-WebSocket-Key'] + s_w_key = headers["Sec-WebSocket-Key"] except KeyError: - raise InvalidHeader('Sec-WebSocket-Key') + raise InvalidHeader("Sec-WebSocket-Key") except MultipleValuesError: raise InvalidHeader( - 'Sec-WebSocket-Key', "more than one Sec-WebSocket-Key header found" + "Sec-WebSocket-Key", "more than one Sec-WebSocket-Key header found" ) try: raw_key = base64.b64decode(s_w_key.encode(), validate=True) except binascii.Error: - raise InvalidHeaderValue('Sec-WebSocket-Key', s_w_key) + raise InvalidHeaderValue("Sec-WebSocket-Key", s_w_key) if len(raw_key) != 16: - raise InvalidHeaderValue('Sec-WebSocket-Key', s_w_key) + raise InvalidHeaderValue("Sec-WebSocket-Key", s_w_key) try: - s_w_version = headers['Sec-WebSocket-Version'] + s_w_version = headers["Sec-WebSocket-Version"] except KeyError: - raise InvalidHeader('Sec-WebSocket-Version') + raise InvalidHeader("Sec-WebSocket-Version") except MultipleValuesError: raise InvalidHeader( - 'Sec-WebSocket-Version', "more than one Sec-WebSocket-Version header found" + "Sec-WebSocket-Version", "more than one Sec-WebSocket-Version header found" ) - if s_w_version != '13': - raise InvalidHeaderValue('Sec-WebSocket-Version', s_w_version) + if s_w_version != "13": + raise InvalidHeaderValue("Sec-WebSocket-Version", s_w_version) return s_w_key @@ -130,9 +130,9 @@ def build_response(headers, key): ``key`` comes from :func:`check_request`. """ - headers['Upgrade'] = 'websocket' - headers['Connection'] = 'Upgrade' - headers['Sec-WebSocket-Accept'] = accept(key) + headers["Upgrade"] = "websocket" + headers["Connection"] = "Upgrade" + headers["Sec-WebSocket-Accept"] = accept(key) def check_response(headers, key): @@ -152,30 +152,30 @@ def check_response(headers, key): """ connection = sum( - [parse_connection(value) for value in headers.get_all('Connection')], [] + [parse_connection(value) for value in headers.get_all("Connection")], [] ) - if not any(value.lower() == 'upgrade' for value in connection): - raise InvalidUpgrade('Connection', connection) + if not any(value.lower() == "upgrade" for value in connection): + raise InvalidUpgrade("Connection", connection) - upgrade = sum([parse_upgrade(value) for value in headers.get_all('Upgrade')], []) + upgrade = sum([parse_upgrade(value) for value in headers.get_all("Upgrade")], []) # For compatibility with non-strict implementations, ignore case when # checking the Upgrade header. It's supposed to be 'WebSocket'. 
- if not (len(upgrade) == 1 and upgrade[0].lower() == 'websocket'): - raise InvalidUpgrade('Upgrade', upgrade) + if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"): + raise InvalidUpgrade("Upgrade", upgrade) try: - s_w_accept = headers['Sec-WebSocket-Accept'] + s_w_accept = headers["Sec-WebSocket-Accept"] except KeyError: - raise InvalidHeader('Sec-WebSocket-Accept') + raise InvalidHeader("Sec-WebSocket-Accept") except MultipleValuesError: raise InvalidHeader( - 'Sec-WebSocket-Accept', "more than one Sec-WebSocket-Accept header found" + "Sec-WebSocket-Accept", "more than one Sec-WebSocket-Accept header found" ) if s_w_accept != accept(key): - raise InvalidHeaderValue('Sec-WebSocket-Accept', s_w_accept) + raise InvalidHeaderValue("Sec-WebSocket-Accept", s_w_accept) def accept(key): diff --git a/src/websockets/headers.py b/src/websockets/headers.py index 937962376..6151b16db 100644 --- a/src/websockets/headers.py +++ b/src/websockets/headers.py @@ -14,12 +14,12 @@ __all__ = [ - 'parse_connection', - 'parse_upgrade', - 'parse_extension_list', - 'build_extension_list', - 'parse_subprotocol_list', - 'build_subprotocol_list', + "parse_connection", + "parse_upgrade", + "parse_extension_list", + "build_extension_list", + "parse_subprotocol_list", + "build_subprotocol_list", ] @@ -40,7 +40,7 @@ def peek_ahead(string, pos): return None if pos == len(string) else string[pos] -_OWS_re = re.compile(r'[\t ]*') +_OWS_re = re.compile(r"[\t ]*") def parse_OWS(string, pos): @@ -57,7 +57,7 @@ def parse_OWS(string, pos): return match.end() -_token_re = re.compile(r'[-!#$%&\'*+.^_`|~0-9a-zA-Z]+') +_token_re = re.compile(r"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+") def parse_token(string, pos, header_name): @@ -80,7 +80,7 @@ def parse_token(string, pos, header_name): ) -_unquote_re = re.compile(r'\\([\x09\x20-\x7e\x80-\xff])') +_unquote_re = re.compile(r"\\([\x09\x20-\x7e\x80-\xff])") def parse_quoted_string(string, pos, header_name): @@ -97,7 +97,7 @@ def parse_quoted_string(string, pos, header_name): raise InvalidHeaderFormat( header_name, "expected quoted string", string=string, pos=pos ) - return _unquote_re.sub(r'\1', match.group()[1:-1]), match.end() + return _unquote_re.sub(r"\1", match.group()[1:-1]), match.end() def parse_list(parse_item, string, pos, header_name): @@ -125,7 +125,7 @@ def parse_list(parse_item, string, pos, header_name): # while loops that remove extra delimiters. # Remove extra delimiters before the first item. - while peek_ahead(string, pos) == ',': + while peek_ahead(string, pos) == ",": pos = parse_OWS(string, pos + 1) items = [] @@ -140,7 +140,7 @@ def parse_list(parse_item, string, pos, header_name): break # There must be a delimiter after each element except the last one. - if peek_ahead(string, pos) == ',': + if peek_ahead(string, pos) == ",": pos = parse_OWS(string, pos + 1) else: raise InvalidHeaderFormat( @@ -148,7 +148,7 @@ def parse_list(parse_item, string, pos, header_name): ) # Remove extra delimiters before the next item. - while peek_ahead(string, pos) == ',': + while peek_ahead(string, pos) == ",": pos = parse_OWS(string, pos + 1) # We may have reached the end of the string. @@ -171,11 +171,11 @@ def parse_connection(string): Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. """ - return parse_list(parse_token, string, 0, 'Connection') + return parse_list(parse_token, string, 0, "Connection") _protocol_re = re.compile( - r'[-!#$%&\'*+.^_`|~0-9a-zA-Z]+(?:/[-!#$%&\'*+.^_`|~0-9a-zA-Z]+)?' 
+ r"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+(?:/[-!#$%&\'*+.^_`|~0-9a-zA-Z]+)?" ) @@ -205,7 +205,7 @@ def parse_upgrade(string): Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. """ - return parse_list(parse_protocol, string, 0, 'Upgrade') + return parse_list(parse_protocol, string, 0, "Upgrade") def parse_extension_param(string, pos, header_name): @@ -221,7 +221,7 @@ def parse_extension_param(string, pos, header_name): name, pos = parse_token(string, pos, header_name) pos = parse_OWS(string, pos) # Extract parameter string, if there is one. - if peek_ahead(string, pos) == '=': + if peek_ahead(string, pos) == "=": pos = parse_OWS(string, pos + 1) if peek_ahead(string, pos) == '"': pos_before = pos # for proper error reporting below @@ -259,7 +259,7 @@ def parse_extension(string, pos, header_name): pos = parse_OWS(string, pos) # Extract all parameters. parameters = [] - while peek_ahead(string, pos) == ';': + while peek_ahead(string, pos) == ";": pos = parse_OWS(string, pos + 1) parameter, pos = parse_extension_param(string, pos, header_name) parameters.append(parameter) @@ -288,7 +288,7 @@ def parse_extension_list(string): Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. """ - return parse_list(parse_extension, string, 0, 'Sec-WebSocket-Extensions') + return parse_list(parse_extension, string, 0, "Sec-WebSocket-Extensions") def build_extension(name, parameters): @@ -298,11 +298,11 @@ def build_extension(name, parameters): This is the reverse of :func:`parse_extension`. """ - return '; '.join( + return "; ".join( [name] + [ # Quoted strings aren't necessary because values are always tokens. - name if value is None else '{}={}'.format(name, value) + name if value is None else "{}={}".format(name, value) for name, value in parameters ] ) @@ -315,7 +315,7 @@ def build_extension_list(extensions): This is the reverse of :func:`parse_extension_list`. """ - return ', '.join( + return ", ".join( build_extension(name, parameters) for name, parameters in extensions ) @@ -327,7 +327,7 @@ def parse_subprotocol_list(string): Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. """ - return parse_list(parse_token, string, 0, 'Sec-WebSocket-Protocol') + return parse_list(parse_token, string, 0, "Sec-WebSocket-Protocol") def build_subprotocol_list(protocols): @@ -337,7 +337,7 @@ def build_subprotocol_list(protocols): This is the reverse of :func:`parse_subprotocol_list`. 
""" - return ', '.join(protocols) + return ", ".join(protocols) def build_basic_auth(username, password): @@ -346,7 +346,7 @@ def build_basic_auth(username, password): """ # https://tools.ietf.org/html/rfc7617#section-2 - assert ':' not in username - user_pass = '{}:{}'.format(username, password) + assert ":" not in username + user_pass = "{}:{}".format(username, password) basic_credentials = base64.b64encode(user_pass.encode()).decode() - return 'Basic ' + basic_credentials + return "Basic " + basic_credentials diff --git a/src/websockets/http.py b/src/websockets/http.py index 5062c03d7..ea17e0a2e 100644 --- a/src/websockets/http.py +++ b/src/websockets/http.py @@ -16,24 +16,24 @@ __all__ = [ - 'Headers', - 'MultipleValuesError', - 'read_request', - 'read_response', - 'USER_AGENT', + "Headers", + "MultipleValuesError", + "read_request", + "read_response", + "USER_AGENT", ] MAX_HEADERS = 256 MAX_LINE = 4096 -USER_AGENT = 'Python/{} websockets/{}'.format(sys.version[:3], websockets_version) +USER_AGENT = "Python/{} websockets/{}".format(sys.version[:3], websockets_version) # See https://tools.ietf.org/html/rfc7230#appendix-B. # Regex for validating header names. -_token_re = re.compile(rb'[-!#$%&\'*+.^_`|~0-9a-zA-Z]+') +_token_re = re.compile(rb"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+") # Regex for validating header values. @@ -46,7 +46,7 @@ # See also https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189 -_value_re = re.compile(rb'[\x09\x20-\x7e\x80-\xff]*') +_value_re = re.compile(rb"[\x09\x20-\x7e\x80-\xff]*") @asyncio.coroutine @@ -79,13 +79,13 @@ def read_request(stream): request_line = yield from read_line(stream) # This may raise "ValueError: not enough values to unpack" - method, path, version = request_line.split(b' ', 2) + method, path, version = request_line.split(b" ", 2) - if method != b'GET': + if method != b"GET": raise ValueError("Unsupported HTTP method: %r" % method) - if version != b'HTTP/1.1': + if version != b"HTTP/1.1": raise ValueError("Unsupported HTTP version: %r" % version) - path = path.decode('ascii', 'surrogateescape') + path = path.decode("ascii", "surrogateescape") headers = yield from read_headers(stream) @@ -120,9 +120,9 @@ def read_response(stream): status_line = yield from read_line(stream) # This may raise "ValueError: not enough values to unpack" - version, status_code, reason = status_line.split(b' ', 2) + version, status_code, reason = status_line.split(b" ", 2) - if version != b'HTTP/1.1': + if version != b"HTTP/1.1": raise ValueError("Unsupported HTTP version: %r" % version) # This may raise "ValueError: invalid literal for int() with base 10" status_code = int(status_code) @@ -156,19 +156,19 @@ def read_headers(stream): headers = Headers() for _ in range(MAX_HEADERS + 1): line = yield from read_line(stream) - if line == b'': + if line == b"": break # This may raise "ValueError: not enough values to unpack" - name, value = line.split(b':', 1) + name, value = line.split(b":", 1) if not _token_re.fullmatch(name): raise ValueError("Invalid HTTP header name: %r" % name) - value = value.strip(b' \t') + value = value.strip(b" \t") if not _value_re.fullmatch(value): raise ValueError("Invalid HTTP header value: %r" % value) - name = name.decode('ascii') # guaranteed to be ASCII at this point - value = value.decode('ascii', 'surrogateescape') + name = name.decode("ascii") # guaranteed to be ASCII at this point + value = value.decode("ascii", "surrogateescape") headers[name] = value else: @@ -193,7 +193,7 @@ def read_line(stream): if len(line) > MAX_LINE: raise 
ValueError("Line too long") # Not mandatory but safe - https://tools.ietf.org/html/rfc7230#section-3.5 - if not line.endswith(b'\r\n'): + if not line.endswith(b"\r\n"): raise ValueError("Line without CRLF") return line[:-2] @@ -248,7 +248,7 @@ class Headers(collections.abc.MutableMapping): """ - __slots__ = ['_dict', '_list'] + __slots__ = ["_dict", "_list"] def __init__(self, *args, **kwargs): self._dict = {} @@ -258,12 +258,12 @@ def __init__(self, *args, **kwargs): def __str__(self): return ( - ''.join('{}: {}\r\n'.format(key, value) for key, value in self._list) - + '\r\n' + "".join("{}: {}\r\n".format(key, value) for key, value in self._list) + + "\r\n" ) def __repr__(self): - return '{}({})'.format(self.__class__.__name__, repr(self._list)) + return "{}({})".format(self.__class__.__name__, repr(self._list)) def copy(self): copy = self.__class__() diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 52e39a2af..d7d7282a1 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -29,7 +29,7 @@ from .handshake import * -__all__ = ['WebSocketCommonProtocol'] +__all__ = ["WebSocketCommonProtocol"] logger = logging.getLogger(__name__) @@ -37,7 +37,7 @@ # On Python ≥ 3.7, silence a deprecation warning that we can't address before # dropping support for Python < 3.5. warnings.filterwarnings( - action='ignore', + action="ignore", message=r"'with \(yield from lock\)' is deprecated use 'async with lock' instead", category=DeprecationWarning, ) @@ -163,7 +163,7 @@ class WebSocketCommonProtocol(asyncio.StreamReaderProtocol): # side behavior: masking the payload and closing the underlying TCP # connection. Set is_client and side to pick a side. is_client = None - side = 'undefined' + side = "undefined" def __init__( self, @@ -236,7 +236,7 @@ def __init__( # The close code and reason are set when receiving a close frame or # losing the TCP connection. self.close_code = None - self.close_reason = '' + self.close_reason = "" # Completed when the connection state becomes CLOSED. Translates the # :meth:`connection_lost()` callback to a :class:`~asyncio.Future` @@ -313,7 +313,7 @@ def local_address(self): """ if self.writer is None: return None - return self.writer.get_extra_info('sockname') + return self.writer.get_extra_info("sockname") @property def remote_address(self): @@ -326,7 +326,7 @@ def remote_address(self): """ if self.writer is None: return None - return self.writer.get_extra_info('peername') + return self.writer.get_extra_info("peername") @property def open(self): @@ -498,7 +498,7 @@ def send(self, data): yield from self.write_frame(False, OP_CONT, data) # Final fragment. - yield from self.write_frame(True, OP_CONT, b'') + yield from self.write_frame(True, OP_CONT, b"") # Fragmented message -- asynchronous iterator @@ -508,7 +508,7 @@ def send(self, data): raise TypeError("data must be bytes, str, or iterable") @asyncio.coroutine - def close(self, code=1000, reason=''): + def close(self, code=1000, reason=""): """ This coroutine performs the closing handshake. @@ -588,7 +588,7 @@ def ping(self, data=None): # Generate a unique random payload otherwise. while data is None or data in self.pings: - data = struct.pack('!I', random.getrandbits(32)) + data = struct.pack("!I", random.getrandbits(32)) self.pings[data] = asyncio.Future(loop=self.loop) @@ -597,7 +597,7 @@ def ping(self, data=None): return asyncio.shield(self.pings[data]) @asyncio.coroutine - def pong(self, data=b''): + def pong(self, data=b""): """ This coroutine sends a pong. 
@@ -751,13 +751,13 @@ def read_message(self): # Shortcut for the common case - no fragmentation if frame.fin: - return frame.data.decode('utf-8') if text else frame.data + return frame.data.decode("utf-8") if text else frame.data # 5.4. Fragmentation chunks = [] max_size = self.max_size if text: - decoder = codecs.getincrementaldecoder('utf-8')(errors='strict') + decoder = codecs.getincrementaldecoder("utf-8")(errors="strict") if max_size is None: def append(frame): @@ -795,7 +795,7 @@ def append(frame): raise WebSocketProtocolError("Unexpected opcode") append(frame) - return ('' if text else b'').join(chunks) + return ("" if text else b"").join(chunks) @asyncio.coroutine def read_data_frame(self, max_size): @@ -825,7 +825,7 @@ def read_data_frame(self, max_size): elif frame.opcode == OP_PING: # Answer pings. # Replace by frame.data.hex() when dropping Python < 3.5. - ping_hex = binascii.hexlify(frame.data).decode() or '[empty]' + ping_hex = binascii.hexlify(frame.data).decode() or "[empty]" logger.debug( "%s - received ping, sending pong: %s", self.side, ping_hex ) @@ -841,17 +841,17 @@ def read_data_frame(self, max_size): ping_id, pong_waiter = self.pings.popitem(0) ping_ids.append(ping_id) pong_waiter.set_result(None) - pong_hex = binascii.hexlify(frame.data).decode() or '[empty]' + pong_hex = binascii.hexlify(frame.data).decode() or "[empty]" logger.debug( "%s - received solicited pong: %s", self.side, pong_hex ) ping_ids = ping_ids[:-1] if ping_ids: - pings_hex = ', '.join( - binascii.hexlify(ping_id).decode() or '[empty]' + pings_hex = ", ".join( + binascii.hexlify(ping_id).decode() or "[empty]" for ping_id in ping_ids ) - plural = 's' if len(ping_ids) > 1 else '' + plural = "s" if len(ping_ids) > 1 else "" logger.debug( "%s - acknowledged previous ping%s: %s", self.side, @@ -859,7 +859,7 @@ def read_data_frame(self, max_size): pings_hex, ) else: - pong_hex = binascii.hexlify(frame.data).decode() or '[empty]' + pong_hex = binascii.hexlify(frame.data).decode() or "[empty]" logger.debug( "%s - received unsolicited pong: %s", self.side, pong_hex ) @@ -935,7 +935,7 @@ def writer_is_closing(self): return transport._closed @asyncio.coroutine - def write_close_frame(self, data=b''): + def write_close_frame(self, data=b""): """ Write a close frame if and only if the connection state is OPEN. @@ -1083,7 +1083,7 @@ def wait_for_connection_lost(self): # and the moment this coroutine resumes running. return self.connection_lost_waiter.done() - def fail_connection(self, code=1006, reason=''): + def fail_connection(self, code=1006, reason=""): """ 7.1.7. 
Fail the WebSocket Connection @@ -1161,11 +1161,11 @@ def abort_keepalive_pings(self): ping.set_exception(exc) if self.pings: - pings_hex = ', '.join( - binascii.hexlify(ping_id).decode() or '[empty]' + pings_hex = ", ".join( + binascii.hexlify(ping_id).decode() or "[empty]" for ping_id in self.pings ) - plural = 's' if len(self.pings) > 1 else '' + plural = "s" if len(self.pings) > 1 else "" logger.debug( "%s - aborted pending ping%s: %s", self.side, plural, pings_hex ) @@ -1231,7 +1231,7 @@ def connection_lost(self, exc): "%s x code = %d, reason = %s", self.side, self.close_code, - self.close_reason or '[no reason]', + self.close_reason or "[no reason]", ) self.abort_keepalive_pings() # If self.connection_lost_waiter isn't pending, that's a bug, because: diff --git a/src/websockets/py35/client.py b/src/websockets/py35/client.py index bd902841a..ccb098483 100644 --- a/src/websockets/py35/client.py +++ b/src/websockets/py35/client.py @@ -31,11 +31,11 @@ async def __await_impl__(self): raise except RedirectHandshake as e: if self._wsuri.secure and not e.wsuri.secure: - raise InvalidHandshake('Redirect dropped TLS') + raise InvalidHandshake("Redirect dropped TLS") self._wsuri = e.wsuri continue # redirection chain continues else: - raise InvalidHandshake('Maximum redirects exceeded') + raise InvalidHandshake("Maximum redirects exceeded") self.ws_client = protocol return protocol diff --git a/src/websockets/server.py b/src/websockets/server.py index 1d88e73a1..e207db2bc 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -35,7 +35,7 @@ from .protocol import State, WebSocketCommonProtocol -__all__ = ['serve', 'unix_serve', 'WebSocketServerProtocol'] +__all__ = ["serve", "unix_serve", "WebSocketServerProtocol"] logger = logging.getLogger(__name__) @@ -53,7 +53,7 @@ class WebSocketServerProtocol(WebSocketCommonProtocol): """ is_client = False - side = 'server' + side = "server" def __init__( self, @@ -69,9 +69,9 @@ def __init__( **kwds ): # For backwards-compatibility with 6.0 or earlier. - if origins is not None and '' in origins: + if origins is not None and "" in origins: warnings.warn("use None instead of '' in origins", DeprecationWarning) - origins = [None if origin == '' else origin for origin in origins] + origins = [None if origin == "" else origin for origin in origins] self.ws_handler = ws_handler self.ws_server = ws_server self.origins = origins @@ -129,7 +129,7 @@ def handler(self): logger.debug("Invalid upgrade", exc_info=True) status, headers, body = ( UPGRADE_REQUIRED, - [('Upgrade', 'websocket')], + [("Upgrade", "websocket")], (str(exc) + "\n").encode(), ) elif isinstance(exc, InvalidHandshake): @@ -150,11 +150,11 @@ def handler(self): if not isinstance(headers, Headers): headers = Headers(headers) - headers.setdefault('Date', email.utils.formatdate(usegmt=True)) - headers.setdefault('Server', USER_AGENT) - headers.setdefault('Content-Length', str(len(body))) - headers.setdefault('Content-Type', 'text/plain') - headers.setdefault('Connection', 'close') + headers.setdefault("Date", email.utils.formatdate(usegmt=True)) + headers.setdefault("Server", USER_AGENT) + headers.setdefault("Content-Length", str(len(body))) + headers.setdefault("Content-Type", "text/plain") + headers.setdefault("Connection", "close") self.write_http_response(status, headers, body) self.fail_connection() @@ -232,7 +232,7 @@ def write_http_response(self, status, headers, body=None): # Since the status line and headers only contain ASCII characters, # we can keep this simple. 
- response = 'HTTP/1.1 {status.value} {status.phrase}\r\n'.format(status=status) + response = "HTTP/1.1 {status.value} {status.phrase}\r\n".format(status=status) response += str(headers) self.writer.write(response.encode()) @@ -287,9 +287,9 @@ def process_origin(headers, origins=None): # "The user agent MUST NOT include more than one Origin header field" # per https://tools.ietf.org/html/rfc6454#section-7.3. try: - origin = headers.get('Origin') + origin = headers.get("Origin") except MultipleValuesError: - raise InvalidHeader('Origin', "more than one Origin header found") + raise InvalidHeader("Origin", "more than one Origin header found") if origins is not None: if origin not in origins: raise InvalidOrigin(origin) @@ -332,7 +332,7 @@ def process_extensions(headers, available_extensions): response_header = [] accepted_extensions = [] - header_values = headers.get_all('Sec-WebSocket-Extensions') + header_values = headers.get_all("Sec-WebSocket-Extensions") if header_values and available_extensions: @@ -386,7 +386,7 @@ def process_subprotocol(self, headers, available_subprotocols): """ subprotocol = None - header_values = headers.get_all('Sec-WebSocket-Protocol') + header_values = headers.get_all("Sec-WebSocket-Protocol") if header_values and available_subprotocols: @@ -498,10 +498,10 @@ def handshake( build_response(response_headers, key) if extensions_header is not None: - response_headers['Sec-WebSocket-Extensions'] = extensions_header + response_headers["Sec-WebSocket-Extensions"] = extensions_header if protocol_header is not None: - response_headers['Sec-WebSocket-Protocol'] = protocol_header + response_headers["Sec-WebSocket-Protocol"] = protocol_header if extra_headers is not None: if callable(extra_headers): @@ -513,8 +513,8 @@ def handshake( for name, value in extra_headers: response_headers[name] = value - response_headers.setdefault('Date', email.utils.formatdate(usegmt=True)) - response_headers.setdefault('Server', USER_AGENT) + response_headers.setdefault("Date", email.utils.formatdate(usegmt=True)) + response_headers.setdefault("Server", USER_AGENT) self.write_http_response(SWITCHING_PROTOCOLS, response_headers) @@ -778,7 +778,7 @@ def __init__( legacy_recv=False, klass=WebSocketServerProtocol, timeout=10, - compression='deflate', + compression="deflate", origins=None, extensions=None, subprotocols=None, @@ -802,9 +802,9 @@ def __init__( ws_server = WebSocketServer(loop) - secure = kwds.get('ssl') is not None + secure = kwds.get("ssl") is not None - if compression == 'deflate': + if compression == "deflate": if extensions is None: extensions = [] if not any( diff --git a/src/websockets/uri.py b/src/websockets/uri.py index d793fc6aa..b6e1ad0ce 100644 --- a/src/websockets/uri.py +++ b/src/websockets/uri.py @@ -12,10 +12,10 @@ from .exceptions import InvalidURI -__all__ = ['parse_uri', 'WebSocketURI'] +__all__ = ["parse_uri", "WebSocketURI"] WebSocketURI = collections.namedtuple( - 'WebSocketURI', ['secure', 'host', 'port', 'resource_name', 'user_info'] + "WebSocketURI", ["secure", "host", "port", "resource_name", "user_info"] ) WebSocketURI.__doc__ = """WebSocket URI. 
@@ -42,19 +42,19 @@ def parse_uri(uri): """ uri = urllib.parse.urlparse(uri) try: - assert uri.scheme in ['ws', 'wss'] - assert uri.params == '' - assert uri.fragment == '' + assert uri.scheme in ["ws", "wss"] + assert uri.params == "" + assert uri.fragment == "" assert uri.hostname is not None except AssertionError as exc: raise InvalidURI("{} isn't a valid URI".format(uri)) from exc - secure = uri.scheme == 'wss' + secure = uri.scheme == "wss" host = uri.hostname port = uri.port or (443 if secure else 80) - resource_name = uri.path or '/' + resource_name = uri.path or "/" if uri.query: - resource_name += '?' + uri.query + resource_name += "?" + uri.query user_info = None if uri.username or uri.password: user_info = (uri.username, uri.password) diff --git a/src/websockets/utils.py b/src/websockets/utils.py index def997841..193f8fc32 100644 --- a/src/websockets/utils.py +++ b/src/websockets/utils.py @@ -1,7 +1,7 @@ import itertools -__all__ = ['apply_mask'] +__all__ = ["apply_mask"] def apply_mask(data, mask): diff --git a/src/websockets/version.py b/src/websockets/version.py index fe9ed183b..96b948d8a 100644 --- a/src/websockets/version.py +++ b/src/websockets/version.py @@ -1 +1 @@ -version = '7.0' +version = "7.0" diff --git a/tests/extensions/test_permessage_deflate.py b/tests/extensions/test_permessage_deflate.py index 0b7b78eae..80003ca2d 100644 --- a/tests/extensions/test_permessage_deflate.py +++ b/tests/extensions/test_permessage_deflate.py @@ -39,7 +39,7 @@ def assertExtensionEqual(self, extension1, extension2): class ClientPerMessageDeflateFactoryTests(unittest.TestCase, ExtensionTestsMixin): def test_name(self): - assert ClientPerMessageDeflateFactory.name == 'permessage-deflate' + assert ClientPerMessageDeflateFactory.name == "permessage-deflate" def test_init(self): for config in [ @@ -48,7 +48,7 @@ def test_init(self): (True, False, None, 8), # client_max_window_bits ≥ 8 (True, True, None, 15), # client_max_window_bits ≤ 15 (False, False, None, True), # client_max_window_bits - (False, False, None, None, {'memLevel': 4}), + (False, False, None, None, {"memLevel": 4}), ]: with self.subTest(config=config): # This does not raise an exception. 
@@ -61,7 +61,7 @@ def test_init_error(self): (True, False, 16, 15), # server_max_window_bits > 15 (True, True, 15, 16), # client_max_window_bits > 15 (False, False, True, None), # server_max_window_bits - (False, False, None, None, {'wbits': 11}), + (False, False, None, None, {"wbits": 11}), ]: with self.subTest(config=config): with self.assertRaises(ValueError): @@ -72,22 +72,22 @@ def test_get_request_params(self): # Test without any parameter ((False, False, None, None), []), # Test server_no_context_takeover - ((True, False, None, None), [('server_no_context_takeover', None)]), + ((True, False, None, None), [("server_no_context_takeover", None)]), # Test client_no_context_takeover - ((False, True, None, None), [('client_no_context_takeover', None)]), + ((False, True, None, None), [("client_no_context_takeover", None)]), # Test server_max_window_bits - ((False, False, 10, None), [('server_max_window_bits', '10')]), + ((False, False, 10, None), [("server_max_window_bits", "10")]), # Test client_max_window_bits - ((False, False, None, 10), [('client_max_window_bits', '10')]), - ((False, False, None, True), [('client_max_window_bits', None)]), + ((False, False, None, 10), [("client_max_window_bits", "10")]), + ((False, False, None, True), [("client_max_window_bits", None)]), # Test all parameters together ( (True, True, 12, 12), [ - ('server_no_context_takeover', None), - ('client_no_context_takeover', None), - ('server_max_window_bits', '12'), - ('client_max_window_bits', '12'), + ("server_no_context_takeover", None), + ("client_no_context_takeover", None), + ("server_max_window_bits", "12"), + ("client_max_window_bits", "12"), ], ), ]: @@ -99,167 +99,167 @@ def test_process_response_params(self): for config, response_params, result in [ # Test without any parameter ((False, False, None, None), [], (False, False, 15, 15)), - ((False, False, None, None), [('unknown', None)], InvalidParameterName), + ((False, False, None, None), [("unknown", None)], InvalidParameterName), # Test server_no_context_takeover ( (False, False, None, None), - [('server_no_context_takeover', None)], + [("server_no_context_takeover", None)], (True, False, 15, 15), ), ((True, False, None, None), [], NegotiationError), ( (True, False, None, None), - [('server_no_context_takeover', None)], + [("server_no_context_takeover", None)], (True, False, 15, 15), ), ( (True, False, None, None), - [('server_no_context_takeover', None)] * 2, + [("server_no_context_takeover", None)] * 2, DuplicateParameter, ), ( (True, False, None, None), - [('server_no_context_takeover', '42')], + [("server_no_context_takeover", "42")], InvalidParameterValue, ), # Test client_no_context_takeover ( (False, False, None, None), - [('client_no_context_takeover', None)], + [("client_no_context_takeover", None)], (False, True, 15, 15), ), ((False, True, None, None), [], (False, True, 15, 15)), ( (False, True, None, None), - [('client_no_context_takeover', None)], + [("client_no_context_takeover", None)], (False, True, 15, 15), ), ( (False, True, None, None), - [('client_no_context_takeover', None)] * 2, + [("client_no_context_takeover", None)] * 2, DuplicateParameter, ), ( (False, True, None, None), - [('client_no_context_takeover', '42')], + [("client_no_context_takeover", "42")], InvalidParameterValue, ), # Test server_max_window_bits ( (False, False, None, None), - [('server_max_window_bits', '7')], + [("server_max_window_bits", "7")], NegotiationError, ), ( (False, False, None, None), - [('server_max_window_bits', '10')], + 
[("server_max_window_bits", "10")], (False, False, 10, 15), ), ( (False, False, None, None), - [('server_max_window_bits', '16')], + [("server_max_window_bits", "16")], NegotiationError, ), ((False, False, 12, None), [], NegotiationError), ( (False, False, 12, None), - [('server_max_window_bits', '10')], + [("server_max_window_bits", "10")], (False, False, 10, 15), ), ( (False, False, 12, None), - [('server_max_window_bits', '12')], + [("server_max_window_bits", "12")], (False, False, 12, 15), ), ( (False, False, 12, None), - [('server_max_window_bits', '13')], + [("server_max_window_bits", "13")], NegotiationError, ), ( (False, False, 12, None), - [('server_max_window_bits', '12')] * 2, + [("server_max_window_bits", "12")] * 2, DuplicateParameter, ), ( (False, False, 12, None), - [('server_max_window_bits', '42')], + [("server_max_window_bits", "42")], InvalidParameterValue, ), # Test client_max_window_bits ( (False, False, None, None), - [('client_max_window_bits', '10')], + [("client_max_window_bits", "10")], NegotiationError, ), ((False, False, None, True), [], (False, False, 15, 15)), ( (False, False, None, True), - [('client_max_window_bits', '7')], + [("client_max_window_bits", "7")], NegotiationError, ), ( (False, False, None, True), - [('client_max_window_bits', '10')], + [("client_max_window_bits", "10")], (False, False, 15, 10), ), ( (False, False, None, True), - [('client_max_window_bits', '16')], + [("client_max_window_bits", "16")], NegotiationError, ), ((False, False, None, 12), [], (False, False, 15, 12)), ( (False, False, None, 12), - [('client_max_window_bits', '10')], + [("client_max_window_bits", "10")], (False, False, 15, 10), ), ( (False, False, None, 12), - [('client_max_window_bits', '12')], + [("client_max_window_bits", "12")], (False, False, 15, 12), ), ( (False, False, None, 12), - [('client_max_window_bits', '13')], + [("client_max_window_bits", "13")], NegotiationError, ), ( (False, False, None, 12), - [('client_max_window_bits', '12')] * 2, + [("client_max_window_bits", "12")] * 2, DuplicateParameter, ), ( (False, False, None, 12), - [('client_max_window_bits', '42')], + [("client_max_window_bits", "42")], InvalidParameterValue, ), # Test all parameters together ( (True, True, 12, 12), [ - ('server_no_context_takeover', None), - ('client_no_context_takeover', None), - ('server_max_window_bits', '10'), - ('client_max_window_bits', '10'), + ("server_no_context_takeover", None), + ("client_no_context_takeover", None), + ("server_max_window_bits", "10"), + ("client_max_window_bits", "10"), ], (True, True, 10, 10), ), ( (False, False, None, True), [ - ('server_no_context_takeover', None), - ('client_no_context_takeover', None), - ('server_max_window_bits', '10'), - ('client_max_window_bits', '10'), + ("server_no_context_takeover", None), + ("client_no_context_takeover", None), + ("server_max_window_bits", "10"), + ("client_max_window_bits", "10"), ], (True, True, 10, 10), ), ( (True, True, 12, 12), [ - ('server_no_context_takeover', None), - ('server_max_window_bits', '12'), + ("server_no_context_takeover", None), + ("server_max_window_bits", "12"), ], (True, True, 12, 12), ), @@ -284,7 +284,7 @@ def test_process_response_params_deduplication(self): class ServerPerMessageDeflateFactoryTests(unittest.TestCase, ExtensionTestsMixin): def test_name(self): - assert ServerPerMessageDeflateFactory.name == 'permessage-deflate' + assert ServerPerMessageDeflateFactory.name == "permessage-deflate" def test_init(self): for config in [ @@ -292,7 +292,7 @@ def test_init(self): 
(False, True, 15, None), # server_max_window_bits ≤ 15 (True, False, None, 8), # client_max_window_bits ≥ 8 (True, True, None, 15), # client_max_window_bits ≤ 15 - (False, False, None, None, {'memLevel': 4}), + (False, False, None, None, {"memLevel": 4}), ]: with self.subTest(config=config): # This does not raise an exception. @@ -306,7 +306,7 @@ def test_init_error(self): (True, True, 15, 16), # client_max_window_bits > 15 (False, False, None, True), # client_max_window_bits (False, False, True, None), # server_max_window_bits - (False, False, None, None, {'wbits': 11}), + (False, False, None, None, {"wbits": 11}), ]: with self.subTest(config=config): with self.assertRaises(ValueError): @@ -320,186 +320,186 @@ def test_process_request_params(self): ((False, False, None, None), [], [], (False, False, 15, 15)), ( (False, False, None, None), - [('unknown', None)], + [("unknown", None)], None, InvalidParameterName, ), # Test server_no_context_takeover ( (False, False, None, None), - [('server_no_context_takeover', None)], - [('server_no_context_takeover', None)], + [("server_no_context_takeover", None)], + [("server_no_context_takeover", None)], (False, True, 15, 15), ), ( (True, False, None, None), [], - [('server_no_context_takeover', None)], + [("server_no_context_takeover", None)], (False, True, 15, 15), ), ( (True, False, None, None), - [('server_no_context_takeover', None)], - [('server_no_context_takeover', None)], + [("server_no_context_takeover", None)], + [("server_no_context_takeover", None)], (False, True, 15, 15), ), ( (True, False, None, None), - [('server_no_context_takeover', None)] * 2, + [("server_no_context_takeover", None)] * 2, None, DuplicateParameter, ), ( (True, False, None, None), - [('server_no_context_takeover', '42')], + [("server_no_context_takeover", "42")], None, InvalidParameterValue, ), # Test client_no_context_takeover ( (False, False, None, None), - [('client_no_context_takeover', None)], - [('client_no_context_takeover', None)], # doesn't matter + [("client_no_context_takeover", None)], + [("client_no_context_takeover", None)], # doesn't matter (True, False, 15, 15), ), ( (False, True, None, None), [], - [('client_no_context_takeover', None)], + [("client_no_context_takeover", None)], (True, False, 15, 15), ), ( (False, True, None, None), - [('client_no_context_takeover', None)], - [('client_no_context_takeover', None)], # doesn't matter + [("client_no_context_takeover", None)], + [("client_no_context_takeover", None)], # doesn't matter (True, False, 15, 15), ), ( (False, True, None, None), - [('client_no_context_takeover', None)] * 2, + [("client_no_context_takeover", None)] * 2, None, DuplicateParameter, ), ( (False, True, None, None), - [('client_no_context_takeover', '42')], + [("client_no_context_takeover", "42")], None, InvalidParameterValue, ), # Test server_max_window_bits ( (False, False, None, None), - [('server_max_window_bits', '7')], + [("server_max_window_bits", "7")], None, NegotiationError, ), ( (False, False, None, None), - [('server_max_window_bits', '10')], - [('server_max_window_bits', '10')], + [("server_max_window_bits", "10")], + [("server_max_window_bits", "10")], (False, False, 15, 10), ), ( (False, False, None, None), - [('server_max_window_bits', '16')], + [("server_max_window_bits", "16")], None, NegotiationError, ), ( (False, False, 12, None), [], - [('server_max_window_bits', '12')], + [("server_max_window_bits", "12")], (False, False, 15, 12), ), ( (False, False, 12, None), - [('server_max_window_bits', '10')], - 
[('server_max_window_bits', '10')], + [("server_max_window_bits", "10")], + [("server_max_window_bits", "10")], (False, False, 15, 10), ), ( (False, False, 12, None), - [('server_max_window_bits', '12')], - [('server_max_window_bits', '12')], + [("server_max_window_bits", "12")], + [("server_max_window_bits", "12")], (False, False, 15, 12), ), ( (False, False, 12, None), - [('server_max_window_bits', '13')], - [('server_max_window_bits', '12')], + [("server_max_window_bits", "13")], + [("server_max_window_bits", "12")], (False, False, 15, 12), ), ( (False, False, 12, None), - [('server_max_window_bits', '12')] * 2, + [("server_max_window_bits", "12")] * 2, None, DuplicateParameter, ), ( (False, False, 12, None), - [('server_max_window_bits', '42')], + [("server_max_window_bits", "42")], None, InvalidParameterValue, ), # Test client_max_window_bits ( (False, False, None, None), - [('client_max_window_bits', None)], + [("client_max_window_bits", None)], [], (False, False, 15, 15), ), ( (False, False, None, None), - [('client_max_window_bits', '7')], + [("client_max_window_bits", "7")], None, InvalidParameterValue, ), ( (False, False, None, None), - [('client_max_window_bits', '10')], - [('client_max_window_bits', '10')], # doesn't matter + [("client_max_window_bits", "10")], + [("client_max_window_bits", "10")], # doesn't matter (False, False, 10, 15), ), ( (False, False, None, None), - [('client_max_window_bits', '16')], + [("client_max_window_bits", "16")], None, InvalidParameterValue, ), ((False, False, None, 12), [], None, NegotiationError), ( (False, False, None, 12), - [('client_max_window_bits', None)], - [('client_max_window_bits', '12')], + [("client_max_window_bits", None)], + [("client_max_window_bits", "12")], (False, False, 12, 15), ), ( (False, False, None, 12), - [('client_max_window_bits', '10')], - [('client_max_window_bits', '10')], + [("client_max_window_bits", "10")], + [("client_max_window_bits", "10")], (False, False, 10, 15), ), ( (False, False, None, 12), - [('client_max_window_bits', '12')], - [('client_max_window_bits', '12')], # doesn't matter + [("client_max_window_bits", "12")], + [("client_max_window_bits", "12")], # doesn't matter (False, False, 12, 15), ), ( (False, False, None, 12), - [('client_max_window_bits', '13')], - [('client_max_window_bits', '12')], # doesn't matter + [("client_max_window_bits", "13")], + [("client_max_window_bits", "12")], # doesn't matter (False, False, 12, 15), ), ( (False, False, None, 12), - [('client_max_window_bits', '12')] * 2, + [("client_max_window_bits", "12")] * 2, None, DuplicateParameter, ), ( (False, False, None, 12), - [('client_max_window_bits', '42')], + [("client_max_window_bits", "42")], None, InvalidParameterValue, ), @@ -507,43 +507,43 @@ def test_process_request_params(self): ( (True, True, 12, 12), [ - ('server_no_context_takeover', None), - ('client_no_context_takeover', None), - ('server_max_window_bits', '10'), - ('client_max_window_bits', '10'), + ("server_no_context_takeover", None), + ("client_no_context_takeover", None), + ("server_max_window_bits", "10"), + ("client_max_window_bits", "10"), ], [ - ('server_no_context_takeover', None), - ('client_no_context_takeover', None), - ('server_max_window_bits', '10'), - ('client_max_window_bits', '10'), + ("server_no_context_takeover", None), + ("client_no_context_takeover", None), + ("server_max_window_bits", "10"), + ("client_max_window_bits", "10"), ], (True, True, 10, 10), ), ( (False, False, None, None), [ - ('server_no_context_takeover', None), - 
('client_no_context_takeover', None), - ('server_max_window_bits', '10'), - ('client_max_window_bits', '10'), + ("server_no_context_takeover", None), + ("client_no_context_takeover", None), + ("server_max_window_bits", "10"), + ("client_max_window_bits", "10"), ], [ - ('server_no_context_takeover', None), - ('client_no_context_takeover', None), - ('server_max_window_bits', '10'), - ('client_max_window_bits', '10'), + ("server_no_context_takeover", None), + ("client_no_context_takeover", None), + ("server_max_window_bits", "10"), + ("client_max_window_bits", "10"), ], (True, True, 10, 10), ), ( (True, True, 12, 12), - [('client_max_window_bits', None)], + [("client_max_window_bits", None)], [ - ('server_no_context_takeover', None), - ('client_no_context_takeover', None), - ('server_max_window_bits', '12'), - ('client_max_window_bits', '12'), + ("server_no_context_takeover", None), + ("client_no_context_takeover", None), + ("server_max_window_bits", "12"), + ("client_max_window_bits", "12"), ], (True, True, 12, 12), ), @@ -581,7 +581,7 @@ def setUp(self): self.extension = PerMessageDeflate(False, False, 15, 15) def test_name(self): - assert self.extension.name == 'permessage-deflate' + assert self.extension.name == "permessage-deflate" def test_repr(self): self.assertExtensionEqual(eval(repr(self.extension)), self.extension) @@ -589,21 +589,21 @@ def test_repr(self): # Control frames aren't encoded or decoded. def test_no_encode_decode_ping_frame(self): - frame = Frame(True, OP_PING, b'') + frame = Frame(True, OP_PING, b"") self.assertEqual(self.extension.encode(frame), frame) self.assertEqual(self.extension.decode(frame), frame) def test_no_encode_decode_pong_frame(self): - frame = Frame(True, OP_PONG, b'') + frame = Frame(True, OP_PONG, b"") self.assertEqual(self.extension.encode(frame), frame) self.assertEqual(self.extension.decode(frame), frame) def test_no_encode_decode_close_frame(self): - frame = Frame(True, OP_CLOSE, serialize_close(1000, '')) + frame = Frame(True, OP_CLOSE, serialize_close(1000, "")) self.assertEqual(self.extension.encode(frame), frame) @@ -612,31 +612,31 @@ def test_no_encode_decode_close_frame(self): # Data frames are encoded and decoded. 
def test_encode_decode_text_frame(self): - frame = Frame(True, OP_TEXT, 'café'.encode('utf-8')) + frame = Frame(True, OP_TEXT, "café".encode("utf-8")) enc_frame = self.extension.encode(frame) - self.assertEqual(enc_frame, frame._replace(rsv1=True, data=b'JNL;\xbc\x12\x00')) + self.assertEqual(enc_frame, frame._replace(rsv1=True, data=b"JNL;\xbc\x12\x00")) dec_frame = self.extension.decode(enc_frame) self.assertEqual(dec_frame, frame) def test_encode_decode_binary_frame(self): - frame = Frame(True, OP_BINARY, b'tea') + frame = Frame(True, OP_BINARY, b"tea") enc_frame = self.extension.encode(frame) - self.assertEqual(enc_frame, frame._replace(rsv1=True, data=b'*IM\x04\x00')) + self.assertEqual(enc_frame, frame._replace(rsv1=True, data=b"*IM\x04\x00")) dec_frame = self.extension.decode(enc_frame) self.assertEqual(dec_frame, frame) def test_encode_decode_fragmented_text_frame(self): - frame1 = Frame(False, OP_TEXT, 'café'.encode('utf-8')) - frame2 = Frame(False, OP_CONT, ' & '.encode('utf-8')) - frame3 = Frame(True, OP_CONT, 'croissants'.encode('utf-8')) + frame1 = Frame(False, OP_TEXT, "café".encode("utf-8")) + frame2 = Frame(False, OP_CONT, " & ".encode("utf-8")) + frame3 = Frame(True, OP_CONT, "croissants".encode("utf-8")) enc_frame1 = self.extension.encode(frame1) enc_frame2 = self.extension.encode(frame2) @@ -644,13 +644,13 @@ def test_encode_decode_fragmented_text_frame(self): self.assertEqual( enc_frame1, - frame1._replace(rsv1=True, data=b'JNL;\xbc\x12\x00\x00\x00\xff\xff'), + frame1._replace(rsv1=True, data=b"JNL;\xbc\x12\x00\x00\x00\xff\xff"), ) self.assertEqual( - enc_frame2, frame2._replace(rsv1=True, data=b'RPS\x00\x00\x00\x00\xff\xff') + enc_frame2, frame2._replace(rsv1=True, data=b"RPS\x00\x00\x00\x00\xff\xff") ) self.assertEqual( - enc_frame3, frame3._replace(rsv1=True, data=b'J.\xca\xcf,.N\xcc+)\x06\x00') + enc_frame3, frame3._replace(rsv1=True, data=b"J.\xca\xcf,.N\xcc+)\x06\x00") ) dec_frame1 = self.extension.decode(enc_frame1) @@ -662,17 +662,17 @@ def test_encode_decode_fragmented_text_frame(self): self.assertEqual(dec_frame3, frame3) def test_encode_decode_fragmented_binary_frame(self): - frame1 = Frame(False, OP_TEXT, b'tea ') - frame2 = Frame(True, OP_CONT, b'time') + frame1 = Frame(False, OP_TEXT, b"tea ") + frame2 = Frame(True, OP_CONT, b"time") enc_frame1 = self.extension.encode(frame1) enc_frame2 = self.extension.encode(frame2) self.assertEqual( - enc_frame1, frame1._replace(rsv1=True, data=b'*IMT\x00\x00\x00\x00\xff\xff') + enc_frame1, frame1._replace(rsv1=True, data=b"*IMT\x00\x00\x00\x00\xff\xff") ) self.assertEqual( - enc_frame2, frame2._replace(rsv1=True, data=b'*\xc9\xccM\x05\x00') + enc_frame2, frame2._replace(rsv1=True, data=b"*\xc9\xccM\x05\x00") ) dec_frame1 = self.extension.decode(enc_frame1) @@ -682,21 +682,21 @@ def test_encode_decode_fragmented_binary_frame(self): self.assertEqual(dec_frame2, frame2) def test_no_decode_text_frame(self): - frame = Frame(True, OP_TEXT, 'café'.encode('utf-8')) + frame = Frame(True, OP_TEXT, "café".encode("utf-8")) # Try decoding a frame that wasn't encoded. self.assertEqual(self.extension.decode(frame), frame) def test_no_decode_binary_frame(self): - frame = Frame(True, OP_TEXT, b'tea') + frame = Frame(True, OP_TEXT, b"tea") # Try decoding a frame that wasn't encoded. 
self.assertEqual(self.extension.decode(frame), frame) def test_no_decode_fragmented_text_frame(self): - frame1 = Frame(False, OP_TEXT, 'café'.encode('utf-8')) - frame2 = Frame(False, OP_CONT, ' & '.encode('utf-8')) - frame3 = Frame(True, OP_CONT, 'croissants'.encode('utf-8')) + frame1 = Frame(False, OP_TEXT, "café".encode("utf-8")) + frame2 = Frame(False, OP_CONT, " & ".encode("utf-8")) + frame3 = Frame(True, OP_CONT, "croissants".encode("utf-8")) dec_frame1 = self.extension.decode(frame1) dec_frame2 = self.extension.decode(frame2) @@ -707,8 +707,8 @@ def test_no_decode_fragmented_text_frame(self): self.assertEqual(dec_frame3, frame3) def test_no_decode_fragmented_binary_frame(self): - frame1 = Frame(False, OP_TEXT, b'tea ') - frame2 = Frame(True, OP_CONT, b'time') + frame1 = Frame(False, OP_TEXT, b"tea ") + frame2 = Frame(True, OP_CONT, b"time") dec_frame1 = self.extension.decode(frame1) dec_frame2 = self.extension.decode(frame2) @@ -717,25 +717,25 @@ def test_no_decode_fragmented_binary_frame(self): self.assertEqual(dec_frame2, frame2) def test_context_takeover(self): - frame = Frame(True, OP_TEXT, 'café'.encode('utf-8')) + frame = Frame(True, OP_TEXT, "café".encode("utf-8")) enc_frame1 = self.extension.encode(frame) enc_frame2 = self.extension.encode(frame) - self.assertEqual(enc_frame1.data, b'JNL;\xbc\x12\x00') - self.assertEqual(enc_frame2.data, b'J\x06\x11\x00\x00') + self.assertEqual(enc_frame1.data, b"JNL;\xbc\x12\x00") + self.assertEqual(enc_frame2.data, b"J\x06\x11\x00\x00") def test_remote_no_context_takeover(self): # No context takeover when decoding messages. self.extension = PerMessageDeflate(True, False, 15, 15) - frame = Frame(True, OP_TEXT, 'café'.encode('utf-8')) + frame = Frame(True, OP_TEXT, "café".encode("utf-8")) enc_frame1 = self.extension.encode(frame) enc_frame2 = self.extension.encode(frame) - self.assertEqual(enc_frame1.data, b'JNL;\xbc\x12\x00') - self.assertEqual(enc_frame2.data, b'J\x06\x11\x00\x00') + self.assertEqual(enc_frame1.data, b"JNL;\xbc\x12\x00") + self.assertEqual(enc_frame2.data, b"J\x06\x11\x00\x00") dec_frame1 = self.extension.decode(enc_frame1) self.assertEqual(dec_frame1, frame) @@ -748,13 +748,13 @@ def test_local_no_context_takeover(self): # No context takeover when encoding and decoding messages. self.extension = PerMessageDeflate(True, True, 15, 15) - frame = Frame(True, OP_TEXT, 'café'.encode('utf-8')) + frame = Frame(True, OP_TEXT, "café".encode("utf-8")) enc_frame1 = self.extension.encode(frame) enc_frame2 = self.extension.encode(frame) - self.assertEqual(enc_frame1.data, b'JNL;\xbc\x12\x00') - self.assertEqual(enc_frame2.data, b'JNL;\xbc\x12\x00') + self.assertEqual(enc_frame1.data, b"JNL;\xbc\x12\x00") + self.assertEqual(enc_frame2.data, b"JNL;\xbc\x12\x00") dec_frame1 = self.extension.decode(enc_frame1) dec_frame2 = self.extension.decode(enc_frame2) @@ -766,27 +766,27 @@ def test_local_no_context_takeover(self): def test_compress_settings(self): # Configure an extension so that no compression actually occurs. - extension = PerMessageDeflate(False, False, 15, 15, {'level': 0}) + extension = PerMessageDeflate(False, False, 15, 15, {"level": 0}) - frame = Frame(True, OP_TEXT, 'café'.encode('utf-8')) + frame = Frame(True, OP_TEXT, "café".encode("utf-8")) enc_frame = extension.encode(frame) self.assertEqual( enc_frame, frame._replace( - rsv1=True, data=b'\x00\x05\x00\xfa\xffcaf\xc3\xa9\x00' # not compressed + rsv1=True, data=b"\x00\x05\x00\xfa\xffcaf\xc3\xa9\x00" # not compressed ), ) # Frames aren't decoded beyond max_length. 
def test_decompress_max_size(self): - frame = Frame(True, OP_TEXT, ('a' * 20).encode('utf-8')) + frame = Frame(True, OP_TEXT, ("a" * 20).encode("utf-8")) enc_frame = self.extension.encode(frame) - self.assertEqual(enc_frame.data, b'JL\xc4\x04\x00\x00') + self.assertEqual(enc_frame.data, b"JL\xc4\x04\x00\x00") with self.assertRaises(PayloadTooBig): self.extension.decode(enc_frame, max_size=10) diff --git a/tests/py35/_test_client_server.py b/tests/py35/_test_client_server.py index 46e9111a5..869c379b8 100644 --- a/tests/py35/_test_client_server.py +++ b/tests/py35/_test_client_server.py @@ -23,7 +23,7 @@ def tearDown(self): self.loop.close() def test_client(self): - start_server = serve(handler, 'localhost', 0) + start_server = serve(handler, "localhost", 0) server = self.loop.run_until_complete(start_server) async def run_client(): @@ -41,7 +41,7 @@ async def run_client(): def test_server(self): async def run_server(): # Await serve. - server = await serve(handler, 'localhost', 0) + server = await serve(handler, "localhost", 0) self.assertTrue(server.sockets) server.close() await server.wait_closed() @@ -60,10 +60,10 @@ def tearDown(self): # Asynchronous context managers are only enabled on Python ≥ 3.5.1. @unittest.skipIf( - sys.version_info[:3] <= (3, 5, 0), 'this test requires Python 3.5.1+' + sys.version_info[:3] <= (3, 5, 0), "this test requires Python 3.5.1+" ) def test_client(self): - start_server = serve(handler, 'localhost', 0) + start_server = serve(handler, "localhost", 0) server = self.loop.run_until_complete(start_server) async def run_client(): @@ -81,12 +81,12 @@ async def run_client(): # Asynchronous context managers are only enabled on Python ≥ 3.5.1. @unittest.skipIf( - sys.version_info[:3] <= (3, 5, 0), 'this test requires Python 3.5.1+' + sys.version_info[:3] <= (3, 5, 0), "this test requires Python 3.5.1+" ) def test_server(self): async def run_server(): # Use serve as an asynchronous context manager. - async with serve(handler, 'localhost', 0) as server: + async with serve(handler, "localhost", 0) as server: self.assertTrue(server.sockets) # Check that exiting the context manager closed the server. @@ -96,9 +96,9 @@ async def run_server(): # Asynchronous context managers are only enabled on Python ≥ 3.5.1. 
@unittest.skipIf( - sys.version_info[:3] <= (3, 5, 0), 'this test requires Python 3.5.1+' + sys.version_info[:3] <= (3, 5, 0), "this test requires Python 3.5.1+" ) - @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'this test requires Unix sockets') + @unittest.skipUnless(hasattr(socket, "AF_UNIX"), "this test requires Unix sockets") def test_unix_server(self): async def run_server(path): async with unix_serve(handler, path) as server: @@ -108,5 +108,5 @@ async def run_server(path): self.assertFalse(server.sockets) with tempfile.TemporaryDirectory() as temp_dir: - path = bytes(pathlib.Path(temp_dir) / 'websockets') + path = bytes(pathlib.Path(temp_dir) / "websockets") self.loop.run_until_complete(run_server(path)) diff --git a/tests/py36/_test_client_server.py b/tests/py36/_test_client_server.py index f38fbe6f6..10b135cc9 100644 --- a/tests/py36/_test_client_server.py +++ b/tests/py36/_test_client_server.py @@ -17,7 +17,7 @@ raise ImportError("Python 3.6+ only") -MESSAGES = ['3', '2', '1', 'Fire!'] +MESSAGES = ["3", "2", "1", "Fire!"] class AsyncIteratorTests(unittest.TestCase): @@ -37,7 +37,7 @@ async def handler(ws, path): for message in MESSAGES: await ws.send(message) - start_server = serve(handler, 'localhost', 0) + start_server = serve(handler, "localhost", 0) server = self.loop.run_until_complete(start_server) messages = [] @@ -61,7 +61,7 @@ async def handler(ws, path): await ws.send(message) await ws.close(1001) - start_server = serve(handler, 'localhost', 0) + start_server = serve(handler, "localhost", 0) server = self.loop.run_until_complete(start_server) messages = [] @@ -85,7 +85,7 @@ async def handler(ws, path): await ws.send(message) await ws.close(1011) - start_server = serve(handler, 'localhost', 0) + start_server = serve(handler, "localhost", 0) server = self.loop.run_until_complete(start_server) messages = [] diff --git a/tests/test_client_server.py b/tests/test_client_server.py index eade7e066..9ba2725d9 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -55,25 +55,25 @@ # $ cat test_localhost.key test_localhost.crt > test_localhost.pem # $ rm test_localhost.key test_localhost.crt -testcert = bytes(pathlib.Path(__file__).with_name('test_localhost.pem')) +testcert = bytes(pathlib.Path(__file__).with_name("test_localhost.pem")) @asyncio.coroutine def handler(ws, path): - if path == '/attributes': + if path == "/attributes": yield from ws.send(repr((ws.host, ws.port, ws.secure))) - elif path == '/close_timeout': + elif path == "/close_timeout": yield from ws.send(repr(ws.close_timeout)) - elif path == '/path': + elif path == "/path": yield from ws.send(str(ws.path)) - elif path == '/headers': + elif path == "/headers": yield from ws.send(repr(ws.request_headers)) yield from ws.send(repr(ws.response_headers)) - elif path == '/extensions': + elif path == "/extensions": yield from ws.send(repr(ws.extensions)) - elif path == '/subprotocol': + elif path == "/subprotocol": yield from ws.send(repr(ws.subprotocol)) - elif path == '/slow_stop': + elif path == "/slow_stop": yield from ws.wait_closed() yield from asyncio.sleep(2 * MS) else: @@ -142,14 +142,14 @@ def with_client(*args, **kwds): return with_manager(temp_test_client, *args, **kwds) -def get_server_uri(server, secure=False, resource_name='/', user_info=None): +def get_server_uri(server, secure=False, resource_name="/", user_info=None): """ Return a WebSocket URI for connecting to the given server. 
""" - proto = 'wss' if secure else 'ws' + proto = "wss" if secure else "ws" - user_info = ':'.join(user_info) + '@' if user_info else '' + user_info = ":".join(user_info) + "@" if user_info else "" # Pick a random socket in order to test both IPv4 and IPv6 on systems # where both are available. Randomizing tests is usually a bad idea. If @@ -158,38 +158,38 @@ def get_server_uri(server, secure=False, resource_name='/', user_info=None): if server_socket.family == socket.AF_INET6: # pragma: no cover host, port = server_socket.getsockname()[:2] # (no IPv6 on CI) - host = '[{}]'.format(host) + host = "[{}]".format(host) elif server_socket.family == socket.AF_INET: host, port = server_socket.getsockname() elif server_socket.family == socket.AF_UNIX: # The host and port are ignored when connecting to a Unix socket. - host, port = 'localhost', 0 + host, port = "localhost", 0 else: # pragma: no cover raise ValueError("Expected an IPv6, IPv4, or Unix socket") - return '{}://{}{}:{}{}'.format(proto, user_info, host, port, resource_name) + return "{}://{}{}:{}{}".format(proto, user_info, host, port, resource_name) class UnauthorizedServerProtocol(WebSocketServerProtocol): @asyncio.coroutine def process_request(self, path, request_headers): # Test returning headers as a Headers instance (1/3) - return UNAUTHORIZED, Headers([('X-Access', 'denied')]), b'' + return UNAUTHORIZED, Headers([("X-Access", "denied")]), b"" class ForbiddenServerProtocol(WebSocketServerProtocol): @asyncio.coroutine def process_request(self, path, request_headers): # Test returning headers as a dict (2/3) - return FORBIDDEN, {'X-Access': 'denied'}, b'' + return FORBIDDEN, {"X-Access": "denied"}, b"" class HealthCheckServerProtocol(WebSocketServerProtocol): @asyncio.coroutine def process_request(self, path, request_headers): # Test returning headers as a list of pairs (3/3) - if path == '/__health__/': - return OK, [('X-Access', 'OK')], b'status = green\n' + if path == "/__health__/": + return OK, [("X-Access", "OK")], b"status = green\n" class SlowServerProtocol(WebSocketServerProtocol): @@ -207,7 +207,7 @@ class BarClientProtocol(WebSocketClientProtocol): class ClientNoOpExtensionFactory: - name = 'x-no-op' + name = "x-no-op" def get_request_params(self): return [] @@ -219,7 +219,7 @@ def process_response_params(self, params, accepted_extensions): class ServerNoOpExtensionFactory: - name = 'x-no-op' + name = "x-no-op" def __init__(self, params=None): self.params = params or [] @@ -229,10 +229,10 @@ def process_request_params(self, params, accepted_extensions): class NoOpExtension: - name = 'x-no-op' + name = "x-no-op" def __repr__(self): - return 'NoOpExtension()' + return "NoOpExtension()" def decode(self, frame, *, max_size=None): return frame @@ -266,10 +266,10 @@ def server_context(self): def start_server(self, **kwds): # Disable compression by default in tests. - kwds.setdefault('compression', None) + kwds.setdefault("compression", None) # Disable pings by default in tests. 
- kwds.setdefault('ping_interval', None) - start_server = serve(handler, 'localhost', 0, **kwds) + kwds.setdefault("ping_interval", None) + start_server = serve(handler, "localhost", 0, **kwds) self.server = self.loop.run_until_complete(start_server) def start_redirecting_server( @@ -278,13 +278,13 @@ def start_redirecting_server( def _process_request(path, headers): server_uri = get_server_uri(self.server, self.secure, path) if force_insecure: - server_uri = server_uri.replace('wss:', 'ws:') - headers = {'Location': server_uri} if include_location else [] + server_uri = server_uri.replace("wss:", "ws:") + headers = {"Location": server_uri} if include_location else [] return status, headers, b"" start_server = serve( handler, - 'localhost', + "localhost", 0, compression=None, ping_interval=None, @@ -293,12 +293,12 @@ def _process_request(path, headers): ) self.redirecting_server = self.loop.run_until_complete(start_server) - def start_client(self, resource_name='/', user_info=None, **kwds): + def start_client(self, resource_name="/", user_info=None, **kwds): # Disable compression by default in tests. - kwds.setdefault('compression', None) + kwds.setdefault("compression", None) # Disable pings by default in tests. - kwds.setdefault('ping_interval', None) - secure = kwds.get('ssl') is not None + kwds.setdefault("ping_interval", None) + secure = kwds.get("ssl") is not None server = self.redirecting_server if self.redirecting_server else self.server server_uri = get_server_uri(server, secure, resource_name, user_info) start_client = connect(server_uri, **kwds) @@ -370,14 +370,14 @@ def test_infinite_redirect(self): self.server = self.redirecting_server with self.assertRaises(InvalidHandshake): with temp_test_client(self): - self.fail('Did not raise') # pragma: no cover + self.fail("Did not raise") # pragma: no cover @with_server() def test_redirect_missing_location(self): with temp_test_redirecting_server(self, FOUND, include_location=False): with self.assertRaises(InvalidMessage): with temp_test_client(self): - self.fail('Did not raise') # pragma: no cover + self.fail("Did not raise") # pragma: no cover def test_explicit_event_loop(self): with self.temp_server(loop=self.loop): @@ -388,7 +388,7 @@ def test_explicit_event_loop(self): # The way the legacy SSL implementation wraps sockets makes it extremely # hard to write a test for Python 3.4. - @unittest.skipIf(sys.version_info[:2] <= (3, 4), 'this test requires Python 3.5+') + @unittest.skipIf(sys.version_info[:2] <= (3, 4), "this test requires Python 3.5+") @with_server() def test_explicit_socket(self): class TrackedSocket(socket.socket): @@ -418,7 +418,7 @@ def send(self, *args, **kwargs): with self.temp_client( sock=client_socket, # "You must set server_hostname when using ssl without a host" - server_hostname='localhost' if self.secure else None, + server_hostname="localhost" if self.secure else None, ): self.loop.run_until_complete(self.client.send("Hello!")) reply = self.loop.run_until_complete(self.client.recv()) @@ -430,10 +430,10 @@ def send(self, *args, **kwargs): finally: client_socket.close() - @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'this test requires Unix sockets') + @unittest.skipUnless(hasattr(socket, "AF_UNIX"), "this test requires Unix sockets") def test_unix_socket(self): with tempfile.TemporaryDirectory() as temp_dir: - path = bytes(pathlib.Path(temp_dir) / 'websockets') + path = bytes(pathlib.Path(temp_dir) / "websockets") # Like self.start_server() but with unix_serve(). 
unix_server = unix_serve(handler, path) @@ -452,24 +452,24 @@ def test_unix_socket(self): client_socket.close() self.stop_server() - @with_server(process_request=lambda p, rh: (OK, [], b'OK\n')) + @with_server(process_request=lambda p, rh: (OK, [], b"OK\n")) def test_process_request_argument(self): - response = self.loop.run_until_complete(self.make_http_request('/')) + response = self.loop.run_until_complete(self.make_http_request("/")) with contextlib.closing(response): self.assertEqual(response.code, 200) @with_server( - subprotocols=['superchat', 'chat'], select_subprotocol=lambda cs, ss: 'chat' + subprotocols=["superchat", "chat"], select_subprotocol=lambda cs, ss: "chat" ) - @with_client('/subprotocol', subprotocols=['superchat', 'chat']) + @with_client("/subprotocol", subprotocols=["superchat", "chat"]) def test_select_subprotocol_argument(self): server_subprotocol = self.loop.run_until_complete(self.client.recv()) - self.assertEqual(server_subprotocol, repr('chat')) - self.assertEqual(self.client.subprotocol, 'chat') + self.assertEqual(server_subprotocol, repr("chat")) + self.assertEqual(self.client.subprotocol, "chat") @with_server() - @with_client('/attributes') + @with_client("/attributes") def test_protocol_attributes(self): # The test could be connecting with IPv6 or IPv4. expected_client_attrs = [ @@ -479,120 +479,120 @@ def test_protocol_attributes(self): client_attrs = (self.client.host, self.client.port, self.client.secure) self.assertIn(client_attrs, expected_client_attrs) - expected_server_attrs = ('localhost', 0, self.secure) + expected_server_attrs = ("localhost", 0, self.secure) server_attrs = self.loop.run_until_complete(self.client.recv()) self.assertEqual(server_attrs, repr(expected_server_attrs)) @with_server() - @with_client('/path') + @with_client("/path") def test_protocol_path(self): client_path = self.client.path - self.assertEqual(client_path, '/path') + self.assertEqual(client_path, "/path") server_path = self.loop.run_until_complete(self.client.recv()) - self.assertEqual(server_path, '/path') + self.assertEqual(server_path, "/path") @with_server() - @with_client('/headers', user_info=('user', 'pass')) + @with_client("/headers", user_info=("user", "pass")) def test_protocol_basic_auth(self): self.assertEqual( - self.client.request_headers['Authorization'], 'Basic dXNlcjpwYXNz' + self.client.request_headers["Authorization"], "Basic dXNlcjpwYXNz" ) @with_server() - @with_client('/headers') + @with_client("/headers") def test_protocol_headers(self): client_req = self.client.request_headers client_resp = self.client.response_headers - self.assertEqual(client_req['User-Agent'], USER_AGENT) - self.assertEqual(client_resp['Server'], USER_AGENT) + self.assertEqual(client_req["User-Agent"], USER_AGENT) + self.assertEqual(client_resp["Server"], USER_AGENT) server_req = self.loop.run_until_complete(self.client.recv()) server_resp = self.loop.run_until_complete(self.client.recv()) self.assertEqual(server_req, repr(client_req)) self.assertEqual(server_resp, repr(client_resp)) @with_server() - @with_client('/headers', extra_headers=Headers({'X-Spam': 'Eggs'})) + @with_client("/headers", extra_headers=Headers({"X-Spam": "Eggs"})) def test_protocol_custom_request_headers(self): req_headers = self.loop.run_until_complete(self.client.recv()) self.loop.run_until_complete(self.client.recv()) self.assertIn("('X-Spam', 'Eggs')", req_headers) @with_server() - @with_client('/headers', extra_headers={'X-Spam': 'Eggs'}) + @with_client("/headers", extra_headers={"X-Spam": "Eggs"}) 
def test_protocol_custom_request_headers_dict(self): req_headers = self.loop.run_until_complete(self.client.recv()) self.loop.run_until_complete(self.client.recv()) self.assertIn("('X-Spam', 'Eggs')", req_headers) @with_server() - @with_client('/headers', extra_headers=[('X-Spam', 'Eggs')]) + @with_client("/headers", extra_headers=[("X-Spam", "Eggs")]) def test_protocol_custom_request_headers_list(self): req_headers = self.loop.run_until_complete(self.client.recv()) self.loop.run_until_complete(self.client.recv()) self.assertIn("('X-Spam', 'Eggs')", req_headers) @with_server() - @with_client('/headers', extra_headers=[('User-Agent', 'Eggs')]) + @with_client("/headers", extra_headers=[("User-Agent", "Eggs")]) def test_protocol_custom_request_user_agent(self): req_headers = self.loop.run_until_complete(self.client.recv()) self.loop.run_until_complete(self.client.recv()) self.assertEqual(req_headers.count("User-Agent"), 1) self.assertIn("('User-Agent', 'Eggs')", req_headers) - @with_server(extra_headers=lambda p, r: Headers({'X-Spam': 'Eggs'})) - @with_client('/headers') + @with_server(extra_headers=lambda p, r: Headers({"X-Spam": "Eggs"})) + @with_client("/headers") def test_protocol_custom_response_headers_callable(self): self.loop.run_until_complete(self.client.recv()) resp_headers = self.loop.run_until_complete(self.client.recv()) self.assertIn("('X-Spam', 'Eggs')", resp_headers) - @with_server(extra_headers=lambda p, r: {'X-Spam': 'Eggs'}) - @with_client('/headers') + @with_server(extra_headers=lambda p, r: {"X-Spam": "Eggs"}) + @with_client("/headers") def test_protocol_custom_response_headers_callable_dict(self): self.loop.run_until_complete(self.client.recv()) resp_headers = self.loop.run_until_complete(self.client.recv()) self.assertIn("('X-Spam', 'Eggs')", resp_headers) - @with_server(extra_headers=lambda p, r: [('X-Spam', 'Eggs')]) - @with_client('/headers') + @with_server(extra_headers=lambda p, r: [("X-Spam", "Eggs")]) + @with_client("/headers") def test_protocol_custom_response_headers_callable_list(self): self.loop.run_until_complete(self.client.recv()) resp_headers = self.loop.run_until_complete(self.client.recv()) self.assertIn("('X-Spam', 'Eggs')", resp_headers) - @with_server(extra_headers=Headers({'X-Spam': 'Eggs'})) - @with_client('/headers') + @with_server(extra_headers=Headers({"X-Spam": "Eggs"})) + @with_client("/headers") def test_protocol_custom_response_headers(self): self.loop.run_until_complete(self.client.recv()) resp_headers = self.loop.run_until_complete(self.client.recv()) self.assertIn("('X-Spam', 'Eggs')", resp_headers) - @with_server(extra_headers={'X-Spam': 'Eggs'}) - @with_client('/headers') + @with_server(extra_headers={"X-Spam": "Eggs"}) + @with_client("/headers") def test_protocol_custom_response_headers_dict(self): self.loop.run_until_complete(self.client.recv()) resp_headers = self.loop.run_until_complete(self.client.recv()) self.assertIn("('X-Spam', 'Eggs')", resp_headers) - @with_server(extra_headers=[('X-Spam', 'Eggs')]) - @with_client('/headers') + @with_server(extra_headers=[("X-Spam", "Eggs")]) + @with_client("/headers") def test_protocol_custom_response_headers_list(self): self.loop.run_until_complete(self.client.recv()) resp_headers = self.loop.run_until_complete(self.client.recv()) self.assertIn("('X-Spam', 'Eggs')", resp_headers) - @with_server(extra_headers=[('Server', 'Eggs')]) - @with_client('/headers') + @with_server(extra_headers=[("Server", "Eggs")]) + @with_client("/headers") def test_protocol_custom_response_user_agent(self): 
self.loop.run_until_complete(self.client.recv()) resp_headers = self.loop.run_until_complete(self.client.recv()) self.assertEqual(resp_headers.count("Server"), 1) self.assertIn("('Server', 'Eggs')", resp_headers) - def make_http_request(self, path='/'): + def make_http_request(self, path="/"): # Set url to 'https?://:'. url = get_server_uri(self.server, resource_name=path, secure=self.secure) - url = url.replace('ws', 'http') + url = url.replace("ws", "http") if self.secure: open_health_check = functools.partial( @@ -606,11 +606,11 @@ def make_http_request(self, path='/'): @with_server(create_protocol=HealthCheckServerProtocol) def test_http_request_http_endpoint(self): # Making a HTTP request to a HTTP endpoint succeeds. - response = self.loop.run_until_complete(self.make_http_request('/__health__/')) + response = self.loop.run_until_complete(self.make_http_request("/__health__/")) with contextlib.closing(response): self.assertEqual(response.code, 200) - self.assertEqual(response.read(), b'status = green\n') + self.assertEqual(response.read(), b"status = green\n") @with_server(create_protocol=HealthCheckServerProtocol) def test_http_request_ws_endpoint(self): @@ -619,13 +619,13 @@ def test_http_request_ws_endpoint(self): self.loop.run_until_complete(self.make_http_request()) self.assertEqual(raised.exception.code, 426) - self.assertEqual(raised.exception.headers['Upgrade'], 'websocket') + self.assertEqual(raised.exception.headers["Upgrade"], "websocket") @with_server(create_protocol=HealthCheckServerProtocol) def test_ws_connection_http_endpoint(self): # Making a WS connection to a HTTP endpoint fails. with self.assertRaises(InvalidStatusCode) as raised: - self.start_client('/__health__/') + self.start_client("/__health__/") self.assertEqual(raised.exception.status_code, 200) @@ -665,93 +665,93 @@ def test_server_create_protocol_over_klass(self): self.assert_client_raises_code(403) @with_server() - @with_client('/path', create_protocol=FooClientProtocol) + @with_client("/path", create_protocol=FooClientProtocol) def test_client_create_protocol(self): self.assertIsInstance(self.client, FooClientProtocol) @with_server() @with_client( - '/path', + "/path", create_protocol=(lambda *args, **kwargs: FooClientProtocol(*args, **kwargs)), ) def test_client_create_protocol_function(self): self.assertIsInstance(self.client, FooClientProtocol) @with_server() - @with_client('/path', klass=FooClientProtocol) + @with_client("/path", klass=FooClientProtocol) def test_client_klass(self): self.assertIsInstance(self.client, FooClientProtocol) @with_server() - @with_client('/path', create_protocol=BarClientProtocol, klass=FooClientProtocol) + @with_client("/path", create_protocol=BarClientProtocol, klass=FooClientProtocol) def test_client_create_protocol_over_klass(self): self.assertIsInstance(self.client, BarClientProtocol) @with_server(close_timeout=7) - @with_client('/close_timeout') + @with_client("/close_timeout") def test_server_close_timeout(self): close_timeout = self.loop.run_until_complete(self.client.recv()) self.assertEqual(eval(close_timeout), 7) @with_server(timeout=6) - @with_client('/close_timeout') + @with_client("/close_timeout") def test_server_timeout_backwards_compatibility(self): close_timeout = self.loop.run_until_complete(self.client.recv()) self.assertEqual(eval(close_timeout), 6) @with_server(close_timeout=7, timeout=6) - @with_client('/close_timeout') + @with_client("/close_timeout") def test_server_close_timeout_over_timeout(self): close_timeout = 
self.loop.run_until_complete(self.client.recv()) self.assertEqual(eval(close_timeout), 7) @with_server() - @with_client('/close_timeout', close_timeout=7) + @with_client("/close_timeout", close_timeout=7) def test_client_close_timeout(self): self.assertEqual(self.client.close_timeout, 7) @with_server() - @with_client('/close_timeout', timeout=6) + @with_client("/close_timeout", timeout=6) def test_client_timeout_backwards_compatibility(self): self.assertEqual(self.client.close_timeout, 6) @with_server() - @with_client('/close_timeout', close_timeout=7, timeout=6) + @with_client("/close_timeout", close_timeout=7, timeout=6) def test_client_close_timeout_over_timeout(self): self.assertEqual(self.client.close_timeout, 7) @with_server() - @with_client('/extensions') + @with_client("/extensions") def test_no_extension(self): server_extensions = self.loop.run_until_complete(self.client.recv()) self.assertEqual(server_extensions, repr([])) self.assertEqual(repr(self.client.extensions), repr([])) @with_server(extensions=[ServerNoOpExtensionFactory()]) - @with_client('/extensions', extensions=[ClientNoOpExtensionFactory()]) + @with_client("/extensions", extensions=[ClientNoOpExtensionFactory()]) def test_extension(self): server_extensions = self.loop.run_until_complete(self.client.recv()) self.assertEqual(server_extensions, repr([NoOpExtension()])) self.assertEqual(repr(self.client.extensions), repr([NoOpExtension()])) @with_server() - @with_client('/extensions', extensions=[ClientNoOpExtensionFactory()]) + @with_client("/extensions", extensions=[ClientNoOpExtensionFactory()]) def test_extension_not_accepted(self): server_extensions = self.loop.run_until_complete(self.client.recv()) self.assertEqual(server_extensions, repr([])) self.assertEqual(repr(self.client.extensions), repr([])) @with_server(extensions=[ServerNoOpExtensionFactory()]) - @with_client('/extensions') + @with_client("/extensions") def test_extension_not_requested(self): server_extensions = self.loop.run_until_complete(self.client.recv()) self.assertEqual(server_extensions, repr([])) self.assertEqual(repr(self.client.extensions), repr([])) - @with_server(extensions=[ServerNoOpExtensionFactory([('foo', None)])]) + @with_server(extensions=[ServerNoOpExtensionFactory([("foo", None)])]) def test_extension_client_rejection(self): with self.assertRaises(NegotiationError): - self.start_client('/extensions', extensions=[ClientNoOpExtensionFactory()]) + self.start_client("/extensions", extensions=[ClientNoOpExtensionFactory()]) @with_server( extensions=[ @@ -760,7 +760,7 @@ def test_extension_client_rejection(self): ServerPerMessageDeflateFactory(), ] ) - @with_client('/extensions', extensions=[ClientPerMessageDeflateFactory()]) + @with_client("/extensions", extensions=[ClientPerMessageDeflateFactory()]) def test_extension_no_match_then_match(self): # The order requested by the client has priority. 
server_extensions = self.loop.run_until_complete(self.client.recv()) @@ -773,7 +773,7 @@ def test_extension_no_match_then_match(self): ) @with_server(extensions=[ServerPerMessageDeflateFactory()]) - @with_client('/extensions', extensions=[ClientNoOpExtensionFactory()]) + @with_client("/extensions", extensions=[ClientNoOpExtensionFactory()]) def test_extension_mismatch(self): server_extensions = self.loop.run_until_complete(self.client.recv()) self.assertEqual(server_extensions, repr([])) @@ -783,7 +783,7 @@ def test_extension_mismatch(self): extensions=[ServerNoOpExtensionFactory(), ServerPerMessageDeflateFactory()] ) @with_client( - '/extensions', + "/extensions", extensions=[ClientPerMessageDeflateFactory(), ClientNoOpExtensionFactory()], ) def test_extension_order(self): @@ -799,25 +799,25 @@ def test_extension_order(self): ) @with_server(extensions=[ServerNoOpExtensionFactory()]) - @unittest.mock.patch.object(WebSocketServerProtocol, 'process_extensions') + @unittest.mock.patch.object(WebSocketServerProtocol, "process_extensions") def test_extensions_error(self, _process_extensions): - _process_extensions.return_value = 'x-no-op', [NoOpExtension()] + _process_extensions.return_value = "x-no-op", [NoOpExtension()] with self.assertRaises(NegotiationError): self.start_client( - '/extensions', extensions=[ClientPerMessageDeflateFactory()] + "/extensions", extensions=[ClientPerMessageDeflateFactory()] ) @with_server(extensions=[ServerNoOpExtensionFactory()]) - @unittest.mock.patch.object(WebSocketServerProtocol, 'process_extensions') + @unittest.mock.patch.object(WebSocketServerProtocol, "process_extensions") def test_extensions_error_no_extensions(self, _process_extensions): - _process_extensions.return_value = 'x-no-op', [NoOpExtension()] + _process_extensions.return_value = "x-no-op", [NoOpExtension()] with self.assertRaises(InvalidHandshake): - self.start_client('/extensions') + self.start_client("/extensions") - @with_server(compression='deflate') - @with_client('/extensions', compression='deflate') + @with_server(compression="deflate") + @with_client("/extensions", compression="deflate") def test_compression_deflate(self): server_extensions = self.loop.run_until_complete(self.client.recv()) self.assertEqual( @@ -834,16 +834,16 @@ def test_compression_deflate(self): client_no_context_takeover=True, server_max_window_bits=10 ) ], - compression='deflate', # overridden by explicit config + compression="deflate", # overridden by explicit config ) @with_client( - '/extensions', + "/extensions", extensions=[ ClientPerMessageDeflateFactory( server_no_context_takeover=True, client_max_window_bits=12 ) ], - compression='deflate', # overridden by explicit config + compression="deflate", # overridden by explicit config ) def test_compression_deflate_and_explicit_config(self): server_extensions = self.loop.run_until_complete(self.client.recv()) @@ -856,77 +856,77 @@ def test_compression_deflate_and_explicit_config(self): def test_compression_unsupported_server(self): with self.assertRaises(ValueError): - self.loop.run_until_complete(self.start_server(compression='xz')) + self.loop.run_until_complete(self.start_server(compression="xz")) @with_server() def test_compression_unsupported_client(self): with self.assertRaises(ValueError): - self.loop.run_until_complete(self.start_client(compression='xz')) + self.loop.run_until_complete(self.start_client(compression="xz")) @with_server() - @with_client('/subprotocol') + @with_client("/subprotocol") def test_no_subprotocol(self): server_subprotocol = 
self.loop.run_until_complete(self.client.recv()) self.assertEqual(server_subprotocol, repr(None)) self.assertEqual(self.client.subprotocol, None) - @with_server(subprotocols=['superchat', 'chat']) - @with_client('/subprotocol', subprotocols=['otherchat', 'chat']) + @with_server(subprotocols=["superchat", "chat"]) + @with_client("/subprotocol", subprotocols=["otherchat", "chat"]) def test_subprotocol(self): server_subprotocol = self.loop.run_until_complete(self.client.recv()) - self.assertEqual(server_subprotocol, repr('chat')) - self.assertEqual(self.client.subprotocol, 'chat') + self.assertEqual(server_subprotocol, repr("chat")) + self.assertEqual(self.client.subprotocol, "chat") - @with_server(subprotocols=['superchat']) - @with_client('/subprotocol', subprotocols=['otherchat']) + @with_server(subprotocols=["superchat"]) + @with_client("/subprotocol", subprotocols=["otherchat"]) def test_subprotocol_not_accepted(self): server_subprotocol = self.loop.run_until_complete(self.client.recv()) self.assertEqual(server_subprotocol, repr(None)) self.assertEqual(self.client.subprotocol, None) @with_server() - @with_client('/subprotocol', subprotocols=['otherchat', 'chat']) + @with_client("/subprotocol", subprotocols=["otherchat", "chat"]) def test_subprotocol_not_offered(self): server_subprotocol = self.loop.run_until_complete(self.client.recv()) self.assertEqual(server_subprotocol, repr(None)) self.assertEqual(self.client.subprotocol, None) - @with_server(subprotocols=['superchat', 'chat']) - @with_client('/subprotocol') + @with_server(subprotocols=["superchat", "chat"]) + @with_client("/subprotocol") def test_subprotocol_not_requested(self): server_subprotocol = self.loop.run_until_complete(self.client.recv()) self.assertEqual(server_subprotocol, repr(None)) self.assertEqual(self.client.subprotocol, None) - @with_server(subprotocols=['superchat']) - @unittest.mock.patch.object(WebSocketServerProtocol, 'process_subprotocol') + @with_server(subprotocols=["superchat"]) + @unittest.mock.patch.object(WebSocketServerProtocol, "process_subprotocol") def test_subprotocol_error(self, _process_subprotocol): - _process_subprotocol.return_value = 'superchat' + _process_subprotocol.return_value = "superchat" with self.assertRaises(NegotiationError): - self.start_client('/subprotocol', subprotocols=['otherchat']) + self.start_client("/subprotocol", subprotocols=["otherchat"]) self.run_loop_once() - @with_server(subprotocols=['superchat']) - @unittest.mock.patch.object(WebSocketServerProtocol, 'process_subprotocol') + @with_server(subprotocols=["superchat"]) + @unittest.mock.patch.object(WebSocketServerProtocol, "process_subprotocol") def test_subprotocol_error_no_subprotocols(self, _process_subprotocol): - _process_subprotocol.return_value = 'superchat' + _process_subprotocol.return_value = "superchat" with self.assertRaises(InvalidHandshake): - self.start_client('/subprotocol') + self.start_client("/subprotocol") self.run_loop_once() - @with_server(subprotocols=['superchat', 'chat']) - @unittest.mock.patch.object(WebSocketServerProtocol, 'process_subprotocol') + @with_server(subprotocols=["superchat", "chat"]) + @unittest.mock.patch.object(WebSocketServerProtocol, "process_subprotocol") def test_subprotocol_error_two_subprotocols(self, _process_subprotocol): - _process_subprotocol.return_value = 'superchat, chat' + _process_subprotocol.return_value = "superchat, chat" with self.assertRaises(InvalidHandshake): - self.start_client('/subprotocol', subprotocols=['superchat', 'chat']) + 
self.start_client("/subprotocol", subprotocols=["superchat", "chat"]) self.run_loop_once() @with_server() - @unittest.mock.patch('websockets.server.read_request') + @unittest.mock.patch("websockets.server.read_request") def test_server_receives_malformed_request(self, _read_request): _read_request.side_effect = ValueError("read_request failed") @@ -934,7 +934,7 @@ def test_server_receives_malformed_request(self, _read_request): self.start_client() @with_server() - @unittest.mock.patch('websockets.client.read_response') + @unittest.mock.patch("websockets.client.read_response") def test_client_receives_malformed_response(self, _read_response): _read_response.side_effect = ValueError("read_response failed") @@ -943,10 +943,10 @@ def test_client_receives_malformed_response(self, _read_response): self.run_loop_once() @with_server() - @unittest.mock.patch('websockets.client.build_request') + @unittest.mock.patch("websockets.client.build_request") def test_client_sends_invalid_handshake_request(self, _build_request): def wrong_build_request(headers): - return '42' + return "42" _build_request.side_effect = wrong_build_request @@ -954,10 +954,10 @@ def wrong_build_request(headers): self.start_client() @with_server() - @unittest.mock.patch('websockets.server.build_response') + @unittest.mock.patch("websockets.server.build_response") def test_server_sends_invalid_handshake_response(self, _build_response): def wrong_build_response(headers, key): - return build_response(headers, '42') + return build_response(headers, "42") _build_response.side_effect = wrong_build_response @@ -965,12 +965,12 @@ def wrong_build_response(headers, key): self.start_client() @with_server() - @unittest.mock.patch('websockets.client.read_response') + @unittest.mock.patch("websockets.client.read_response") def test_server_does_not_switch_protocols(self, _read_response): @asyncio.coroutine def wrong_read_response(stream): status_code, reason, headers = yield from read_response(stream) - return 400, 'Bad Request', headers + return 400, "Bad Request", headers _read_response.side_effect = wrong_read_response @@ -979,7 +979,7 @@ def wrong_read_response(stream): self.run_loop_once() @with_server() - @unittest.mock.patch('websockets.server.WebSocketServerProtocol.process_request') + @unittest.mock.patch("websockets.server.WebSocketServerProtocol.process_request") def test_server_error_in_handshake(self, _process_request): _process_request.side_effect = Exception("process_request crashed") @@ -987,7 +987,7 @@ def test_server_error_in_handshake(self, _process_request): self.start_client() @with_server() - @unittest.mock.patch('websockets.server.WebSocketServerProtocol.send') + @unittest.mock.patch("websockets.server.WebSocketServerProtocol.send") def test_server_handler_crashes(self, send): send.side_effect = ValueError("send failed") @@ -1000,7 +1000,7 @@ def test_server_handler_crashes(self, send): self.assertEqual(self.client.close_code, 1011) @with_server() - @unittest.mock.patch('websockets.server.WebSocketServerProtocol.close') + @unittest.mock.patch("websockets.server.WebSocketServerProtocol.close") def test_server_close_crashes(self, close): close.side_effect = ValueError("close failed") @@ -1014,7 +1014,7 @@ def test_server_close_crashes(self, close): @with_server() @with_client() - @unittest.mock.patch.object(WebSocketClientProtocol, 'handshake') + @unittest.mock.patch.object(WebSocketClientProtocol, "handshake") def test_client_closes_connection_before_handshake(self, handshake): # We have mocked the handshake() method to 
prevent the client from # performing the opening handshake. Force it to close the connection. @@ -1042,7 +1042,7 @@ def test_server_shuts_down_during_connection_handling(self): self.assertEqual(self.client.close_code, 1001) @with_server() - @unittest.mock.patch('websockets.server.WebSocketServerProtocol.close') + @unittest.mock.patch("websockets.server.WebSocketServerProtocol.close") def test_server_shuts_down_during_connection_close(self, _close): _close.side_effect = asyncio.CancelledError @@ -1059,7 +1059,7 @@ def test_server_shuts_down_during_connection_close(self, _close): def test_server_shuts_down_waits_until_handlers_terminate(self): # This handler waits a bit after the connection is closed in order # to test that wait_closed() really waits for handlers to complete. - self.start_client('/slow_stop') + self.start_client("/slow_stop") server_ws = next(iter(self.server.websockets)) # Test that the handler task keeps running after close(). @@ -1081,9 +1081,9 @@ def test_invalid_status_error_during_client_connect(self): @with_server() @unittest.mock.patch( - 'websockets.server.WebSocketServerProtocol.write_http_response' + "websockets.server.WebSocketServerProtocol.write_http_response" ) - @unittest.mock.patch('websockets.server.WebSocketServerProtocol.read_http_request') + @unittest.mock.patch("websockets.server.WebSocketServerProtocol.read_http_request") def test_connection_error_during_opening_handshake( self, _read_http_request, _write_http_response ): @@ -1101,7 +1101,7 @@ def test_connection_error_during_opening_handshake( _write_http_response.assert_not_called() @with_server() - @unittest.mock.patch('websockets.server.WebSocketServerProtocol.close') + @unittest.mock.patch("websockets.server.WebSocketServerProtocol.close") def test_connection_error_during_closing_handshake(self, close): close.side_effect = ConnectionError @@ -1139,11 +1139,11 @@ def client_context(self): return ssl_context def start_server(self, **kwds): - kwds.setdefault('ssl', self.server_context) + kwds.setdefault("ssl", self.server_context) super().start_server(**kwds) - def start_client(self, path='/', **kwds): - kwds.setdefault('ssl', self.client_context) + def start_client(self, path="/", **kwds): + kwds.setdefault("ssl", self.client_context) super().start_client(path, **kwds) # TLS over Unix sockets doesn't make sense. 
@@ -1165,7 +1165,7 @@ def test_redirect_insecure(self): with temp_test_redirecting_server(self, FOUND, force_insecure=True): with self.assertRaises(InvalidHandshake): with temp_test_client(self): - self.fail('Did not raise') # pragma: no cover + self.fail("Did not raise") # pragma: no cover class ClientServerOriginTests(unittest.TestCase): @@ -1178,10 +1178,10 @@ def tearDown(self): def test_checking_origin_succeeds(self): server = self.loop.run_until_complete( - serve(handler, 'localhost', 0, origins=['http://localhost']) + serve(handler, "localhost", 0, origins=["http://localhost"]) ) client = self.loop.run_until_complete( - connect(get_server_uri(server), origin='http://localhost') + connect(get_server_uri(server), origin="http://localhost") ) self.loop.run_until_complete(client.send("Hello!")) @@ -1193,11 +1193,11 @@ def test_checking_origin_succeeds(self): def test_checking_origin_fails(self): server = self.loop.run_until_complete( - serve(handler, 'localhost', 0, origins=['http://localhost']) + serve(handler, "localhost", 0, origins=["http://localhost"]) ) with self.assertRaisesRegex(InvalidHandshake, "Status code not 101: 403"): self.loop.run_until_complete( - connect(get_server_uri(server), origin='http://otherhost') + connect(get_server_uri(server), origin="http://otherhost") ) server.close() @@ -1205,14 +1205,14 @@ def test_checking_origin_fails(self): def test_checking_origins_fails_with_multiple_headers(self): server = self.loop.run_until_complete( - serve(handler, 'localhost', 0, origins=['http://localhost']) + serve(handler, "localhost", 0, origins=["http://localhost"]) ) with self.assertRaisesRegex(InvalidHandshake, "Status code not 101: 400"): self.loop.run_until_complete( connect( get_server_uri(server), - origin='http://localhost', - extra_headers=[('Origin', 'http://otherhost')], + origin="http://localhost", + extra_headers=[("Origin", "http://otherhost")], ) ) @@ -1221,7 +1221,7 @@ def test_checking_origins_fails_with_multiple_headers(self): def test_checking_lack_of_origin_succeeds(self): server = self.loop.run_until_complete( - serve(handler, 'localhost', 0, origins=[None]) + serve(handler, "localhost", 0, origins=[None]) ) client = self.loop.run_until_complete(connect(get_server_uri(server))) @@ -1235,7 +1235,7 @@ def test_checking_lack_of_origin_succeeds(self): def test_checking_lack_of_origin_succeeds_backwards_compatibility(self): with warnings.catch_warnings(record=True) as recorded_warnings: server = self.loop.run_until_complete( - serve(handler, 'localhost', 0, origins=['']) + serve(handler, "localhost", 0, origins=[""]) ) client = self.loop.run_until_complete(connect(get_server_uri(server))) @@ -1261,7 +1261,7 @@ def tearDown(self): self.loop.close() def test_client(self): - start_server = serve(handler, 'localhost', 0) + start_server = serve(handler, "localhost", 0) server = self.loop.run_until_complete(start_server) @asyncio.coroutine @@ -1281,7 +1281,7 @@ def test_server(self): @asyncio.coroutine def run_server(): # Yield from serve. 
- server = yield from serve(handler, 'localhost', 0) + server = yield from serve(handler, "localhost", 0) self.assertTrue(server.sockets) server.close() yield from server.wait_closed() diff --git a/tests/test_framing.py b/tests/test_framing.py index ab11f6bdc..83d0a251a 100644 --- a/tests/test_framing.py +++ b/tests/test_framing.py @@ -59,63 +59,63 @@ def round_trip_close(self, data, code, reason): self.assertEqual(serialized, data) def test_text(self): - self.round_trip(b'\x81\x04Spam', Frame(True, OP_TEXT, b'Spam')) + self.round_trip(b"\x81\x04Spam", Frame(True, OP_TEXT, b"Spam")) def test_text_masked(self): self.round_trip( - b'\x81\x84\x5b\xfb\xe1\xa8\x08\x8b\x80\xc5', - Frame(True, OP_TEXT, b'Spam'), + b"\x81\x84\x5b\xfb\xe1\xa8\x08\x8b\x80\xc5", + Frame(True, OP_TEXT, b"Spam"), mask=True, ) def test_binary(self): - self.round_trip(b'\x82\x04Eggs', Frame(True, OP_BINARY, b'Eggs')) + self.round_trip(b"\x82\x04Eggs", Frame(True, OP_BINARY, b"Eggs")) def test_binary_masked(self): self.round_trip( - b'\x82\x84\x53\xcd\xe2\x89\x16\xaa\x85\xfa', - Frame(True, OP_BINARY, b'Eggs'), + b"\x82\x84\x53\xcd\xe2\x89\x16\xaa\x85\xfa", + Frame(True, OP_BINARY, b"Eggs"), mask=True, ) def test_non_ascii_text(self): self.round_trip( - b'\x81\x05caf\xc3\xa9', Frame(True, OP_TEXT, 'café'.encode('utf-8')) + b"\x81\x05caf\xc3\xa9", Frame(True, OP_TEXT, "café".encode("utf-8")) ) def test_non_ascii_text_masked(self): self.round_trip( - b'\x81\x85\x64\xbe\xee\x7e\x07\xdf\x88\xbd\xcd', - Frame(True, OP_TEXT, 'café'.encode('utf-8')), + b"\x81\x85\x64\xbe\xee\x7e\x07\xdf\x88\xbd\xcd", + Frame(True, OP_TEXT, "café".encode("utf-8")), mask=True, ) def test_close(self): - self.round_trip(b'\x88\x00', Frame(True, OP_CLOSE, b'')) + self.round_trip(b"\x88\x00", Frame(True, OP_CLOSE, b"")) def test_ping(self): - self.round_trip(b'\x89\x04ping', Frame(True, OP_PING, b'ping')) + self.round_trip(b"\x89\x04ping", Frame(True, OP_PING, b"ping")) def test_pong(self): - self.round_trip(b'\x8a\x04pong', Frame(True, OP_PONG, b'pong')) + self.round_trip(b"\x8a\x04pong", Frame(True, OP_PONG, b"pong")) def test_long(self): self.round_trip( - b'\x82\x7e\x00\x7e' + 126 * b'a', Frame(True, OP_BINARY, 126 * b'a') + b"\x82\x7e\x00\x7e" + 126 * b"a", Frame(True, OP_BINARY, 126 * b"a") ) def test_very_long(self): self.round_trip( - b'\x82\x7f\x00\x00\x00\x00\x00\x01\x00\x00' + 65536 * b'a', - Frame(True, OP_BINARY, 65536 * b'a'), + b"\x82\x7f\x00\x00\x00\x00\x00\x01\x00\x00" + 65536 * b"a", + Frame(True, OP_BINARY, 65536 * b"a"), ) def test_payload_too_big(self): with self.assertRaises(PayloadTooBig): - self.decode(b'\x82\x7e\x04\x01' + 1025 * b'a', max_size=1024) + self.decode(b"\x82\x7e\x04\x01" + 1025 * b"a", max_size=1024) def test_bad_reserved_bits(self): - for encoded in [b'\xc0\x00', b'\xa0\x00', b'\x90\x00']: + for encoded in [b"\xc0\x00", b"\xa0\x00", b"\x90\x00"]: with self.subTest(encoded=encoded): with self.assertRaises(WebSocketProtocolError): self.decode(encoded) @@ -135,41 +135,41 @@ def test_bad_opcode(self): def test_mask_flag(self): # Mask flag correctly set. - self.decode(b'\x80\x80\x00\x00\x00\x00', mask=True) + self.decode(b"\x80\x80\x00\x00\x00\x00", mask=True) # Mask flag incorrectly unset. with self.assertRaises(WebSocketProtocolError): - self.decode(b'\x80\x80\x00\x00\x00\x00') + self.decode(b"\x80\x80\x00\x00\x00\x00") # Mask flag correctly unset. - self.decode(b'\x80\x00') + self.decode(b"\x80\x00") # Mask flag incorrectly set. 
with self.assertRaises(WebSocketProtocolError): - self.decode(b'\x80\x00', mask=True) + self.decode(b"\x80\x00", mask=True) def test_control_frame_max_length(self): # At maximum allowed length. - self.decode(b'\x88\x7e\x00\x7d' + 125 * b'a') + self.decode(b"\x88\x7e\x00\x7d" + 125 * b"a") # Above maximum allowed length. with self.assertRaises(WebSocketProtocolError): - self.decode(b'\x88\x7e\x00\x7e' + 126 * b'a') + self.decode(b"\x88\x7e\x00\x7e" + 126 * b"a") def test_prepare_data_str(self): - self.assertEqual(prepare_data('café'), (OP_TEXT, b'caf\xc3\xa9')) + self.assertEqual(prepare_data("café"), (OP_TEXT, b"caf\xc3\xa9")) def test_prepare_data_bytes(self): - self.assertEqual(prepare_data(b'tea'), (OP_BINARY, b'tea')) + self.assertEqual(prepare_data(b"tea"), (OP_BINARY, b"tea")) def test_prepare_data_bytearray(self): self.assertEqual( - prepare_data(bytearray(b'tea')), (OP_BINARY, bytearray(b'tea')) + prepare_data(bytearray(b"tea")), (OP_BINARY, bytearray(b"tea")) ) def test_prepare_data_memoryview(self): self.assertEqual( - prepare_data(memoryview(b'tea')), (OP_BINARY, memoryview(b'tea')) + prepare_data(memoryview(b"tea")), (OP_BINARY, memoryview(b"tea")) ) def test_prepare_data_non_contiguous_memoryview(self): - self.assertEqual(prepare_data(memoryview(b'tteeaa')[::2]), (OP_BINARY, b'tea')) + self.assertEqual(prepare_data(memoryview(b"tteeaa")[::2]), (OP_BINARY, b"tea")) def test_prepare_data_list(self): with self.assertRaises(TypeError): @@ -180,19 +180,19 @@ def test_prepare_data_none(self): prepare_data(None) def test_encode_data_str(self): - self.assertEqual(encode_data('café'), b'caf\xc3\xa9') + self.assertEqual(encode_data("café"), b"caf\xc3\xa9") def test_encode_data_bytes(self): - self.assertEqual(encode_data(b'tea'), b'tea') + self.assertEqual(encode_data(b"tea"), b"tea") def test_encode_data_bytearray(self): - self.assertEqual(encode_data(bytearray(b'tea')), b'tea') + self.assertEqual(encode_data(bytearray(b"tea")), b"tea") def test_encode_data_memoryview(self): - self.assertEqual(encode_data(memoryview(b'tea')), b'tea') + self.assertEqual(encode_data(memoryview(b"tea")), b"tea") def test_encode_data_non_contiguous_memoryview(self): - self.assertEqual(encode_data(memoryview(b'tteeaa')[::2]), b'tea') + self.assertEqual(encode_data(memoryview(b"tteeaa")[::2]), b"tea") def test_encode_data_list(self): with self.assertRaises(TypeError): @@ -204,29 +204,29 @@ def test_encode_data_none(self): def test_fragmented_control_frame(self): # Fin bit correctly set. - self.decode(b'\x88\x00') + self.decode(b"\x88\x00") # Fin bit incorrectly unset. 
with self.assertRaises(WebSocketProtocolError): - self.decode(b'\x08\x00') + self.decode(b"\x08\x00") def test_parse_close_and_serialize_close(self): - self.round_trip_close(b'\x03\xe8', 1000, '') - self.round_trip_close(b'\x03\xe8OK', 1000, 'OK') + self.round_trip_close(b"\x03\xe8", 1000, "") + self.round_trip_close(b"\x03\xe8OK", 1000, "OK") def test_parse_close_empty(self): - self.assertEqual(parse_close(b''), (1005, '')) + self.assertEqual(parse_close(b""), (1005, "")) def test_parse_close_errors(self): with self.assertRaises(WebSocketProtocolError): - parse_close(b'\x03') + parse_close(b"\x03") with self.assertRaises(WebSocketProtocolError): - parse_close(b'\x03\xe7') + parse_close(b"\x03\xe7") with self.assertRaises(UnicodeDecodeError): - parse_close(b'\x03\xe8\xff\xff') + parse_close(b"\x03\xe8\xff\xff") def test_serialize_close_errors(self): with self.assertRaises(WebSocketProtocolError): - serialize_close(999, '') + serialize_close(999, "") def test_extensions(self): class Rot13: @@ -234,7 +234,7 @@ class Rot13: def encode(frame): assert frame.opcode == OP_TEXT text = frame.data.decode() - data = codecs.encode(text, 'rot13').encode() + data = codecs.encode(text, "rot13").encode() return frame._replace(data=data) # This extensions is symmetrical. @@ -243,5 +243,5 @@ def decode(frame, *, max_size=None): return Rot13.encode(frame) self.round_trip( - b'\x81\x05uryyb', Frame(True, OP_TEXT, b'hello'), extensions=[Rot13()] + b"\x81\x05uryyb", Frame(True, OP_TEXT, b"hello"), extensions=[Rot13()] ) diff --git a/tests/test_handshake.py b/tests/test_handshake.py index a0cb55a9e..7d0477715 100644 --- a/tests/test_handshake.py +++ b/tests/test_handshake.py @@ -58,70 +58,70 @@ def assertInvalidRequestHeaders(self, exc_type): def test_request_invalid_connection(self): with self.assertInvalidRequestHeaders(InvalidUpgrade) as headers: - del headers['Connection'] - headers['Connection'] = 'Downgrade' + del headers["Connection"] + headers["Connection"] = "Downgrade" def test_request_missing_connection(self): with self.assertInvalidRequestHeaders(InvalidUpgrade) as headers: - del headers['Connection'] + del headers["Connection"] def test_request_additional_connection(self): with self.assertValidRequestHeaders() as headers: - headers['Connection'] = 'close' + headers["Connection"] = "close" def test_request_invalid_upgrade(self): with self.assertInvalidRequestHeaders(InvalidUpgrade) as headers: - del headers['Upgrade'] - headers['Upgrade'] = 'socketweb' + del headers["Upgrade"] + headers["Upgrade"] = "socketweb" def test_request_missing_upgrade(self): with self.assertInvalidRequestHeaders(InvalidUpgrade) as headers: - del headers['Upgrade'] + del headers["Upgrade"] def test_request_additional_upgrade(self): with self.assertInvalidRequestHeaders(InvalidUpgrade) as headers: - headers['Upgrade'] = 'socketweb' + headers["Upgrade"] = "socketweb" def test_request_invalid_key_not_base64(self): with self.assertInvalidRequestHeaders(InvalidHeaderValue) as headers: - del headers['Sec-WebSocket-Key'] - headers['Sec-WebSocket-Key'] = "!@#$%^&*()" + del headers["Sec-WebSocket-Key"] + headers["Sec-WebSocket-Key"] = "!@#$%^&*()" def test_request_invalid_key_not_well_padded(self): with self.assertInvalidRequestHeaders(InvalidHeaderValue) as headers: - del headers['Sec-WebSocket-Key'] - headers['Sec-WebSocket-Key'] = "CSIRmL8dWYxeAdr/XpEHRw" + del headers["Sec-WebSocket-Key"] + headers["Sec-WebSocket-Key"] = "CSIRmL8dWYxeAdr/XpEHRw" def test_request_invalid_key_not_16_bytes_long(self): with 
self.assertInvalidRequestHeaders(InvalidHeaderValue) as headers: - del headers['Sec-WebSocket-Key'] - headers['Sec-WebSocket-Key'] = "ZLpprpvK4PE=" + del headers["Sec-WebSocket-Key"] + headers["Sec-WebSocket-Key"] = "ZLpprpvK4PE=" def test_request_missing_key(self): with self.assertInvalidRequestHeaders(InvalidHeader) as headers: - del headers['Sec-WebSocket-Key'] + del headers["Sec-WebSocket-Key"] def test_request_additional_key(self): with self.assertInvalidRequestHeaders(InvalidHeader) as headers: # This duplicates the Sec-WebSocket-Key header. - headers['Sec-WebSocket-Key'] = headers['Sec-WebSocket-Key'] + headers["Sec-WebSocket-Key"] = headers["Sec-WebSocket-Key"] def test_request_invalid_version(self): with self.assertInvalidRequestHeaders(InvalidHeaderValue) as headers: - del headers['Sec-WebSocket-Version'] - headers['Sec-WebSocket-Version'] = '42' + del headers["Sec-WebSocket-Version"] + headers["Sec-WebSocket-Version"] = "42" def test_request_missing_version(self): with self.assertInvalidRequestHeaders(InvalidHeader) as headers: - del headers['Sec-WebSocket-Version'] + del headers["Sec-WebSocket-Version"] def test_request_additional_version(self): with self.assertInvalidRequestHeaders(InvalidHeader) as headers: # This duplicates the Sec-WebSocket-Version header. - headers['Sec-WebSocket-Version'] = headers['Sec-WebSocket-Version'] + headers["Sec-WebSocket-Version"] = headers["Sec-WebSocket-Version"] @contextlib.contextmanager - def assertValidResponseHeaders(self, key='CSIRmL8dWYxeAdr/XpEHRw=='): + def assertValidResponseHeaders(self, key="CSIRmL8dWYxeAdr/XpEHRw=="): """ Provide response headers for modification. @@ -134,7 +134,7 @@ def assertValidResponseHeaders(self, key='CSIRmL8dWYxeAdr/XpEHRw=='): check_response(headers, key) @contextlib.contextmanager - def assertInvalidResponseHeaders(self, exc_type, key='CSIRmL8dWYxeAdr/XpEHRw=='): + def assertInvalidResponseHeaders(self, exc_type, key="CSIRmL8dWYxeAdr/XpEHRw=="): """ Provide response headers for modification. 
@@ -150,41 +150,41 @@ def assertInvalidResponseHeaders(self, exc_type, key='CSIRmL8dWYxeAdr/XpEHRw==') def test_response_invalid_connection(self): with self.assertInvalidResponseHeaders(InvalidUpgrade) as headers: - del headers['Connection'] - headers['Connection'] = 'Downgrade' + del headers["Connection"] + headers["Connection"] = "Downgrade" def test_response_missing_connection(self): with self.assertInvalidResponseHeaders(InvalidUpgrade) as headers: - del headers['Connection'] + del headers["Connection"] def test_response_additional_connection(self): with self.assertValidResponseHeaders() as headers: - headers['Connection'] = 'close' + headers["Connection"] = "close" def test_response_invalid_upgrade(self): with self.assertInvalidResponseHeaders(InvalidUpgrade) as headers: - del headers['Upgrade'] - headers['Upgrade'] = 'socketweb' + del headers["Upgrade"] + headers["Upgrade"] = "socketweb" def test_response_missing_upgrade(self): with self.assertInvalidResponseHeaders(InvalidUpgrade) as headers: - del headers['Upgrade'] + del headers["Upgrade"] def test_response_additional_upgrade(self): with self.assertInvalidResponseHeaders(InvalidUpgrade) as headers: - headers['Upgrade'] = 'socketweb' + headers["Upgrade"] = "socketweb" def test_response_invalid_accept(self): with self.assertInvalidResponseHeaders(InvalidHeaderValue) as headers: - del headers['Sec-WebSocket-Accept'] + del headers["Sec-WebSocket-Accept"] other_key = "1Eq4UDEFQYg3YspNgqxv5g==" - headers['Sec-WebSocket-Accept'] = accept(other_key) + headers["Sec-WebSocket-Accept"] = accept(other_key) def test_response_missing_accept(self): with self.assertInvalidResponseHeaders(InvalidHeader) as headers: - del headers['Sec-WebSocket-Accept'] + del headers["Sec-WebSocket-Accept"] def test_response_additional_accept(self): with self.assertInvalidResponseHeaders(InvalidHeader) as headers: # This duplicates the Sec-WebSocket-Accept header. 
- headers['Sec-WebSocket-Accept'] = headers['Sec-WebSocket-Accept'] + headers["Sec-WebSocket-Accept"] = headers["Sec-WebSocket-Accept"] diff --git a/tests/test_headers.py b/tests/test_headers.py index 7d52b9f74..f03dc83cf 100644 --- a/tests/test_headers.py +++ b/tests/test_headers.py @@ -9,16 +9,16 @@ class HeadersTests(unittest.TestCase): def test_parse_connection(self): for header, parsed in [ # Realistic use cases - ('Upgrade', ['Upgrade']), # Safari, Chrome - ('keep-alive, Upgrade', ['keep-alive', 'Upgrade']), # Firefox + ("Upgrade", ["Upgrade"]), # Safari, Chrome + ("keep-alive, Upgrade", ["keep-alive", "Upgrade"]), # Firefox # Pathological example - (',,\t, , ,Upgrade ,,', ['Upgrade']), + (",,\t, , ,Upgrade ,,", ["Upgrade"]), ]: with self.subTest(header=header): self.assertEqual(parse_connection(header), parsed) def test_parse_connection_invalid_header(self): - for header in ['???', 'keep-alive; Upgrade']: + for header in ["???", "keep-alive; Upgrade"]: with self.subTest(header=header): with self.assertRaises(InvalidHeaderFormat): parse_connection(header) @@ -26,17 +26,17 @@ def test_parse_connection_invalid_header(self): def test_parse_upgrade(self): for header, parsed in [ # Realistic use case - ('websocket', ['websocket']), + ("websocket", ["websocket"]), # Synthetic example - ('http/3.0, websocket', ['http/3.0', 'websocket']), + ("http/3.0, websocket", ["http/3.0", "websocket"]), # Pathological example - (',, WebSocket, \t,,', ['WebSocket']), + (",, WebSocket, \t,,", ["WebSocket"]), ]: with self.subTest(header=header): self.assertEqual(parse_upgrade(header), parsed) def test_parse_upgrade_invalid_header(self): - for header in ['???', 'websocket 2', 'http/3.0; websocket']: + for header in ["???", "websocket 2", "http/3.0; websocket"]: with self.subTest(header=header): with self.assertRaises(InvalidHeaderFormat): parse_upgrade(header) @@ -44,37 +44,37 @@ def test_parse_upgrade_invalid_header(self): def test_parse_extension_list(self): for header, parsed in [ # Synthetic examples - ('foo', [('foo', [])]), - ('foo, bar', [('foo', []), ('bar', [])]), + ("foo", [("foo", [])]), + ("foo, bar", [("foo", []), ("bar", [])]), ( 'foo; name; token=token; quoted-string="quoted-string", ' - 'bar; quux; quuux', + "bar; quux; quuux", [ ( - 'foo', + "foo", [ - ('name', None), - ('token', 'token'), - ('quoted-string', 'quoted-string'), + ("name", None), + ("token", "token"), + ("quoted-string", "quoted-string"), ], ), - ('bar', [('quux', None), ('quuux', None)]), + ("bar", [("quux", None), ("quuux", None)]), ], ), # Pathological example ( - ',\t, , ,foo ;bar = 42,, baz,,', - [('foo', [('bar', '42')]), ('baz', [])], + ",\t, , ,foo ;bar = 42,, baz,,", + [("foo", [("bar", "42")]), ("baz", [])], ), # Realistic use cases for permessage-deflate - ('permessage-deflate', [('permessage-deflate', [])]), + ("permessage-deflate", [("permessage-deflate", [])]), ( - 'permessage-deflate; client_max_window_bits', - [('permessage-deflate', [('client_max_window_bits', None)])], + "permessage-deflate; client_max_window_bits", + [("permessage-deflate", [("client_max_window_bits", None)])], ), ( - 'permessage-deflate; server_max_window_bits=10', - [('permessage-deflate', [('server_max_window_bits', '10')])], + "permessage-deflate; server_max_window_bits=10", + [("permessage-deflate", [("server_max_window_bits", "10")])], ), ]: with self.subTest(header=header): @@ -86,14 +86,14 @@ def test_parse_extension_list(self): def test_parse_extension_list_invalid_header(self): for header in [ # Truncated examples - '', - ',\t,', 
- 'foo;', - 'foo; bar;', - 'foo; bar=', + "", + ",\t,", + "foo;", + "foo; bar;", + "foo; bar=", 'foo; bar="baz', # Wrong delimiter - 'foo, bar, baz=quux; quuux', + "foo, bar, baz=quux; quuux", # Value in quoted string parameter that isn't a token 'foo; bar=" "', ]: @@ -104,10 +104,10 @@ def test_parse_extension_list_invalid_header(self): def test_parse_subprotocol_list(self): for header, parsed in [ # Synthetic examples - ('foo', ['foo']), - ('foo, bar', ['foo', 'bar']), + ("foo", ["foo"]), + ("foo, bar", ["foo", "bar"]), # Pathological example - (',\t, , ,foo ,, bar,baz,,', ['foo', 'bar', 'baz']), + (",\t, , ,foo ,, bar,baz,,", ["foo", "bar", "baz"]), ]: with self.subTest(header=header): self.assertEqual(parse_subprotocol_list(header), parsed) @@ -118,10 +118,10 @@ def test_parse_subprotocol_list(self): def test_parse_subprotocol_list_invalid_header(self): for header in [ # Truncated examples - '', - ',\t,' + "", + ",\t," # Wrong delimiter - 'foo; bar', + "foo; bar", ]: with self.subTest(header=header): with self.assertRaises(InvalidHeaderFormat): @@ -131,5 +131,5 @@ def test_build_basic_auth(self): # Test vector from RFC 7617. self.assertEqual( build_basic_auth("Aladdin", "open sesame"), - 'Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==', + "Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==", ) diff --git a/tests/test_http.py b/tests/test_http.py index c222b370f..b28bed6ce 100644 --- a/tests/test_http.py +++ b/tests/test_http.py @@ -19,92 +19,92 @@ def tearDown(self): def test_read_request(self): # Example from the protocol overview in RFC 6455 self.stream.feed_data( - b'GET /chat HTTP/1.1\r\n' - b'Host: server.example.com\r\n' - b'Upgrade: websocket\r\n' - b'Connection: Upgrade\r\n' - b'Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n' - b'Origin: http://example.com\r\n' - b'Sec-WebSocket-Protocol: chat, superchat\r\n' - b'Sec-WebSocket-Version: 13\r\n' - b'\r\n' + b"GET /chat HTTP/1.1\r\n" + b"Host: server.example.com\r\n" + b"Upgrade: websocket\r\n" + b"Connection: Upgrade\r\n" + b"Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" + b"Origin: http://example.com\r\n" + b"Sec-WebSocket-Protocol: chat, superchat\r\n" + b"Sec-WebSocket-Version: 13\r\n" + b"\r\n" ) path, headers = self.loop.run_until_complete(read_request(self.stream)) - self.assertEqual(path, '/chat') - self.assertEqual(headers['Upgrade'], 'websocket') + self.assertEqual(path, "/chat") + self.assertEqual(headers["Upgrade"], "websocket") def test_read_response(self): # Example from the protocol overview in RFC 6455 self.stream.feed_data( - b'HTTP/1.1 101 Switching Protocols\r\n' - b'Upgrade: websocket\r\n' - b'Connection: Upgrade\r\n' - b'Sec-WebSocket-Accept: s3pPLMBiTxaQ9kYGzzhZRbK+xOo=\r\n' - b'Sec-WebSocket-Protocol: chat\r\n' - b'\r\n' + b"HTTP/1.1 101 Switching Protocols\r\n" + b"Upgrade: websocket\r\n" + b"Connection: Upgrade\r\n" + b"Sec-WebSocket-Accept: s3pPLMBiTxaQ9kYGzzhZRbK+xOo=\r\n" + b"Sec-WebSocket-Protocol: chat\r\n" + b"\r\n" ) status_code, reason, headers = self.loop.run_until_complete( read_response(self.stream) ) self.assertEqual(status_code, 101) - self.assertEqual(reason, 'Switching Protocols') - self.assertEqual(headers['Upgrade'], 'websocket') + self.assertEqual(reason, "Switching Protocols") + self.assertEqual(headers["Upgrade"], "websocket") def test_request_method(self): - self.stream.feed_data(b'OPTIONS * HTTP/1.1\r\n\r\n') + self.stream.feed_data(b"OPTIONS * HTTP/1.1\r\n\r\n") with self.assertRaises(ValueError): self.loop.run_until_complete(read_request(self.stream)) def test_request_version(self): - 
self.stream.feed_data(b'GET /chat HTTP/1.0\r\n\r\n') + self.stream.feed_data(b"GET /chat HTTP/1.0\r\n\r\n") with self.assertRaises(ValueError): self.loop.run_until_complete(read_request(self.stream)) def test_response_version(self): - self.stream.feed_data(b'HTTP/1.0 400 Bad Request\r\n\r\n') + self.stream.feed_data(b"HTTP/1.0 400 Bad Request\r\n\r\n") with self.assertRaises(ValueError): self.loop.run_until_complete(read_response(self.stream)) def test_response_status(self): - self.stream.feed_data(b'HTTP/1.1 007 My name is Bond\r\n\r\n') + self.stream.feed_data(b"HTTP/1.1 007 My name is Bond\r\n\r\n") with self.assertRaises(ValueError): self.loop.run_until_complete(read_response(self.stream)) def test_response_reason(self): - self.stream.feed_data(b'HTTP/1.1 200 \x7f\r\n\r\n') + self.stream.feed_data(b"HTTP/1.1 200 \x7f\r\n\r\n") with self.assertRaises(ValueError): self.loop.run_until_complete(read_response(self.stream)) def test_header_name(self): - self.stream.feed_data(b'foo bar: baz qux\r\n\r\n') + self.stream.feed_data(b"foo bar: baz qux\r\n\r\n") with self.assertRaises(ValueError): self.loop.run_until_complete(read_headers(self.stream)) def test_header_value(self): - self.stream.feed_data(b'foo: \x00\x00\x0f\r\n\r\n') + self.stream.feed_data(b"foo: \x00\x00\x0f\r\n\r\n") with self.assertRaises(ValueError): self.loop.run_until_complete(read_headers(self.stream)) def test_headers_limit(self): - self.stream.feed_data(b'foo: bar\r\n' * 257 + b'\r\n') + self.stream.feed_data(b"foo: bar\r\n" * 257 + b"\r\n") with self.assertRaises(ValueError): self.loop.run_until_complete(read_headers(self.stream)) def test_line_limit(self): # Header line contains 5 + 4090 + 2 = 4097 bytes. - self.stream.feed_data(b'foo: ' + b'a' * 4090 + b'\r\n\r\n') + self.stream.feed_data(b"foo: " + b"a" * 4090 + b"\r\n\r\n") with self.assertRaises(ValueError): self.loop.run_until_complete(read_headers(self.stream)) def test_line_ending(self): - self.stream.feed_data(b'foo: bar\n\n') + self.stream.feed_data(b"foo: bar\n\n") with self.assertRaises(ValueError): self.loop.run_until_complete(read_headers(self.stream)) class HeadersTests(unittest.TestCase): def setUp(self): - self.headers = Headers([('Connection', 'Upgrade'), ('Server', USER_AGENT)]) + self.headers = Headers([("Connection", "Upgrade"), ("Server", USER_AGENT)]) def test_str(self): self.assertEqual( @@ -120,67 +120,67 @@ def test_repr(self): ) def test_multiple_values_error_str(self): - self.assertEqual(str(MultipleValuesError('Connection')), "'Connection'") + self.assertEqual(str(MultipleValuesError("Connection")), "'Connection'") self.assertEqual(str(MultipleValuesError()), "") def test_contains(self): - self.assertIn('Server', self.headers) + self.assertIn("Server", self.headers) def test_contains_case_insensitive(self): - self.assertIn('server', self.headers) + self.assertIn("server", self.headers) def test_contains_not_found(self): - self.assertNotIn('Date', self.headers) + self.assertNotIn("Date", self.headers) def test_iter(self): - self.assertEqual(set(iter(self.headers)), {'connection', 'server'}) + self.assertEqual(set(iter(self.headers)), {"connection", "server"}) def test_len(self): self.assertEqual(len(self.headers), 2) def test_getitem(self): - self.assertEqual(self.headers['Server'], USER_AGENT) + self.assertEqual(self.headers["Server"], USER_AGENT) def test_getitem_case_insensitive(self): - self.assertEqual(self.headers['server'], USER_AGENT) + self.assertEqual(self.headers["server"], USER_AGENT) def test_getitem_key_error(self): with 
self.assertRaises(KeyError): - self.headers['Upgrade'] + self.headers["Upgrade"] def test_getitem_multiple_values_error(self): - self.headers['Server'] = '2' + self.headers["Server"] = "2" with self.assertRaises(MultipleValuesError): - self.headers['Server'] + self.headers["Server"] def test_setitem(self): - self.headers['Upgrade'] = 'websocket' - self.assertEqual(self.headers['Upgrade'], 'websocket') + self.headers["Upgrade"] = "websocket" + self.assertEqual(self.headers["Upgrade"], "websocket") def test_setitem_case_insensitive(self): - self.headers['upgrade'] = 'websocket' - self.assertEqual(self.headers['Upgrade'], 'websocket') + self.headers["upgrade"] = "websocket" + self.assertEqual(self.headers["Upgrade"], "websocket") def test_setitem_multiple_values(self): - self.headers['Connection'] = 'close' + self.headers["Connection"] = "close" with self.assertRaises(MultipleValuesError): - self.headers['Connection'] + self.headers["Connection"] def test_delitem(self): - del self.headers['Connection'] + del self.headers["Connection"] with self.assertRaises(KeyError): - self.headers['Connection'] + self.headers["Connection"] def test_delitem_case_insensitive(self): - del self.headers['connection'] + del self.headers["connection"] with self.assertRaises(KeyError): - self.headers['Connection'] + self.headers["Connection"] def test_delitem_multiple_values(self): - self.headers['Connection'] = 'close' - del self.headers['Connection'] + self.headers["Connection"] = "close" + del self.headers["Connection"] with self.assertRaises(KeyError): - self.headers['Connection'] + self.headers["Connection"] def test_eq(self): other_headers = self.headers.copy() @@ -195,20 +195,20 @@ def test_clear(self): self.assertEqual(self.headers, Headers()) def test_get_all(self): - self.assertEqual(self.headers.get_all('Connection'), ['Upgrade']) + self.assertEqual(self.headers.get_all("Connection"), ["Upgrade"]) def test_get_all_case_insensitive(self): - self.assertEqual(self.headers.get_all('connection'), ['Upgrade']) + self.assertEqual(self.headers.get_all("connection"), ["Upgrade"]) def test_get_all_no_values(self): - self.assertEqual(self.headers.get_all('Upgrade'), []) + self.assertEqual(self.headers.get_all("Upgrade"), []) def test_get_all_multiple_values(self): - self.headers['Connection'] = 'close' - self.assertEqual(self.headers.get_all('Connection'), ['Upgrade', 'close']) + self.headers["Connection"] = "close" + self.assertEqual(self.headers.get_all("Connection"), ["Upgrade", "close"]) def test_raw_items(self): self.assertEqual( list(self.headers.raw_items()), - [('Connection', 'Upgrade'), ('Server', USER_AGENT)], + [("Connection", "Upgrade"), ("Server", USER_AGENT)], ) diff --git a/tests/test_protocol.py b/tests/test_protocol.py index a5eb251c9..cb562e647 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -19,14 +19,14 @@ # Unit for timeouts. May be increased on slow machines by setting the # WEBSOCKETS_TESTS_TIMEOUT_FACTOR environment variable. -MS = 0.001 * int(os.environ.get('WEBSOCKETS_TESTS_TIMEOUT_FACTOR', 1)) +MS = 0.001 * int(os.environ.get("WEBSOCKETS_TESTS_TIMEOUT_FACTOR", 1)) # asyncio's debug mode has a 10x performance penalty for this test suite. -if os.environ.get('PYTHONASYNCIODEBUG'): # pragma: no cover +if os.environ.get("PYTHONASYNCIODEBUG"): # pragma: no cover MS *= 10 # Ensure that timeouts are larger than the clock's resolution (for Windows). 
-MS = max(MS, 2.5 * time.get_clock_info('monotonic').resolution) +MS = max(MS, 2.5 * time.get_clock_info("monotonic").resolution) class TransportMock(unittest.mock.Mock): @@ -126,9 +126,9 @@ def delayed_drain(): self.protocol.writer.drain = delayed_drain - close_frame = Frame(True, OP_CLOSE, serialize_close(1000, 'close')) - local_close = Frame(True, OP_CLOSE, serialize_close(1000, 'local')) - remote_close = Frame(True, OP_CLOSE, serialize_close(1000, 'remote')) + close_frame = Frame(True, OP_CLOSE, serialize_close(1000, "close")) + local_close = Frame(True, OP_CLOSE, serialize_close(1000, "local")) + remote_close = Frame(True, OP_CLOSE, serialize_close(1000, "remote")) @property def ensure_future(self): @@ -166,7 +166,7 @@ def receive_eof_if_client(self): if self.protocol.is_client: self.receive_eof() - def close_connection(self, code=1000, reason='close'): + def close_connection(self, code=1000, reason="close"): """ Execute a closing handshake. @@ -184,7 +184,7 @@ def close_connection(self, code=1000, reason='close'): assert self.protocol.state is State.CLOSED - def half_close_connection_local(self, code=1000, reason='close'): + def half_close_connection_local(self, code=1000, reason="close"): """ Start a closing handshake but do not complete it. @@ -215,7 +215,7 @@ def half_close_connection_local(self, code=1000, reason='close'): # This task must be awaited or canceled by the caller. return close_task - def half_close_connection_remote(self, code=1000, reason='close'): + def half_close_connection_remote(self, code=1000, reason="close"): """ Receive a closing handshake but do not complete it. @@ -310,7 +310,7 @@ def assertConnectionFailed(self, code, message): self.assertEqual(self.protocol.state, State.CLOSED) # No close frame was received. self.assertEqual(self.protocol.close_code, 1006) - self.assertEqual(self.protocol.close_reason, '') + self.assertEqual(self.protocol.close_reason, "") # A close frame was sent -- unless the connection was already lost. if code == 1006: self.assertNoFrameSent() @@ -329,11 +329,11 @@ def assertCompletesWithin(self, min_time, max_time): # Test public attributes. def test_local_address(self): - get_extra_info = unittest.mock.Mock(return_value=('host', 4312)) + get_extra_info = unittest.mock.Mock(return_value=("host", 4312)) self.transport.get_extra_info = get_extra_info - self.assertEqual(self.protocol.local_address, ('host', 4312)) - get_extra_info.assert_called_with('sockname', None) + self.assertEqual(self.protocol.local_address, ("host", 4312)) + get_extra_info.assert_called_with("sockname", None) def test_local_address_before_connection(self): # Emulate the situation before connection_open() runs. @@ -345,11 +345,11 @@ def test_local_address_before_connection(self): self.protocol.writer = _writer def test_remote_address(self): - get_extra_info = unittest.mock.Mock(return_value=('host', 4312)) + get_extra_info = unittest.mock.Mock(return_value=("host", 4312)) self.transport.get_extra_info = get_extra_info - self.assertEqual(self.protocol.remote_address, ('host', 4312)) - get_extra_info.assert_called_with('peername', None) + self.assertEqual(self.protocol.remote_address, ("host", 4312)) + get_extra_info.assert_called_with("peername", None) def test_remote_address_before_connection(self): # Emulate the situation before connection_open() runs. @@ -379,14 +379,14 @@ def test_wait_closed(self): # Test the recv coroutine. 
def test_recv_text(self): - self.receive_frame(Frame(True, OP_TEXT, 'café'.encode('utf-8'))) + self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8"))) data = self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(data, 'café') + self.assertEqual(data, "café") def test_recv_binary(self): - self.receive_frame(Frame(True, OP_BINARY, b'tea')) + self.receive_frame(Frame(True, OP_BINARY, b"tea")) data = self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(data, b'tea') + self.assertEqual(data, b"tea") def test_recv_on_closing_connection_local(self): close_task = self.half_close_connection_local() @@ -409,38 +409,38 @@ def test_recv_on_closed_connection(self): self.loop.run_until_complete(self.protocol.recv()) def test_recv_protocol_error(self): - self.receive_frame(Frame(True, OP_CONT, 'café'.encode('utf-8'))) + self.receive_frame(Frame(True, OP_CONT, "café".encode("utf-8"))) self.process_invalid_frames() - self.assertConnectionFailed(1002, '') + self.assertConnectionFailed(1002, "") def test_recv_unicode_error(self): - self.receive_frame(Frame(True, OP_TEXT, 'café'.encode('latin-1'))) + self.receive_frame(Frame(True, OP_TEXT, "café".encode("latin-1"))) self.process_invalid_frames() - self.assertConnectionFailed(1007, '') + self.assertConnectionFailed(1007, "") def test_recv_text_payload_too_big(self): self.protocol.max_size = 1024 - self.receive_frame(Frame(True, OP_TEXT, 'café'.encode('utf-8') * 205)) + self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8") * 205)) self.process_invalid_frames() - self.assertConnectionFailed(1009, '') + self.assertConnectionFailed(1009, "") def test_recv_binary_payload_too_big(self): self.protocol.max_size = 1024 - self.receive_frame(Frame(True, OP_BINARY, b'tea' * 342)) + self.receive_frame(Frame(True, OP_BINARY, b"tea" * 342)) self.process_invalid_frames() - self.assertConnectionFailed(1009, '') + self.assertConnectionFailed(1009, "") def test_recv_text_no_max_size(self): self.protocol.max_size = None # for test coverage - self.receive_frame(Frame(True, OP_TEXT, 'café'.encode('utf-8') * 205)) + self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8") * 205)) data = self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(data, 'café' * 205) + self.assertEqual(data, "café" * 205) def test_recv_binary_no_max_size(self): self.protocol.max_size = None # for test coverage - self.receive_frame(Frame(True, OP_BINARY, b'tea' * 342)) + self.receive_frame(Frame(True, OP_BINARY, b"tea" * 342)) data = self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(data, b'tea' * 342) + self.assertEqual(data, b"tea" * 342) def test_recv_queue_empty(self): recv = self.ensure_future(self.protocol.recv()) @@ -449,32 +449,32 @@ def test_recv_queue_empty(self): asyncio.wait_for(asyncio.shield(recv), timeout=MS) ) - self.receive_frame(Frame(True, OP_TEXT, 'café'.encode('utf-8'))) + self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8"))) data = self.loop.run_until_complete(recv) - self.assertEqual(data, 'café') + self.assertEqual(data, "café") def test_recv_queue_full(self): self.protocol.max_queue = 2 # Test internals because it's hard to verify buffers from the outside. 
self.assertEqual(list(self.protocol.messages), []) - self.receive_frame(Frame(True, OP_TEXT, 'café'.encode('utf-8'))) + self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8"))) self.run_loop_once() - self.assertEqual(list(self.protocol.messages), ['café']) + self.assertEqual(list(self.protocol.messages), ["café"]) - self.receive_frame(Frame(True, OP_BINARY, b'tea')) + self.receive_frame(Frame(True, OP_BINARY, b"tea")) self.run_loop_once() - self.assertEqual(list(self.protocol.messages), ['café', b'tea']) + self.assertEqual(list(self.protocol.messages), ["café", b"tea"]) - self.receive_frame(Frame(True, OP_BINARY, b'milk')) + self.receive_frame(Frame(True, OP_BINARY, b"milk")) self.run_loop_once() - self.assertEqual(list(self.protocol.messages), ['café', b'tea']) + self.assertEqual(list(self.protocol.messages), ["café", b"tea"]) self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(list(self.protocol.messages), [b'tea', b'milk']) + self.assertEqual(list(self.protocol.messages), [b"tea", b"milk"]) self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(list(self.protocol.messages), [b'milk']) + self.assertEqual(list(self.protocol.messages), [b"milk"]) self.loop.run_until_complete(self.protocol.recv()) self.assertEqual(list(self.protocol.messages), []) @@ -486,7 +486,7 @@ def read_message(): self.protocol.read_message = read_message self.process_invalid_frames() - self.assertConnectionFailed(1011, '') + self.assertConnectionFailed(1011, "") def test_recv_canceled(self): recv = self.ensure_future(self.protocol.recv()) @@ -496,26 +496,26 @@ def test_recv_canceled(self): self.loop.run_until_complete(recv) # The next frame doesn't disappear in a vacuum (it used to). - self.receive_frame(Frame(True, OP_TEXT, 'café'.encode('utf-8'))) + self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8"))) data = self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(data, 'café') + self.assertEqual(data, "café") def test_recv_canceled_race_condition(self): recv = self.ensure_future( asyncio.wait_for(self.protocol.recv(), timeout=0.000001) ) self.loop.call_soon( - self.receive_frame, Frame(True, OP_TEXT, 'café'.encode('utf-8')) + self.receive_frame, Frame(True, OP_TEXT, "café".encode("utf-8")) ) with self.assertRaises(asyncio.TimeoutError): self.loop.run_until_complete(recv) # The previous frame doesn't disappear in a vacuum (it used to). - self.receive_frame(Frame(True, OP_TEXT, 'tea'.encode('utf-8'))) + self.receive_frame(Frame(True, OP_TEXT, "tea".encode("utf-8"))) data = self.loop.run_until_complete(self.protocol.recv()) # If we're getting "tea" there, it means "café" was swallowed (ha, ha). - self.assertEqual(data, 'café') + self.assertEqual(data, "café") def test_recv_prevents_concurrent_calls(self): recv = self.ensure_future(self.protocol.recv()) @@ -528,24 +528,24 @@ def test_recv_prevents_concurrent_calls(self): # Test the send coroutine. 
def test_send_text(self): - self.loop.run_until_complete(self.protocol.send('café')) - self.assertOneFrameSent(True, OP_TEXT, 'café'.encode('utf-8')) + self.loop.run_until_complete(self.protocol.send("café")) + self.assertOneFrameSent(True, OP_TEXT, "café".encode("utf-8")) def test_send_binary(self): - self.loop.run_until_complete(self.protocol.send(b'tea')) - self.assertOneFrameSent(True, OP_BINARY, b'tea') + self.loop.run_until_complete(self.protocol.send(b"tea")) + self.assertOneFrameSent(True, OP_BINARY, b"tea") def test_send_binary_from_bytearray(self): - self.loop.run_until_complete(self.protocol.send(bytearray(b'tea'))) - self.assertOneFrameSent(True, OP_BINARY, b'tea') + self.loop.run_until_complete(self.protocol.send(bytearray(b"tea"))) + self.assertOneFrameSent(True, OP_BINARY, b"tea") def test_send_binary_from_memoryview(self): - self.loop.run_until_complete(self.protocol.send(memoryview(b'tea'))) - self.assertOneFrameSent(True, OP_BINARY, b'tea') + self.loop.run_until_complete(self.protocol.send(memoryview(b"tea"))) + self.assertOneFrameSent(True, OP_BINARY, b"tea") def test_send_binary_from_non_contiguous_memoryview(self): - self.loop.run_until_complete(self.protocol.send(memoryview(b'tteeaa')[::2])) - self.assertOneFrameSent(True, OP_BINARY, b'tea') + self.loop.run_until_complete(self.protocol.send(memoryview(b"tteeaa")[::2])) + self.assertOneFrameSent(True, OP_BINARY, b"tea") def test_send_type_error(self): with self.assertRaises(TypeError): @@ -553,41 +553,41 @@ def test_send_type_error(self): self.assertNoFrameSent() def test_send_iterable_text(self): - self.loop.run_until_complete(self.protocol.send(['ca', 'fé'])) + self.loop.run_until_complete(self.protocol.send(["ca", "fé"])) self.assertFramesSent( - (False, OP_TEXT, 'ca'.encode('utf-8')), - (False, OP_CONT, 'fé'.encode('utf-8')), - (True, OP_CONT, ''.encode('utf-8')), + (False, OP_TEXT, "ca".encode("utf-8")), + (False, OP_CONT, "fé".encode("utf-8")), + (True, OP_CONT, "".encode("utf-8")), ) def test_send_iterable_binary(self): - self.loop.run_until_complete(self.protocol.send([b'te', b'a'])) + self.loop.run_until_complete(self.protocol.send([b"te", b"a"])) self.assertFramesSent( - (False, OP_BINARY, b'te'), (False, OP_CONT, b'a'), (True, OP_CONT, b'') + (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") ) def test_send_iterable_binary_from_bytearray(self): self.loop.run_until_complete( - self.protocol.send([bytearray(b'te'), bytearray(b'a')]) + self.protocol.send([bytearray(b"te"), bytearray(b"a")]) ) self.assertFramesSent( - (False, OP_BINARY, b'te'), (False, OP_CONT, b'a'), (True, OP_CONT, b'') + (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") ) def test_send_iterable_binary_from_memoryview(self): self.loop.run_until_complete( - self.protocol.send([memoryview(b'te'), memoryview(b'a')]) + self.protocol.send([memoryview(b"te"), memoryview(b"a")]) ) self.assertFramesSent( - (False, OP_BINARY, b'te'), (False, OP_CONT, b'a'), (True, OP_CONT, b'') + (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") ) def test_send_iterable_binary_from_non_contiguous_memoryview(self): self.loop.run_until_complete( - self.protocol.send([memoryview(b'ttee')[::2], memoryview(b'aa')[::2]]) + self.protocol.send([memoryview(b"ttee")[::2], memoryview(b"aa")[::2]]) ) self.assertFramesSent( - (False, OP_BINARY, b'te'), (False, OP_CONT, b'a'), (True, OP_CONT, b'') + (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") ) def test_send_empty_iterable(self): @@ -601,17 
+601,17 @@ def test_send_iterable_type_error(self): def test_send_iterable_mixed_type_error(self): with self.assertRaises(TypeError): - self.loop.run_until_complete(self.protocol.send(['café', b'tea'])) + self.loop.run_until_complete(self.protocol.send(["café", b"tea"])) self.assertFramesSent( - (False, OP_TEXT, 'café'.encode('utf-8')), - (True, OP_CLOSE, serialize_close(1011, '')), + (False, OP_TEXT, "café".encode("utf-8")), + (True, OP_CLOSE, serialize_close(1011, "")), ) def test_send_on_closing_connection_local(self): close_task = self.half_close_connection_local() with self.assertRaises(ConnectionClosed): - self.loop.run_until_complete(self.protocol.send('foobar')) + self.loop.run_until_complete(self.protocol.send("foobar")) self.assertNoFrameSent() @@ -621,7 +621,7 @@ def test_send_on_closing_connection_remote(self): self.half_close_connection_remote() with self.assertRaises(ConnectionClosed): - self.loop.run_until_complete(self.protocol.send('foobar')) + self.loop.run_until_complete(self.protocol.send("foobar")) self.assertNoFrameSent() @@ -629,7 +629,7 @@ def test_send_on_closed_connection(self): self.close_connection() with self.assertRaises(ConnectionClosed): - self.loop.run_until_complete(self.protocol.send('foobar')) + self.loop.run_until_complete(self.protocol.send("foobar")) self.assertNoFrameSent() @@ -645,24 +645,24 @@ def test_ping_default(self): self.assertOneFrameSent(True, OP_PING, ping_data) def test_ping_text(self): - self.loop.run_until_complete(self.protocol.ping('café')) - self.assertOneFrameSent(True, OP_PING, 'café'.encode('utf-8')) + self.loop.run_until_complete(self.protocol.ping("café")) + self.assertOneFrameSent(True, OP_PING, "café".encode("utf-8")) def test_ping_binary(self): - self.loop.run_until_complete(self.protocol.ping(b'tea')) - self.assertOneFrameSent(True, OP_PING, b'tea') + self.loop.run_until_complete(self.protocol.ping(b"tea")) + self.assertOneFrameSent(True, OP_PING, b"tea") def test_ping_binary_from_bytearray(self): - self.loop.run_until_complete(self.protocol.ping(bytearray(b'tea'))) - self.assertOneFrameSent(True, OP_PING, b'tea') + self.loop.run_until_complete(self.protocol.ping(bytearray(b"tea"))) + self.assertOneFrameSent(True, OP_PING, b"tea") def test_ping_binary_from_memoryview(self): - self.loop.run_until_complete(self.protocol.ping(memoryview(b'tea'))) - self.assertOneFrameSent(True, OP_PING, b'tea') + self.loop.run_until_complete(self.protocol.ping(memoryview(b"tea"))) + self.assertOneFrameSent(True, OP_PING, b"tea") def test_ping_binary_from_non_contiguous_memoryview(self): - self.loop.run_until_complete(self.protocol.ping(memoryview(b'tteeaa')[::2])) - self.assertOneFrameSent(True, OP_PING, b'tea') + self.loop.run_until_complete(self.protocol.ping(memoryview(b"tteeaa")[::2])) + self.assertOneFrameSent(True, OP_PING, b"tea") def test_ping_type_error(self): with self.assertRaises(TypeError): @@ -699,27 +699,27 @@ def test_ping_on_closed_connection(self): def test_pong_default(self): self.loop.run_until_complete(self.protocol.pong()) - self.assertOneFrameSent(True, OP_PONG, b'') + self.assertOneFrameSent(True, OP_PONG, b"") def test_pong_text(self): - self.loop.run_until_complete(self.protocol.pong('café')) - self.assertOneFrameSent(True, OP_PONG, 'café'.encode('utf-8')) + self.loop.run_until_complete(self.protocol.pong("café")) + self.assertOneFrameSent(True, OP_PONG, "café".encode("utf-8")) def test_pong_binary(self): - self.loop.run_until_complete(self.protocol.pong(b'tea')) - self.assertOneFrameSent(True, OP_PONG, b'tea') + 
self.loop.run_until_complete(self.protocol.pong(b"tea")) + self.assertOneFrameSent(True, OP_PONG, b"tea") def test_pong_binary_from_bytearray(self): - self.loop.run_until_complete(self.protocol.pong(bytearray(b'tea'))) - self.assertOneFrameSent(True, OP_PONG, b'tea') + self.loop.run_until_complete(self.protocol.pong(bytearray(b"tea"))) + self.assertOneFrameSent(True, OP_PONG, b"tea") def test_pong_binary_from_memoryview(self): - self.loop.run_until_complete(self.protocol.pong(memoryview(b'tea'))) - self.assertOneFrameSent(True, OP_PONG, b'tea') + self.loop.run_until_complete(self.protocol.pong(memoryview(b"tea"))) + self.assertOneFrameSent(True, OP_PONG, b"tea") def test_pong_binary_from_non_contiguous_memoryview(self): - self.loop.run_until_complete(self.protocol.pong(memoryview(b'tteeaa')[::2])) - self.assertOneFrameSent(True, OP_PONG, b'tea') + self.loop.run_until_complete(self.protocol.pong(memoryview(b"tteeaa")[::2])) + self.assertOneFrameSent(True, OP_PONG, b"tea") def test_pong_type_error(self): with self.assertRaises(TypeError): @@ -755,12 +755,12 @@ def test_pong_on_closed_connection(self): # Test the protocol's logic for acknowledging pings with pongs. def test_answer_ping(self): - self.receive_frame(Frame(True, OP_PING, b'test')) + self.receive_frame(Frame(True, OP_PING, b"test")) self.run_loop_once() - self.assertOneFrameSent(True, OP_PONG, b'test') + self.assertOneFrameSent(True, OP_PONG, b"test") def test_ignore_pong(self): - self.receive_frame(Frame(True, OP_PONG, b'test')) + self.receive_frame(Frame(True, OP_PONG, b"test")) self.run_loop_once() self.assertNoFrameSent() @@ -789,7 +789,7 @@ def test_acknowledge_previous_pings(self): for i in range(3) ] # Unsolicited pong doesn't acknowledge pings - self.receive_frame(Frame(True, OP_PONG, b'')) + self.receive_frame(Frame(True, OP_PONG, b"")) self.run_loop_once() self.run_loop_once() self.assertFalse(pings[0][0].done()) @@ -814,84 +814,84 @@ def test_canceled_ping(self): self.assertTrue(ping.cancelled()) def test_duplicate_ping(self): - self.loop.run_until_complete(self.protocol.ping(b'foobar')) - self.assertOneFrameSent(True, OP_PING, b'foobar') + self.loop.run_until_complete(self.protocol.ping(b"foobar")) + self.assertOneFrameSent(True, OP_PING, b"foobar") with self.assertRaises(ValueError): - self.loop.run_until_complete(self.protocol.ping(b'foobar')) + self.loop.run_until_complete(self.protocol.ping(b"foobar")) self.assertNoFrameSent() # Test the protocol's logic for rebuilding fragmented messages. 
def test_fragmented_text(self): - self.receive_frame(Frame(False, OP_TEXT, 'ca'.encode('utf-8'))) - self.receive_frame(Frame(True, OP_CONT, 'fé'.encode('utf-8'))) + self.receive_frame(Frame(False, OP_TEXT, "ca".encode("utf-8"))) + self.receive_frame(Frame(True, OP_CONT, "fé".encode("utf-8"))) data = self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(data, 'café') + self.assertEqual(data, "café") def test_fragmented_binary(self): - self.receive_frame(Frame(False, OP_BINARY, b't')) - self.receive_frame(Frame(False, OP_CONT, b'e')) - self.receive_frame(Frame(True, OP_CONT, b'a')) + self.receive_frame(Frame(False, OP_BINARY, b"t")) + self.receive_frame(Frame(False, OP_CONT, b"e")) + self.receive_frame(Frame(True, OP_CONT, b"a")) data = self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(data, b'tea') + self.assertEqual(data, b"tea") def test_fragmented_text_payload_too_big(self): self.protocol.max_size = 1024 - self.receive_frame(Frame(False, OP_TEXT, 'café'.encode('utf-8') * 100)) - self.receive_frame(Frame(True, OP_CONT, 'café'.encode('utf-8') * 105)) + self.receive_frame(Frame(False, OP_TEXT, "café".encode("utf-8") * 100)) + self.receive_frame(Frame(True, OP_CONT, "café".encode("utf-8") * 105)) self.process_invalid_frames() - self.assertConnectionFailed(1009, '') + self.assertConnectionFailed(1009, "") def test_fragmented_binary_payload_too_big(self): self.protocol.max_size = 1024 - self.receive_frame(Frame(False, OP_BINARY, b'tea' * 171)) - self.receive_frame(Frame(True, OP_CONT, b'tea' * 171)) + self.receive_frame(Frame(False, OP_BINARY, b"tea" * 171)) + self.receive_frame(Frame(True, OP_CONT, b"tea" * 171)) self.process_invalid_frames() - self.assertConnectionFailed(1009, '') + self.assertConnectionFailed(1009, "") def test_fragmented_text_no_max_size(self): self.protocol.max_size = None # for test coverage - self.receive_frame(Frame(False, OP_TEXT, 'café'.encode('utf-8') * 100)) - self.receive_frame(Frame(True, OP_CONT, 'café'.encode('utf-8') * 105)) + self.receive_frame(Frame(False, OP_TEXT, "café".encode("utf-8") * 100)) + self.receive_frame(Frame(True, OP_CONT, "café".encode("utf-8") * 105)) data = self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(data, 'café' * 205) + self.assertEqual(data, "café" * 205) def test_fragmented_binary_no_max_size(self): self.protocol.max_size = None # for test coverage - self.receive_frame(Frame(False, OP_BINARY, b'tea' * 171)) - self.receive_frame(Frame(True, OP_CONT, b'tea' * 171)) + self.receive_frame(Frame(False, OP_BINARY, b"tea" * 171)) + self.receive_frame(Frame(True, OP_CONT, b"tea" * 171)) data = self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(data, b'tea' * 342) + self.assertEqual(data, b"tea" * 342) def test_control_frame_within_fragmented_text(self): - self.receive_frame(Frame(False, OP_TEXT, 'ca'.encode('utf-8'))) - self.receive_frame(Frame(True, OP_PING, b'')) - self.receive_frame(Frame(True, OP_CONT, 'fé'.encode('utf-8'))) + self.receive_frame(Frame(False, OP_TEXT, "ca".encode("utf-8"))) + self.receive_frame(Frame(True, OP_PING, b"")) + self.receive_frame(Frame(True, OP_CONT, "fé".encode("utf-8"))) data = self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(data, 'café') - self.assertOneFrameSent(True, OP_PONG, b'') + self.assertEqual(data, "café") + self.assertOneFrameSent(True, OP_PONG, b"") def test_unterminated_fragmented_text(self): - self.receive_frame(Frame(False, OP_TEXT, 'ca'.encode('utf-8'))) + self.receive_frame(Frame(False, OP_TEXT, 
"ca".encode("utf-8"))) # Missing the second part of the fragmented frame. - self.receive_frame(Frame(True, OP_BINARY, b'tea')) + self.receive_frame(Frame(True, OP_BINARY, b"tea")) self.process_invalid_frames() - self.assertConnectionFailed(1002, '') + self.assertConnectionFailed(1002, "") def test_close_handshake_in_fragmented_text(self): - self.receive_frame(Frame(False, OP_TEXT, 'ca'.encode('utf-8'))) - self.receive_frame(Frame(True, OP_CLOSE, b'')) + self.receive_frame(Frame(False, OP_TEXT, "ca".encode("utf-8"))) + self.receive_frame(Frame(True, OP_CLOSE, b"")) self.process_invalid_frames() # The RFC may have overlooked this case: it says that control frames # can be interjected in the middle of a fragmented message and that a # close frame must be echoed. Even though there's an unterminated # message, technically, the closing handshake was successful. - self.assertConnectionClosed(1005, '') + self.assertConnectionClosed(1005, "") def test_connection_close_in_fragmented_text(self): - self.receive_frame(Frame(False, OP_TEXT, 'ca'.encode('utf-8'))) + self.receive_frame(Frame(False, OP_TEXT, "ca".encode("utf-8"))) self.process_invalid_frames() - self.assertConnectionFailed(1006, '') + self.assertConnectionFailed(1006, "") # Test miscellaneous code paths to ensure full coverage. @@ -899,7 +899,7 @@ def test_connection_lost(self): # Test calling connection_lost without going through close_connection. self.protocol.connection_lost(None) - self.assertConnectionFailed(1006, '') + self.assertConnectionFailed(1006, "") def test_ensure_open_before_opening_handshake(self): # Simulate a bug by forcibly reverting the protocol state. @@ -941,7 +941,7 @@ def test_connection_closed_attributes(self): connection_closed_exc = context.exception self.assertEqual(connection_closed_exc.code, 1000) - self.assertEqual(connection_closed_exc.reason, 'close') + self.assertEqual(connection_closed_exc.reason, "close") # Test the protocol logic for sending keepalive pings. @@ -988,7 +988,7 @@ def test_keepalive_ping_not_acknowledged_closes_connection(self): # Connection is closed at 6ms. self.loop.run_until_complete(asyncio.sleep(4 * MS)) - self.assertOneFrameSent(True, OP_CLOSE, serialize_close(1011, '')) + self.assertOneFrameSent(True, OP_CLOSE, serialize_close(1011, "")) # The keepalive ping task is complete. self.assertEqual(self.protocol.keepalive_ping_task.result(), None) @@ -1061,15 +1061,15 @@ def test_local_close(self): self.loop.call_later(MS, self.receive_eof_if_client) # Run the closing handshake. - self.loop.run_until_complete(self.protocol.close(reason='close')) + self.loop.run_until_complete(self.protocol.close(reason="close")) - self.assertConnectionClosed(1000, 'close') + self.assertConnectionClosed(1000, "close") self.assertOneFrameSent(*self.close_frame) # Closing the connection again is a no-op. - self.loop.run_until_complete(self.protocol.close(reason='oh noes!')) + self.loop.run_until_complete(self.protocol.close(reason="oh noes!")) - self.assertConnectionClosed(1000, 'close') + self.assertConnectionClosed(1000, "close") self.assertNoFrameSent() def test_remote_close(self): @@ -1082,13 +1082,13 @@ def test_remote_close(self): with self.assertRaises(ConnectionClosed): self.loop.run_until_complete(self.protocol.recv()) - self.assertConnectionClosed(1000, 'close') + self.assertConnectionClosed(1000, "close") self.assertOneFrameSent(*self.close_frame) # Closing the connection again is a no-op. 
- self.loop.run_until_complete(self.protocol.close(reason='oh noes!')) + self.loop.run_until_complete(self.protocol.close(reason="oh noes!")) - self.assertConnectionClosed(1000, 'close') + self.assertConnectionClosed(1000, "close") self.assertNoFrameSent() def test_simultaneous_close(self): @@ -1098,42 +1098,42 @@ def test_simultaneous_close(self): self.loop.call_soon(self.receive_frame, self.remote_close) self.loop.call_soon(self.receive_eof_if_client) - self.loop.run_until_complete(self.protocol.close(reason='local')) + self.loop.run_until_complete(self.protocol.close(reason="local")) - self.assertConnectionClosed(1000, 'remote') + self.assertConnectionClosed(1000, "remote") # The current implementation sends a close frame in response to the # close frame received from the remote end. It skips the close frame # that should be sent as a result of calling close(). self.assertOneFrameSent(*self.remote_close) def test_close_preserves_incoming_frames(self): - self.receive_frame(Frame(True, OP_TEXT, b'hello')) + self.receive_frame(Frame(True, OP_TEXT, b"hello")) self.loop.call_later(MS, self.receive_frame, self.close_frame) self.loop.call_later(MS, self.receive_eof_if_client) - self.loop.run_until_complete(self.protocol.close(reason='close')) + self.loop.run_until_complete(self.protocol.close(reason="close")) - self.assertConnectionClosed(1000, 'close') + self.assertConnectionClosed(1000, "close") self.assertOneFrameSent(*self.close_frame) next_message = self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(next_message, 'hello') + self.assertEqual(next_message, "hello") def test_close_protocol_error(self): - invalid_close_frame = Frame(True, OP_CLOSE, b'\x00') + invalid_close_frame = Frame(True, OP_CLOSE, b"\x00") self.receive_frame(invalid_close_frame) self.receive_eof_if_client() self.run_loop_once() - self.loop.run_until_complete(self.protocol.close(reason='close')) + self.loop.run_until_complete(self.protocol.close(reason="close")) - self.assertConnectionFailed(1002, '') + self.assertConnectionFailed(1002, "") def test_close_connection_lost(self): self.receive_eof() self.run_loop_once() - self.loop.run_until_complete(self.protocol.close(reason='close')) + self.loop.run_until_complete(self.protocol.close(reason="close")) - self.assertConnectionFailed(1006, '') + self.assertConnectionFailed(1006, "") def test_local_close_during_recv(self): recv = self.ensure_future(self.protocol.recv()) @@ -1141,19 +1141,19 @@ def test_local_close_during_recv(self): self.loop.call_later(MS, self.receive_frame, self.close_frame) self.loop.call_later(MS, self.receive_eof_if_client) - self.loop.run_until_complete(self.protocol.close(reason='close')) + self.loop.run_until_complete(self.protocol.close(reason="close")) with self.assertRaises(ConnectionClosed): self.loop.run_until_complete(recv) - self.assertConnectionClosed(1000, 'close') + self.assertConnectionClosed(1000, "close") # There is no test_remote_close_during_recv because it would be identical # to test_remote_close. 
def test_remote_close_during_send(self): self.make_drain_slow() - send = self.ensure_future(self.protocol.send('hello')) + send = self.ensure_future(self.protocol.send("hello")) self.receive_frame(self.close_frame) self.receive_eof() @@ -1161,7 +1161,7 @@ def test_remote_close_during_send(self): with self.assertRaises(ConnectionClosed): self.loop.run_until_complete(send) - self.assertConnectionClosed(1000, 'close') + self.assertConnectionClosed(1000, "close") # There is no test_local_close_during_send because this cannot really # happen, considering that writes are serialized. @@ -1171,7 +1171,7 @@ class ServerTests(CommonTests, unittest.TestCase): def setUp(self): super().setUp() self.protocol.is_client = False - self.protocol.side = 'server' + self.protocol.side = "server" def test_local_close_send_close_frame_timeout(self): self.protocol.close_timeout = 10 * MS @@ -1179,16 +1179,16 @@ def test_local_close_send_close_frame_timeout(self): # If we can't send a close frame, time out in 10ms. # Check the timing within -1/+9ms for robustness. with self.assertCompletesWithin(9 * MS, 19 * MS): - self.loop.run_until_complete(self.protocol.close(reason='close')) - self.assertConnectionClosed(1006, '') + self.loop.run_until_complete(self.protocol.close(reason="close")) + self.assertConnectionClosed(1006, "") def test_local_close_receive_close_frame_timeout(self): self.protocol.close_timeout = 10 * MS # If the client doesn't send a close frame, time out in 10ms. # Check the timing within -1/+9ms for robustness. with self.assertCompletesWithin(9 * MS, 19 * MS): - self.loop.run_until_complete(self.protocol.close(reason='close')) - self.assertConnectionClosed(1006, '') + self.loop.run_until_complete(self.protocol.close(reason="close")) + self.assertConnectionClosed(1006, "") def test_local_close_connection_lost_timeout_after_write_eof(self): self.protocol.close_timeout = 10 * MS @@ -1199,8 +1199,8 @@ def test_local_close_connection_lost_timeout_after_write_eof(self): # HACK: disable write_eof => other end drops connection emulation. self.transport._eof = True self.receive_frame(self.close_frame) - self.loop.run_until_complete(self.protocol.close(reason='close')) - self.assertConnectionClosed(1000, 'close') + self.loop.run_until_complete(self.protocol.close(reason="close")) + self.assertConnectionClosed(1000, "close") def test_local_close_connection_lost_timeout_after_close(self): self.protocol.close_timeout = 10 * MS @@ -1214,15 +1214,15 @@ def test_local_close_connection_lost_timeout_after_close(self): # HACK: disable close => other end drops connection emulation. self.transport._closing = True self.receive_frame(self.close_frame) - self.loop.run_until_complete(self.protocol.close(reason='close')) - self.assertConnectionClosed(1000, 'close') + self.loop.run_until_complete(self.protocol.close(reason="close")) + self.assertConnectionClosed(1000, "close") class ClientTests(CommonTests, unittest.TestCase): def setUp(self): super().setUp() self.protocol.is_client = True - self.protocol.side = 'client' + self.protocol.side = "client" def test_local_close_send_close_frame_timeout(self): self.protocol.close_timeout = 10 * MS @@ -1232,8 +1232,8 @@ def test_local_close_send_close_frame_timeout(self): # - 10ms waiting for receiving a half-close # Check the timing within -1/+9ms for robustness. 
with self.assertCompletesWithin(19 * MS, 29 * MS): - self.loop.run_until_complete(self.protocol.close(reason='close')) - self.assertConnectionClosed(1006, '') + self.loop.run_until_complete(self.protocol.close(reason="close")) + self.assertConnectionClosed(1006, "") def test_local_close_receive_close_frame_timeout(self): self.protocol.close_timeout = 10 * MS @@ -1242,8 +1242,8 @@ def test_local_close_receive_close_frame_timeout(self): # - 10ms waiting for receiving a half-close # Check the timing within -1/+9ms for robustness. with self.assertCompletesWithin(19 * MS, 29 * MS): - self.loop.run_until_complete(self.protocol.close(reason='close')) - self.assertConnectionClosed(1006, '') + self.loop.run_until_complete(self.protocol.close(reason="close")) + self.assertConnectionClosed(1006, "") def test_local_close_connection_lost_timeout_after_write_eof(self): self.protocol.close_timeout = 10 * MS @@ -1256,8 +1256,8 @@ def test_local_close_connection_lost_timeout_after_write_eof(self): # HACK: disable write_eof => other end drops connection emulation. self.transport._eof = True self.receive_frame(self.close_frame) - self.loop.run_until_complete(self.protocol.close(reason='close')) - self.assertConnectionClosed(1000, 'close') + self.loop.run_until_complete(self.protocol.close(reason="close")) + self.assertConnectionClosed(1000, "close") def test_local_close_connection_lost_timeout_after_close(self): self.protocol.close_timeout = 10 * MS @@ -1274,5 +1274,5 @@ def test_local_close_connection_lost_timeout_after_close(self): # HACK: disable close => other end drops connection emulation. self.transport._closing = True self.receive_frame(self.close_frame) - self.loop.run_until_complete(self.protocol.close(reason='close')) - self.assertConnectionClosed(1000, 'close') + self.loop.run_until_complete(self.protocol.close(reason="close")) + self.assertConnectionClosed(1000, "close") diff --git a/tests/test_uri.py b/tests/test_uri.py index ad4ec4013..b7b69c3c1 100644 --- a/tests/test_uri.py +++ b/tests/test_uri.py @@ -5,17 +5,17 @@ VALID_URIS = [ - ('ws://localhost/', (False, 'localhost', 80, '/', None)), - ('wss://localhost/', (True, 'localhost', 443, '/', None)), - ('ws://localhost/path?query', (False, 'localhost', 80, '/path?query', None)), - ('WS://LOCALHOST/PATH?QUERY', (False, 'localhost', 80, '/PATH?QUERY', None)), - ('ws://user:pass@localhost/', (False, 'localhost', 80, '/', ('user', 'pass'))), + ("ws://localhost/", (False, "localhost", 80, "/", None)), + ("wss://localhost/", (True, "localhost", 443, "/", None)), + ("ws://localhost/path?query", (False, "localhost", 80, "/path?query", None)), + ("WS://LOCALHOST/PATH?QUERY", (False, "localhost", 80, "/PATH?QUERY", None)), + ("ws://user:pass@localhost/", (False, "localhost", 80, "/", ("user", "pass"))), ] INVALID_URIS = [ - 'http://localhost/', - 'https://localhost/', - 'ws://localhost/path#fragment', + "http://localhost/", + "https://localhost/", + "ws://localhost/path#fragment", ] diff --git a/tests/test_utils.py b/tests/test_utils.py index 1b913fe7f..e5570f098 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -12,10 +12,10 @@ def apply_mask(*args, **kwargs): apply_mask_type_combos = list(itertools.product([bytes, bytearray], repeat=2)) apply_mask_test_values = [ - (b'', b'1234', b''), - (b'aBcDe', b'\x00\x00\x00\x00', b'aBcDe'), - (b'abcdABCD', b'1234', b'PPPPpppp'), - (b'abcdABCD' * 10, b'1234', b'PPPPpppp' * 10), + (b"", b"1234", b""), + (b"aBcDe", b"\x00\x00\x00\x00", b"aBcDe"), + (b"abcdABCD", b"1234", b"PPPPpppp"), + (b"abcdABCD" * 
10, b"1234", b"PPPPpppp" * 10), ] def test_apply_mask(self): @@ -50,17 +50,17 @@ def test_apply_mask_non_contiguous_memoryview(self): self.assertEqual(result, data_out) def test_apply_mask_check_input_types(self): - for data_in, mask in [(None, None), (b'abcd', None), (None, b'abcd')]: + for data_in, mask in [(None, None), (b"abcd", None), (None, b"abcd")]: with self.subTest(data_in=data_in, mask=mask): with self.assertRaises(TypeError): self.apply_mask(data_in, mask) def test_apply_mask_check_mask_length(self): for data_in, mask in [ - (b'', b''), - (b'abcd', b'123'), - (b'', b'aBcDe'), - (b'12345678', b'12345678'), + (b"", b""), + (b"abcd", b"123"), + (b"", b"aBcDe"), + (b"12345678", b"12345678"), ]: with self.subTest(data_in=data_in, mask=mask): with self.assertRaises(ValueError): diff --git a/tox.ini b/tox.ini index 6cff294e5..e9623ec7d 100644 --- a/tox.ini +++ b/tox.ini @@ -12,7 +12,7 @@ commands = deps = coverage [testenv:black] -commands = black --check --skip-string-normalization src tests +commands = black --check src tests deps = black [testenv:flake8] From c6bf1ee284ca7ac42d7e0c556d6e2fb5ff97bd05 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 29 Dec 2018 14:46:41 +0100 Subject: [PATCH 025/281] Drop compatibility with Python 3.4. It's EOL in three months. I'm not putting effort into supporting obsolete versions for free :-) --- .appveyor.yml | 6 +++--- .circleci/config.yml | 12 ------------ .travis.yml | 4 ++-- README.rst | 2 +- docs/changelog.rst | 5 +++++ docs/intro.rst | 5 +---- setup.cfg | 2 +- setup.py | 7 +++---- tests/test_client_server.py | 8 +------- tox.ini | 2 +- 10 files changed, 18 insertions(+), 35 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index 461ff5ced..5109200b4 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -5,14 +5,14 @@ branches: skip_branch_with_pr: true environment: -# websockets only works on Python >= 3.4. - CIBW_SKIP: cp27-* cp33-* +# websockets only works on Python >= 3.5. + CIBW_SKIP: cp27-* cp33-* cp34-* CIBW_TEST_COMMAND: python -W default -m unittest WEBSOCKETS_TESTS_TIMEOUT_FACTOR: 100 install: # Ensure python is Python 3. - - set PATH=C:\Python34;%PATH% + - set PATH=C:\Python37;%PATH% - cmd: python -m pip install --upgrade cibuildwheel # Create file '.cibuildwheel' so that extension build is not optional (c.f. setup.py). - cmd: touch .cibuildwheel diff --git a/.circleci/config.yml b/.circleci/config.yml index f0ca45b21..5ec5b5103 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -11,15 +11,6 @@ jobs: - run: sudo pip install tox codecov - run: tox -e coverage,black,flake8,isort - run: codecov - py34: - docker: - - image: circleci/python:3.4 - steps: - # Remove IPv6 entry for localhost in Circle CI containers because it doesn't work anyway. - - run: sudo cp /etc/hosts /tmp; sudo sed -i '/::1/d' /tmp/hosts; sudo cp /tmp/hosts /etc - - checkout - - run: sudo pip install tox - - run: tox -e py34 py35: docker: - image: circleci/python:3.5 @@ -53,9 +44,6 @@ workflows: build: jobs: - main - - py34: - requires: - - main - py35: requires: - main diff --git a/.travis.yml b/.travis.yml index 3d6dd2089..c0f11357e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,7 @@ env: global: - # websockets only works on Python >= 3.4. - - CIBW_SKIP="cp27-* cp33-*" + # websockets only works on Python >= 3.5. 
+ - CIBW_SKIP="cp27-* cp33-* cp34-*" - CIBW_TEST_COMMAND="python3 -W default -m unittest" - WEBSOCKETS_TESTS_TIMEOUT_FACTOR=100 diff --git a/README.rst b/README.rst index b57317d19..572647a15 100644 --- a/README.rst +++ b/README.rst @@ -124,7 +124,7 @@ Why shouldn't I use ``websockets``? and :rfc:`7692`: Compression Extensions for WebSocket. Its support for HTTP is minimal — just enough for a HTTP health check. * If you want to use Python 2: ``websockets`` builds upon ``asyncio`` which - only works on Python 3. ``websockets`` requires Python ≥ 3.4. + only works on Python 3. ``websockets`` requires Python ≥ 3.5. What else? ---------- diff --git a/docs/changelog.rst b/docs/changelog.rst index a76e1212e..87e2e0ac8 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -8,11 +8,16 @@ Changelog *In development* +.. warning:: + + **Version 8.0 drops compatibility with Python 3.4.** + .. warning:: **Version 8.0 adds the reason phrase to the return type of the low-level API** :func:`~http.read_response` **.** + Also: * :meth:`~protocol.WebSocketCommonProtocol.send`, diff --git a/docs/intro.rst b/docs/intro.rst index 154e1d8ea..b153d2f5d 100644 --- a/docs/intro.rst +++ b/docs/intro.rst @@ -6,15 +6,12 @@ Getting started Requirements ------------ -``websockets`` requires Python ≥ 3.4. +``websockets`` requires Python ≥ 3.5. You should use the latest version of Python if possible. If you're using an older version, be aware that for each minor version (3.x), only the latest bugfix release (3.x.y) is officially supported. -For the best experience, you should start with Python ≥ 3.6. :mod:`asyncio` -received interesting improvements between Python 3.4 and 3.6. - .. warning:: This documentation is written for Python ≥ 3.6. If you're using an older diff --git a/setup.cfg b/setup.cfg index ad3af102f..88b9b1a33 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,5 +1,5 @@ [bdist_wheel] -python-tag = py34.py35.py36.py37 +python-tag = py35.py36.py37 [metadata] license_file = LICENSE diff --git a/setup.py b/setup.py index 3a86887aa..b9e121af7 100644 --- a/setup.py +++ b/setup.py @@ -20,8 +20,8 @@ py_version = sys.version_info[:2] -if py_version < (3, 4): - raise Exception("websockets requires Python >= 3.4.") +if py_version < (3, 5): + raise Exception("websockets requires Python >= 3.5.") packages = ['websockets', 'websockets/extensions'] @@ -56,7 +56,6 @@ 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', @@ -66,6 +65,6 @@ ext_modules=ext_modules, include_package_data=True, zip_safe=True, - python_requires='>=3.4', + python_requires='>=3.5', test_loader='unittest:TestLoader', ) diff --git a/tests/test_client_server.py b/tests/test_client_server.py index 9ba2725d9..6a06bdaf9 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -386,9 +386,6 @@ def test_explicit_event_loop(self): reply = self.loop.run_until_complete(self.client.recv()) self.assertEqual(reply, "Hello!") - # The way the legacy SSL implementation wraps sockets makes it extremely - # hard to write a test for Python 3.4. 
- @unittest.skipIf(sys.version_info[:2] <= (3, 4), "this test requires Python 3.5+") @with_server() def test_explicit_socket(self): class TrackedSocket(socket.socket): @@ -1132,10 +1129,7 @@ def client_context(self): ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ssl_context.load_verify_locations(testcert) ssl_context.verify_mode = ssl.CERT_REQUIRED - # ssl.match_hostname can't match IP addresses on Python < 3.5. - # We're using IP addresses to enforce testing of IPv4 and IPv6. - if sys.version_info[:2] >= (3, 5): # pragma: no cover - ssl_context.check_hostname = True + ssl_context.check_hostname = True return ssl_context def start_server(self, **kwds): diff --git a/tox.ini b/tox.ini index e9623ec7d..de0f285d0 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py34,py35,py36,py37,coverage,black,flake8,isort +envlist = py35,py36,py37,coverage,black,flake8,isort [testenv] commands = python -W default -m unittest {posargs} From db25f49496343bb6aacbe31994da83a5470dc67c Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 29 Dec 2018 15:03:39 +0100 Subject: [PATCH 026/281] Remove asyncio_ensure_future compatibility function. --- src/websockets/__main__.py | 7 +++---- src/websockets/compatibility.py | 7 ------- src/websockets/protocol.py | 21 ++++++--------------- src/websockets/server.py | 5 ++--- tests/test_protocol.py | 22 ++++++++-------------- 5 files changed, 19 insertions(+), 43 deletions(-) diff --git a/src/websockets/__main__.py b/src/websockets/__main__.py index 4c880c24c..b0fdaa6fe 100644 --- a/src/websockets/__main__.py +++ b/src/websockets/__main__.py @@ -6,7 +6,6 @@ import threading import websockets -from websockets.compatibility import asyncio_ensure_future from websockets.exceptions import format_close @@ -101,8 +100,8 @@ def run_client(uri, loop, inputs, stop): try: while True: - incoming = asyncio_ensure_future(websocket.recv()) - outgoing = asyncio_ensure_future(inputs.get()) + incoming = asyncio.ensure_future(websocket.recv()) + outgoing = asyncio.ensure_future(inputs.get()) done, pending = yield from asyncio.wait( [incoming, outgoing, stop], return_when=asyncio.FIRST_COMPLETED ) @@ -173,7 +172,7 @@ def main(): stop = asyncio.Future(loop=loop) # Schedule the task that will manage the connection. - asyncio_ensure_future(run_client(args.uri, loop, inputs, stop), loop=loop) + asyncio.ensure_future(run_client(args.uri, loop, inputs, stop), loop=loop) # Start the event loop in a background thread. thread = threading.Thread(target=loop.run_forever) diff --git a/src/websockets/compatibility.py b/src/websockets/compatibility.py index 8b7a21a5c..2e9fcef2b 100644 --- a/src/websockets/compatibility.py +++ b/src/websockets/compatibility.py @@ -4,16 +4,9 @@ """ -import asyncio import http -# Replace with BaseEventLoop.create_task when dropping Python < 3.4.2. 
-try: # pragma: no cover - asyncio_ensure_future = asyncio.ensure_future # Python ≥ 3.5 -except AttributeError: # pragma: no cover - asyncio_ensure_future = getattr(asyncio, "async") # Python < 3.5 - try: # pragma: no cover # Python ≥ 3.5 SWITCHING_PROTOCOLS = http.HTTPStatus.SWITCHING_PROTOCOLS diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index d7d7282a1..62845e0a8 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -18,7 +18,6 @@ import sys import warnings -from .compatibility import asyncio_ensure_future from .exceptions import ( ConnectionClosed, InvalidState, @@ -288,17 +287,11 @@ def connection_open(self): self.state = State.OPEN logger.debug("%s - state = OPEN", self.side) # Start the task that receives incoming WebSocket messages. - self.transfer_data_task = asyncio_ensure_future( - self.transfer_data(), loop=self.loop - ) + self.transfer_data_task = self.loop.create_task(self.transfer_data()) # Start the task that sends pings at regular intervals. - self.keepalive_ping_task = asyncio_ensure_future( - self.keepalive_ping(), loop=self.loop - ) + self.keepalive_ping_task = self.loop.create_task(self.keepalive_ping()) # Start the task that eventually closes the TCP connection. - self.close_connection_task = asyncio_ensure_future( - self.close_connection(), loop=self.loop - ) + self.close_connection_task = self.loop.create_task(self.close_connection()) # Public API @@ -519,8 +512,8 @@ def close(self, code=1000, reason=""): :meth:`close` is idempotent: it doesn't do anything once the connection is closed. - It's safe to wrap this coroutine in :func:`~asyncio.ensure_future` - since errors during connection termination aren't particularly useful. + It's safe to wrap this coroutine in :func:`~asyncio.create_task` since + errors during connection termination aren't particularly useful. ``code`` must be an :class:`int` and ``reason`` a :class:`str`. @@ -1142,9 +1135,7 @@ def fail_connection(self, code=1006, reason=""): # Start close_connection_task if the opening handshake didn't succeed. if self.close_connection_task is None: - self.close_connection_task = asyncio_ensure_future( - self.close_connection(), loop=self.loop - ) + self.close_connection_task = self.loop.create_task(self.close_connection()) def abort_keepalive_pings(self): """ diff --git a/src/websockets/server.py b/src/websockets/server.py index e207db2bc..c9e2cc23a 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -17,7 +17,6 @@ SERVICE_UNAVAILABLE, SWITCHING_PROTOCOLS, UPGRADE_REQUIRED, - asyncio_ensure_future, ) from .exceptions import ( AbortHandshake, @@ -95,7 +94,7 @@ def connection_made(self, transport): # create a race condition between the creation of the task, which # schedules its execution, and the moment the handler starts running. 
self.ws_server.register(self) - self.handler_task = asyncio_ensure_future(self.handler(), loop=self.loop) + self.handler_task = self.loop.create_task(self.handler()) @asyncio.coroutine def handler(self): @@ -605,7 +604,7 @@ def close(self): """ if self.close_task is None: - self.close_task = asyncio_ensure_future(self._close(), loop=self.loop) + self.close_task = self.loop.create_task(self._close()) @asyncio.coroutine def _close(self): diff --git a/tests/test_protocol.py b/tests/test_protocol.py index cb562e647..896c0fe4b 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -1,13 +1,11 @@ import asyncio import contextlib -import functools import logging import os import time import unittest import unittest.mock -from websockets.compatibility import asyncio_ensure_future from websockets.exceptions import ConnectionClosed, InvalidState from websockets.framing import * from websockets.protocol import State, WebSocketCommonProtocol @@ -130,10 +128,6 @@ def delayed_drain(): local_close = Frame(True, OP_CLOSE, serialize_close(1000, "local")) remote_close = Frame(True, OP_CLOSE, serialize_close(1000, "remote")) - @property - def ensure_future(self): - return functools.partial(asyncio_ensure_future, loop=self.loop) - def receive_frame(self, frame): """ Make the protocol receive a frame. @@ -197,7 +191,7 @@ def half_close_connection_local(self, code=1000, reason="close"): """ close_frame_data = serialize_close(code, reason) # Trigger the closing handshake from the local endpoint. - close_task = self.ensure_future(self.protocol.close(code, reason)) + close_task = self.loop.create_task(self.protocol.close(code, reason)) self.run_loop_once() # wait_for executes self.run_loop_once() # write_frame executes # Empty the outgoing data stream so we can make assertions later on. 
@@ -371,7 +365,7 @@ def test_closed(self): self.assertTrue(self.protocol.closed) def test_wait_closed(self): - wait_closed = asyncio_ensure_future(self.protocol.wait_closed()) + wait_closed = self.loop.create_task(self.protocol.wait_closed()) self.assertFalse(wait_closed.done()) self.close_connection() self.assertTrue(wait_closed.done()) @@ -443,7 +437,7 @@ def test_recv_binary_no_max_size(self): self.assertEqual(data, b"tea" * 342) def test_recv_queue_empty(self): - recv = self.ensure_future(self.protocol.recv()) + recv = self.loop.create_task(self.protocol.recv()) with self.assertRaises(asyncio.TimeoutError): self.loop.run_until_complete( asyncio.wait_for(asyncio.shield(recv), timeout=MS) @@ -489,7 +483,7 @@ def read_message(): self.assertConnectionFailed(1011, "") def test_recv_canceled(self): - recv = self.ensure_future(self.protocol.recv()) + recv = self.loop.create_task(self.protocol.recv()) self.loop.call_soon(recv.cancel) with self.assertRaises(asyncio.CancelledError): @@ -501,7 +495,7 @@ def test_recv_canceled(self): self.assertEqual(data, "café") def test_recv_canceled_race_condition(self): - recv = self.ensure_future( + recv = self.loop.create_task( asyncio.wait_for(self.protocol.recv(), timeout=0.000001) ) self.loop.call_soon( @@ -518,7 +512,7 @@ def test_recv_canceled_race_condition(self): self.assertEqual(data, "café") def test_recv_prevents_concurrent_calls(self): - recv = self.ensure_future(self.protocol.recv()) + recv = self.loop.create_task(self.protocol.recv()) with self.assertRaises(RuntimeError): self.loop.run_until_complete(self.protocol.recv()) @@ -1136,7 +1130,7 @@ def test_close_connection_lost(self): self.assertConnectionFailed(1006, "") def test_local_close_during_recv(self): - recv = self.ensure_future(self.protocol.recv()) + recv = self.loop.create_task(self.protocol.recv()) self.loop.call_later(MS, self.receive_frame, self.close_frame) self.loop.call_later(MS, self.receive_eof_if_client) @@ -1153,7 +1147,7 @@ def test_local_close_during_recv(self): def test_remote_close_during_send(self): self.make_drain_slow() - send = self.ensure_future(self.protocol.send("hello")) + send = self.loop.create_task(self.protocol.send("hello")) self.receive_frame(self.close_frame) self.receive_eof() From 54b1c370f74712a2131e4eca5415c76a3df5f4e5 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 29 Dec 2018 16:05:16 +0100 Subject: [PATCH 027/281] Remove http.HTTPStatus compatibility definitions. --- src/websockets/compatibility.py | 77 --------------------------------- src/websockets/server.py | 32 +++++++------- tests/test_client_server.py | 39 ++++++++--------- 3 files changed, 32 insertions(+), 116 deletions(-) delete mode 100644 src/websockets/compatibility.py diff --git a/src/websockets/compatibility.py b/src/websockets/compatibility.py deleted file mode 100644 index 2e9fcef2b..000000000 --- a/src/websockets/compatibility.py +++ /dev/null @@ -1,77 +0,0 @@ -""" -The :mod:`websockets.compatibility` module provides helpers for bridging -compatibility issues across Python versions. 
- -""" - -import http - - -try: # pragma: no cover - # Python ≥ 3.5 - SWITCHING_PROTOCOLS = http.HTTPStatus.SWITCHING_PROTOCOLS - OK = http.HTTPStatus.OK - BAD_REQUEST = http.HTTPStatus.BAD_REQUEST - UNAUTHORIZED = http.HTTPStatus.UNAUTHORIZED - FORBIDDEN = http.HTTPStatus.FORBIDDEN - UPGRADE_REQUIRED = http.HTTPStatus.UPGRADE_REQUIRED - INTERNAL_SERVER_ERROR = http.HTTPStatus.INTERNAL_SERVER_ERROR - SERVICE_UNAVAILABLE = http.HTTPStatus.SERVICE_UNAVAILABLE - MOVED_PERMANENTLY = http.HTTPStatus.MOVED_PERMANENTLY - FOUND = http.HTTPStatus.FOUND - SEE_OTHER = http.HTTPStatus.SEE_OTHER - TEMPORARY_REDIRECT = http.HTTPStatus.TEMPORARY_REDIRECT - PERMANENT_REDIRECT = http.HTTPStatus.PERMANENT_REDIRECT -except AttributeError: # pragma: no cover - # Python < 3.5 - class SWITCHING_PROTOCOLS: - value = 101 - phrase = "Switching Protocols" - - class OK: - value = 200 - phrase = "OK" - - class BAD_REQUEST: - value = 400 - phrase = "Bad Request" - - class UNAUTHORIZED: - value = 401 - phrase = "Unauthorized" - - class FORBIDDEN: - value = 403 - phrase = "Forbidden" - - class UPGRADE_REQUIRED: - value = 426 - phrase = "Upgrade Required" - - class INTERNAL_SERVER_ERROR: - value = 500 - phrase = "Internal Server Error" - - class SERVICE_UNAVAILABLE: - value = 503 - phrase = "Service Unavailable" - - class MOVED_PERMANENTLY: - value = 301 - phrase = "Moved Permanently" - - class FOUND: - value = 302 - phrase = "Found" - - class SEE_OTHER: - value = 303 - phrase = "See Other" - - class TEMPORARY_REDIRECT: - value = 307 - phrase = "Temporary Redirect" - - class PERMANENT_REDIRECT: - value = 308 - phrase = "Permanent Redirect" diff --git a/src/websockets/server.py b/src/websockets/server.py index c9e2cc23a..453acec4d 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -6,18 +6,11 @@ import asyncio import collections.abc import email.utils +import http import logging import sys import warnings -from .compatibility import ( - BAD_REQUEST, - FORBIDDEN, - INTERNAL_SERVER_ERROR, - SERVICE_UNAVAILABLE, - SWITCHING_PROTOCOLS, - UPGRADE_REQUIRED, -) from .exceptions import ( AbortHandshake, InvalidHandshake, @@ -123,25 +116,29 @@ def handler(self): status, headers, body = exc.status, exc.headers, exc.body elif isinstance(exc, InvalidOrigin): logger.debug("Invalid origin", exc_info=True) - status, headers, body = FORBIDDEN, [], (str(exc) + "\n").encode() + status, headers, body = ( + http.HTTPStatus.FORBIDDEN, + [], + (str(exc) + "\n").encode(), + ) elif isinstance(exc, InvalidUpgrade): logger.debug("Invalid upgrade", exc_info=True) status, headers, body = ( - UPGRADE_REQUIRED, + http.HTTPStatus.UPGRADE_REQUIRED, [("Upgrade", "websocket")], (str(exc) + "\n").encode(), ) elif isinstance(exc, InvalidHandshake): logger.debug("Invalid handshake", exc_info=True) status, headers, body = ( - BAD_REQUEST, + http.HTTPStatus.BAD_REQUEST, [], (str(exc) + "\n").encode(), ) else: logger.warning("Error in opening handshake", exc_info=True) status, headers, body = ( - INTERNAL_SERVER_ERROR, + http.HTTPStatus.INTERNAL_SERVER_ERROR, [], b"See server log for more information.\n", ) @@ -251,9 +248,6 @@ def process_request(self, path, request_headers): response is sent and the connection is closed. The HTTP status must be a :class:`~http.HTTPStatus`. - (:class:`~http.HTTPStatus` was added in Python 3.5. Use a compatible - object on earlier versions. Look at ``SWITCHING_PROTOCOLS`` in - ``websockets.compatibility`` for an example.) 
HTTP headers must be a :class:`~websockets.http.Headers` instance, a :class:`~collections.abc.Mapping`, or an iterable of ``(name, value)`` @@ -475,7 +469,11 @@ def handshake( # Change the response to a 503 error if the server is shutting down. if not self.ws_server.is_serving(): - early_response = SERVICE_UNAVAILABLE, [], b"Server is shutting down.\n" + early_response = ( + http.HTTPStatus.SERVICE_UNAVAILABLE, + [], + b"Server is shutting down.\n", + ) if early_response is not None: raise AbortHandshake(*early_response) @@ -515,7 +513,7 @@ def handshake( response_headers.setdefault("Date", email.utils.formatdate(usegmt=True)) response_headers.setdefault("Server", USER_AGENT) - self.write_http_response(SWITCHING_PROTOCOLS, response_headers) + self.write_http_response(http.HTTPStatus.SWITCHING_PROTOCOLS, response_headers) self.connection_open() diff --git a/tests/test_client_server.py b/tests/test_client_server.py index 6a06bdaf9..214b1a627 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -1,6 +1,7 @@ import asyncio import contextlib import functools +import http import logging import pathlib import random @@ -15,16 +16,6 @@ import warnings from websockets.client import * -from websockets.compatibility import ( - FORBIDDEN, - FOUND, - MOVED_PERMANENTLY, - OK, - PERMANENT_REDIRECT, - SEE_OTHER, - TEMPORARY_REDIRECT, - UNAUTHORIZED, -) from websockets.exceptions import ( ConnectionClosed, InvalidHandshake, @@ -174,14 +165,14 @@ class UnauthorizedServerProtocol(WebSocketServerProtocol): @asyncio.coroutine def process_request(self, path, request_headers): # Test returning headers as a Headers instance (1/3) - return UNAUTHORIZED, Headers([("X-Access", "denied")]), b"" + return http.HTTPStatus.UNAUTHORIZED, Headers([("X-Access", "denied")]), b"" class ForbiddenServerProtocol(WebSocketServerProtocol): @asyncio.coroutine def process_request(self, path, request_headers): # Test returning headers as a dict (2/3) - return FORBIDDEN, {"X-Access": "denied"}, b"" + return http.HTTPStatus.FORBIDDEN, {"X-Access": "denied"}, b"" class HealthCheckServerProtocol(WebSocketServerProtocol): @@ -189,7 +180,7 @@ class HealthCheckServerProtocol(WebSocketServerProtocol): def process_request(self, path, request_headers): # Test returning headers as a list of pairs (3/3) if path == "/__health__/": - return OK, [("X-Access", "OK")], b"status = green\n" + return http.HTTPStatus.OK, [("X-Access", "OK")], b"status = green\n" class SlowServerProtocol(WebSocketServerProtocol): @@ -352,11 +343,11 @@ def test_basic(self): @with_server() def test_redirect(self): redirect_statuses = [ - MOVED_PERMANENTLY, - FOUND, - SEE_OTHER, - TEMPORARY_REDIRECT, - PERMANENT_REDIRECT, + http.HTTPStatus.MOVED_PERMANENTLY, + http.HTTPStatus.FOUND, + http.HTTPStatus.SEE_OTHER, + http.HTTPStatus.TEMPORARY_REDIRECT, + http.HTTPStatus.PERMANENT_REDIRECT, ] for status in redirect_statuses: with temp_test_redirecting_server(self, status): @@ -366,7 +357,7 @@ def test_redirect(self): self.assertEqual(reply, "Hello!") def test_infinite_redirect(self): - with temp_test_redirecting_server(self, FOUND): + with temp_test_redirecting_server(self, http.HTTPStatus.FOUND): self.server = self.redirecting_server with self.assertRaises(InvalidHandshake): with temp_test_client(self): @@ -374,7 +365,9 @@ def test_infinite_redirect(self): @with_server() def test_redirect_missing_location(self): - with temp_test_redirecting_server(self, FOUND, include_location=False): + with temp_test_redirecting_server( + self, http.HTTPStatus.FOUND, 
include_location=False + ): with self.assertRaises(InvalidMessage): with temp_test_client(self): self.fail("Did not raise") # pragma: no cover @@ -449,7 +442,7 @@ def test_unix_socket(self): client_socket.close() self.stop_server() - @with_server(process_request=lambda p, rh: (OK, [], b"OK\n")) + @with_server(process_request=lambda p, rh: (http.HTTPStatus.OK, [], b"OK\n")) def test_process_request_argument(self): response = self.loop.run_until_complete(self.make_http_request("/")) @@ -1156,7 +1149,9 @@ def test_ws_uri_is_rejected(self): @with_server() def test_redirect_insecure(self): - with temp_test_redirecting_server(self, FOUND, force_insecure=True): + with temp_test_redirecting_server( + self, http.HTTPStatus.FOUND, force_insecure=True + ): with self.assertRaises(InvalidHandshake): with temp_test_client(self): self.fail("Did not raise") # pragma: no cover From a1541526172de88020d925cd61703a7a89b8595c Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 29 Dec 2018 16:20:12 +0100 Subject: [PATCH 028/281] Merge Python 3.5+ packages. --- setup.py | 3 - src/websockets/client.py | 53 ++++++++++++-- src/websockets/py35/__init__.py | 2 - src/websockets/py35/client.py | 48 ------------- src/websockets/py35/server.py | 22 ------ src/websockets/server.py | 30 ++++++-- tests/py35/__init__.py | 0 tests/py35/_test_client_server.py | 112 ------------------------------ tests/test_client_server.py | 99 +++++++++++++++++++++++++- 9 files changed, 169 insertions(+), 200 deletions(-) delete mode 100644 src/websockets/py35/__init__.py delete mode 100644 src/websockets/py35/client.py delete mode 100644 src/websockets/py35/server.py delete mode 100644 tests/py35/__init__.py delete mode 100644 tests/py35/_test_client_server.py diff --git a/setup.py b/setup.py index b9e121af7..78d6f7af4 100644 --- a/setup.py +++ b/setup.py @@ -25,9 +25,6 @@ packages = ['websockets', 'websockets/extensions'] -if py_version >= (3, 5): - packages.append('websockets/py35') - if py_version >= (3, 6): packages.append('websockets/py36') diff --git a/src/websockets/client.py b/src/websockets/client.py index 66034ce25..cb2e3ff7f 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -506,11 +506,58 @@ def __iter__(self): # pragma: no cover self.ws_client = protocol return protocol + async def __aenter__(self): + return await self + + async def __aexit__(self, exc_type, exc_value, traceback): + await self.ws_client.close() + + async def __await_impl__(self): + # Duplicated with __iter__ because Python 3.7 requires an async function + # (as explained in __await__ below) which Python 3.4 doesn't support. 
+ for redirects in range(self.MAX_REDIRECTS_ALLOWED): + transport, protocol = await self._creating_connection() + + try: + try: + await protocol.handshake( + self._wsuri, + origin=self._origin, + available_extensions=protocol.available_extensions, + available_subprotocols=protocol.available_subprotocols, + extra_headers=protocol.extra_headers, + ) + break # redirection chain ended + except Exception: + protocol.fail_connection() + await protocol.wait_closed() + raise + except RedirectHandshake as e: + if self._wsuri.secure and not e.wsuri.secure: + raise InvalidHandshake("Redirect dropped TLS") + self._wsuri = e.wsuri + continue # redirection chain continues + else: + raise InvalidHandshake("Maximum redirects exceeded") + + self.ws_client = protocol + return protocol + + def __await__(self): + # __await__() must return a type that I don't know how to obtain except + # by calling __await__() on the return value of an async function. + # I'm not finding a better way to take advantage of PEP 492. + return self.__await_impl__().__await__() + # We can't define __await__ on Python < 3.5.1 because asyncio.ensure_future # didn't accept arbitrary awaitables until Python 3.5.1. We don't define # __aenter__ and __aexit__ either on Python < 3.5.1 to keep things simple. -if sys.version_info[:3] <= (3, 5, 0): # pragma: no cover +if sys.version_info[:3] < (3, 5, 1): # pragma: no cover + + del Connect.__aenter__ + del Connect.__aexit__ + del Connect.__await__ @asyncio.coroutine def connect(*args, **kwds): @@ -519,9 +566,5 @@ def connect(*args, **kwds): connect.__doc__ = Connect.__doc__ else: - from .py35.client import __aenter__, __aexit__, __await__ - Connect.__aenter__ = __aenter__ - Connect.__aexit__ = __aexit__ - Connect.__await__ = __await__ connect = Connect diff --git a/src/websockets/py35/__init__.py b/src/websockets/py35/__init__.py deleted file mode 100644 index 9612d9dd7..000000000 --- a/src/websockets/py35/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# This package contains code using async / await syntax added in Python 3.5. -# It cannot be imported on Python < 3.5 because it triggers syntax errors. diff --git a/src/websockets/py35/client.py b/src/websockets/py35/client.py deleted file mode 100644 index ccb098483..000000000 --- a/src/websockets/py35/client.py +++ /dev/null @@ -1,48 +0,0 @@ -from ..exceptions import InvalidHandshake, RedirectHandshake - - -async def __aenter__(self): - return await self - - -async def __aexit__(self, exc_type, exc_value, traceback): - await self.ws_client.close() - - -async def __await_impl__(self): - # Duplicated with __iter__ because Python 3.7 requires an async function - # (as explained in __await__ below) which Python 3.4 doesn't support. 
- for redirects in range(self.MAX_REDIRECTS_ALLOWED): - transport, protocol = await self._creating_connection() - - try: - try: - await protocol.handshake( - self._wsuri, - origin=self._origin, - available_extensions=protocol.available_extensions, - available_subprotocols=protocol.available_subprotocols, - extra_headers=protocol.extra_headers, - ) - break # redirection chain ended - except Exception: - protocol.fail_connection() - await protocol.wait_closed() - raise - except RedirectHandshake as e: - if self._wsuri.secure and not e.wsuri.secure: - raise InvalidHandshake("Redirect dropped TLS") - self._wsuri = e.wsuri - continue # redirection chain continues - else: - raise InvalidHandshake("Maximum redirects exceeded") - - self.ws_client = protocol - return protocol - - -def __await__(self): - # __await__() must return a type that I don't know how to obtain except - # by calling __await__() on the return value of an async function. - # I'm not finding a better way to take advantage of PEP 492. - return __await_impl__(self).__await__() diff --git a/src/websockets/py35/server.py b/src/websockets/py35/server.py deleted file mode 100644 index 41a3675e3..000000000 --- a/src/websockets/py35/server.py +++ /dev/null @@ -1,22 +0,0 @@ -async def __aenter__(self): - return await self - - -async def __aexit__(self, exc_type, exc_value, traceback): - self.ws_server.close() - await self.ws_server.wait_closed() - - -async def __await_impl__(self): - # Duplicated with __iter__ because Python 3.7 requires an async function - # (as explained in __await__ below) which Python 3.4 doesn't support. - server = await self._creating_server - self.ws_server.wrap(server) - return self.ws_server - - -def __await__(self): - # __await__() must return a type that I don't know how to obtain except - # by calling __await__() on the return value of an async function. - # I'm not finding a better way to take advantage of PEP 492. - return __await_impl__(self).__await__() diff --git a/src/websockets/server.py b/src/websockets/server.py index 453acec4d..424d08922 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -850,6 +850,26 @@ def __iter__(self): # pragma: no cover self.ws_server.wrap(server) return self.ws_server + async def __aenter__(self): + return await self + + async def __aexit__(self, exc_type, exc_value, traceback): + self.ws_server.close() + await self.ws_server.wait_closed() + + async def __await_impl__(self): + # Duplicated with __iter__ because Python 3.7 requires an async function + # (as explained in __await__ below) which Python 3.4 doesn't support. + server = await self._creating_server + self.ws_server.wrap(server) + return self.ws_server + + def __await__(self): + # __await__() must return a type that I don't know how to obtain except + # by calling __await__() on the return value of an async function. + # I'm not finding a better way to take advantage of PEP 492. + return self.__await_impl__().__await__() + def unix_serve(ws_handler, path, **kwargs): """ @@ -869,7 +889,11 @@ def unix_serve(ws_handler, path, **kwargs): # We can't define __await__ on Python < 3.5.1 because asyncio.ensure_future # didn't accept arbitrary awaitables until Python 3.5.1. We don't define # __aenter__ and __aexit__ either on Python < 3.5.1 to keep things simple. 
-if sys.version_info[:3] <= (3, 5, 0): # pragma: no cover +if sys.version_info[:3] < (3, 5, 1): # pragma: no cover + + del Serve.__aenter__ + del Serve.__aexit__ + del Serve.__await__ @asyncio.coroutine def serve(*args, **kwds): @@ -878,9 +902,5 @@ def serve(*args, **kwds): serve.__doc__ = Serve.__doc__ else: - from .py35.server import __aenter__, __aexit__, __await__ - Serve.__aenter__ = __aenter__ - Serve.__aexit__ = __aexit__ - Serve.__await__ = __await__ serve = Serve diff --git a/tests/py35/__init__.py b/tests/py35/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/py35/_test_client_server.py b/tests/py35/_test_client_server.py deleted file mode 100644 index 869c379b8..000000000 --- a/tests/py35/_test_client_server.py +++ /dev/null @@ -1,112 +0,0 @@ -# Tests containing Python 3.5+ syntax, extracted from test_client_server.py. - -import asyncio -import pathlib -import socket -import sys -import tempfile -import unittest - -from websockets.client import * -from websockets.protocol import State -from websockets.server import * - -from ..test_client_server import get_server_uri, handler - - -class AsyncAwaitTests(unittest.TestCase): - def setUp(self): - self.loop = asyncio.new_event_loop() - asyncio.set_event_loop(self.loop) - - def tearDown(self): - self.loop.close() - - def test_client(self): - start_server = serve(handler, "localhost", 0) - server = self.loop.run_until_complete(start_server) - - async def run_client(): - # Await connect. - client = await connect(get_server_uri(server)) - self.assertEqual(client.state, State.OPEN) - await client.close() - self.assertEqual(client.state, State.CLOSED) - - self.loop.run_until_complete(run_client()) - - server.close() - self.loop.run_until_complete(server.wait_closed()) - - def test_server(self): - async def run_server(): - # Await serve. - server = await serve(handler, "localhost", 0) - self.assertTrue(server.sockets) - server.close() - await server.wait_closed() - self.assertFalse(server.sockets) - - self.loop.run_until_complete(run_server()) - - -class ContextManagerTests(unittest.TestCase): - def setUp(self): - self.loop = asyncio.new_event_loop() - asyncio.set_event_loop(self.loop) - - def tearDown(self): - self.loop.close() - - # Asynchronous context managers are only enabled on Python ≥ 3.5.1. - @unittest.skipIf( - sys.version_info[:3] <= (3, 5, 0), "this test requires Python 3.5.1+" - ) - def test_client(self): - start_server = serve(handler, "localhost", 0) - server = self.loop.run_until_complete(start_server) - - async def run_client(): - # Use connect as an asynchronous context manager. - async with connect(get_server_uri(server)) as client: - self.assertEqual(client.state, State.OPEN) - - # Check that exiting the context manager closed the connection. - self.assertEqual(client.state, State.CLOSED) - - self.loop.run_until_complete(run_client()) - - server.close() - self.loop.run_until_complete(server.wait_closed()) - - # Asynchronous context managers are only enabled on Python ≥ 3.5.1. - @unittest.skipIf( - sys.version_info[:3] <= (3, 5, 0), "this test requires Python 3.5.1+" - ) - def test_server(self): - async def run_server(): - # Use serve as an asynchronous context manager. - async with serve(handler, "localhost", 0) as server: - self.assertTrue(server.sockets) - - # Check that exiting the context manager closed the server. - self.assertFalse(server.sockets) - - self.loop.run_until_complete(run_server()) - - # Asynchronous context managers are only enabled on Python ≥ 3.5.1. 
- @unittest.skipIf( - sys.version_info[:3] <= (3, 5, 0), "this test requires Python 3.5.1+" - ) - @unittest.skipUnless(hasattr(socket, "AF_UNIX"), "this test requires Unix sockets") - def test_unix_server(self): - async def run_server(path): - async with unix_serve(handler, path) as server: - self.assertTrue(server.sockets) - - # Check that exiting the context manager closed the server. - self.assertFalse(server.sockets) - - with tempfile.TemporaryDirectory() as temp_dir: - path = bytes(pathlib.Path(temp_dir) / "websockets") - self.loop.run_until_complete(run_server(path)) diff --git a/tests/test_client_server.py b/tests/test_client_server.py index 214b1a627..633e097bc 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -1279,9 +1279,102 @@ def run_server(): self.loop.run_until_complete(run_server()) -if sys.version_info[:2] >= (3, 5): # pragma: no cover - from .py35._test_client_server import AsyncAwaitTests # noqa - from .py35._test_client_server import ContextManagerTests # noqa +class AsyncAwaitTests(unittest.TestCase): + def setUp(self): + self.loop = asyncio.new_event_loop() + asyncio.set_event_loop(self.loop) + + def tearDown(self): + self.loop.close() + + def test_client(self): + start_server = serve(handler, "localhost", 0) + server = self.loop.run_until_complete(start_server) + + async def run_client(): + # Await connect. + client = await connect(get_server_uri(server)) + self.assertEqual(client.state, State.OPEN) + await client.close() + self.assertEqual(client.state, State.CLOSED) + + self.loop.run_until_complete(run_client()) + + server.close() + self.loop.run_until_complete(server.wait_closed()) + + def test_server(self): + async def run_server(): + # Await serve. + server = await serve(handler, "localhost", 0) + self.assertTrue(server.sockets) + server.close() + await server.wait_closed() + self.assertFalse(server.sockets) + + self.loop.run_until_complete(run_server()) + + +class ContextManagerTests(unittest.TestCase): + def setUp(self): + self.loop = asyncio.new_event_loop() + asyncio.set_event_loop(self.loop) + + def tearDown(self): + self.loop.close() + + # Asynchronous context managers are only enabled on Python ≥ 3.5.1. + @unittest.skipIf( + sys.version_info[:3] <= (3, 5, 0), "this test requires Python 3.5.1+" + ) + def test_client(self): + start_server = serve(handler, "localhost", 0) + server = self.loop.run_until_complete(start_server) + + async def run_client(): + # Use connect as an asynchronous context manager. + async with connect(get_server_uri(server)) as client: + self.assertEqual(client.state, State.OPEN) + + # Check that exiting the context manager closed the connection. + self.assertEqual(client.state, State.CLOSED) + + self.loop.run_until_complete(run_client()) + + server.close() + self.loop.run_until_complete(server.wait_closed()) + + # Asynchronous context managers are only enabled on Python ≥ 3.5.1. + @unittest.skipIf( + sys.version_info[:3] <= (3, 5, 0), "this test requires Python 3.5.1+" + ) + def test_server(self): + async def run_server(): + # Use serve as an asynchronous context manager. + async with serve(handler, "localhost", 0) as server: + self.assertTrue(server.sockets) + + # Check that exiting the context manager closed the server. + self.assertFalse(server.sockets) + + self.loop.run_until_complete(run_server()) + + # Asynchronous context managers are only enabled on Python ≥ 3.5.1. 
+ @unittest.skipIf( + sys.version_info[:3] <= (3, 5, 0), "this test requires Python 3.5.1+" + ) + @unittest.skipUnless(hasattr(socket, "AF_UNIX"), "this test requires Unix sockets") + def test_unix_server(self): + async def run_server(path): + async with unix_serve(handler, path) as server: + self.assertTrue(server.sockets) + + # Check that exiting the context manager closed the server. + self.assertFalse(server.sockets) + + with tempfile.TemporaryDirectory() as temp_dir: + path = bytes(pathlib.Path(temp_dir) / "websockets") + self.loop.run_until_complete(run_server(path)) if sys.version_info[:2] >= (3, 6): # pragma: no cover From b5c40d597ed664f54382d3555b4f1d4cbd8c13d7 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 29 Dec 2018 17:20:23 +0100 Subject: [PATCH 029/281] Switch to async / await syntax. --- compliance/test_client.py | 34 ++++----- compliance/test_server.py | 7 +- src/websockets/__main__.py | 11 ++- src/websockets/client.py | 45 ++---------- src/websockets/framing.py | 14 ++-- src/websockets/http.py | 25 +++---- src/websockets/protocol.py | 140 +++++++++++++++--------------------- src/websockets/server.py | 48 +++++-------- tests/test_client_server.py | 42 +++++------ tests/test_protocol.py | 13 ++-- 10 files changed, 144 insertions(+), 235 deletions(-) diff --git a/compliance/test_client.py b/compliance/test_client.py index 382d06a05..1c1d4416a 100644 --- a/compliance/test_client.py +++ b/compliance/test_client.py @@ -16,42 +16,38 @@ AGENT = 'websockets' -@asyncio.coroutine -def get_case_count(server): +async def get_case_count(server): uri = server + '/getCaseCount' - ws = yield from websockets.connect(uri) - msg = yield from ws.recv() - yield from ws.close() + ws = await websockets.connect(uri) + msg = await ws.recv() + await ws.close() return json.loads(msg) -@asyncio.coroutine -def run_case(server, case, agent): +async def run_case(server, case, agent): uri = server + '/runCase?case={}&agent={}'.format(case, agent) - ws = yield from websockets.connect(uri, max_size=2 ** 25, max_queue=1) + ws = await websockets.connect(uri, max_size=2 ** 25, max_queue=1) while True: try: - msg = yield from ws.recv() - yield from ws.send(msg) + msg = await ws.recv() + await ws.send(msg) except websockets.ConnectionClosed: break -@asyncio.coroutine -def update_reports(server, agent): +async def update_reports(server, agent): uri = server + '/updateReports?agent={}'.format(agent) - ws = yield from websockets.connect(uri) - yield from ws.close() + ws = await websockets.connect(uri) + await ws.close() -@asyncio.coroutine -def run_tests(server, agent): - cases = yield from get_case_count(server) +async def run_tests(server, agent): + cases = await get_case_count(server) for case in range(1, cases + 1): print("Running test case {} out of {}".format(case, cases), end="\r") - yield from run_case(server, case, agent) + await run_case(server, case, agent) print("Ran {} test cases ".format(cases)) - yield from update_reports(server, agent) + await update_reports(server, agent) main = run_tests(SERVER, urllib.parse.quote(AGENT)) diff --git a/compliance/test_server.py b/compliance/test_server.py index 75e0e3044..ac5990d16 100644 --- a/compliance/test_server.py +++ b/compliance/test_server.py @@ -10,12 +10,11 @@ # logging.getLogger('websockets').setLevel(logging.DEBUG) -@asyncio.coroutine -def echo(ws, path): +async def echo(ws, path): while True: try: - msg = yield from ws.recv() - yield from ws.send(msg) + msg = await ws.recv() + await ws.send(msg) except websockets.ConnectionClosed: break 
diff --git a/src/websockets/__main__.py b/src/websockets/__main__.py index b0fdaa6fe..350fc06e8 100644 --- a/src/websockets/__main__.py +++ b/src/websockets/__main__.py @@ -87,10 +87,9 @@ def print_over_input(string): sys.stdout.flush() -@asyncio.coroutine -def run_client(uri, loop, inputs, stop): +async def run_client(uri, loop, inputs, stop): try: - websocket = yield from websockets.connect(uri) + websocket = await websockets.connect(uri) except Exception as exc: print_over_input("Failed to connect to {}: {}.".format(uri, exc)) exit_from_event_loop_thread(loop, stop) @@ -102,7 +101,7 @@ def run_client(uri, loop, inputs, stop): while True: incoming = asyncio.ensure_future(websocket.recv()) outgoing = asyncio.ensure_future(inputs.get()) - done, pending = yield from asyncio.wait( + done, pending = await asyncio.wait( [incoming, outgoing, stop], return_when=asyncio.FIRST_COMPLETED ) @@ -122,13 +121,13 @@ def run_client(uri, loop, inputs, stop): if outgoing in done: message = outgoing.result() - yield from websocket.send(message) + await websocket.send(message) if stop in done: break finally: - yield from websocket.close() + await websocket.close() close_status = format_close(websocket.close_code, websocket.close_reason) print_over_input( diff --git a/src/websockets/client.py b/src/websockets/client.py index cb2e3ff7f..46dd1b447 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -79,8 +79,7 @@ def write_http_request(self, path, headers): self.writer.write(request.encode()) - @asyncio.coroutine - def read_http_response(self): + async def read_http_response(self): """ Read status line and headers from the HTTP response. @@ -93,7 +92,7 @@ def read_http_response(self): """ try: - status_code, reason, headers = yield from read_response(self.reader) + status_code, reason, headers = await read_response(self.reader) except ValueError as exc: raise InvalidMessage("Malformed HTTP message") from exc @@ -220,8 +219,7 @@ def process_subprotocol(headers, available_subprotocols): return subprotocol - @asyncio.coroutine - def handshake( + async def handshake( self, wsuri, origin=None, @@ -289,7 +287,7 @@ def handshake( self.write_http_request(wsuri.resource_name, request_headers) - status_code, response_headers = yield from self.read_http_response() + status_code, response_headers = await self.read_http_response() if status_code in (301, 302, 303, 307, 308): if "Location" not in response_headers: raise InvalidMessage("Redirect response missing Location") @@ -477,34 +475,8 @@ def _creating_connection(self): return self._loop.create_connection(factory, host, port, **self._kwds) @asyncio.coroutine - def __iter__(self): # pragma: no cover - for redirects in range(self.MAX_REDIRECTS_ALLOWED): - transport, protocol = yield from self._creating_connection() - - try: - try: - yield from protocol.handshake( - self._wsuri, - origin=self._origin, - available_extensions=protocol.available_extensions, - available_subprotocols=protocol.available_subprotocols, - extra_headers=protocol.extra_headers, - ) - break # redirection chain ended - except Exception: - protocol.fail_connection() - yield from protocol.wait_closed() - raise - except RedirectHandshake as e: - if self._wsuri.secure and not e.wsuri.secure: - raise InvalidHandshake("Redirect dropped TLS") - self._wsuri = e.wsuri - continue # redirection chain continues - else: - raise InvalidHandshake("Maximum redirects exceeded") - - self.ws_client = protocol - return protocol + def __iter__(self): + return self.__await_impl__() async def 
__aenter__(self): return await self @@ -513,8 +485,6 @@ async def __aexit__(self, exc_type, exc_value, traceback): await self.ws_client.close() async def __await_impl__(self): - # Duplicated with __iter__ because Python 3.7 requires an async function - # (as explained in __await__ below) which Python 3.4 doesn't support. for redirects in range(self.MAX_REDIRECTS_ALLOWED): transport, protocol = await self._creating_connection() @@ -559,8 +529,7 @@ def __await__(self): del Connect.__aexit__ del Connect.__await__ - @asyncio.coroutine - def connect(*args, **kwds): + async def connect(*args, **kwds): return Connect(*args, **kwds).__iter__() connect.__doc__ = Connect.__doc__ diff --git a/src/websockets/framing.py b/src/websockets/framing.py index 8b0242715..c6b5564f5 100644 --- a/src/websockets/framing.py +++ b/src/websockets/framing.py @@ -9,7 +9,6 @@ """ -import asyncio import collections import io import random @@ -73,8 +72,7 @@ def __new__(cls, fin, opcode, data, rsv1=False, rsv2=False, rsv3=False): return FrameData.__new__(cls, fin, opcode, data, rsv1, rsv2, rsv3) @classmethod - @asyncio.coroutine - def read(cls, reader, *, mask, max_size=None, extensions=None): + async def read(cls, reader, *, mask, max_size=None, extensions=None): """ Read a WebSocket frame and return a :class:`Frame` object. @@ -97,7 +95,7 @@ def read(cls, reader, *, mask, max_size=None, extensions=None): """ # Read the header. - data = yield from reader(2) + data = await reader(2) head1, head2 = struct.unpack("!BB", data) # While not Pythonic, this is marginally faster than calling bool(). @@ -112,10 +110,10 @@ def read(cls, reader, *, mask, max_size=None, extensions=None): length = head2 & 0b01111111 if length == 126: - data = yield from reader(2) + data = await reader(2) length, = struct.unpack("!H", data) elif length == 127: - data = yield from reader(8) + data = await reader(8) length, = struct.unpack("!Q", data) if max_size is not None and length > max_size: raise PayloadTooBig( @@ -124,10 +122,10 @@ def read(cls, reader, *, mask, max_size=None, extensions=None): ) ) if mask: - mask_bits = yield from reader(4) + mask_bits = await reader(4) # Read the data. - data = yield from reader(length) + data = await reader(length) if mask: data = apply_mask(data, mask_bits) diff --git a/src/websockets/http.py b/src/websockets/http.py index ea17e0a2e..5e04e53bd 100644 --- a/src/websockets/http.py +++ b/src/websockets/http.py @@ -7,7 +7,6 @@ """ -import asyncio import collections.abc import re import sys @@ -49,8 +48,7 @@ _value_re = re.compile(rb"[\x09\x20-\x7e\x80-\xff]*") -@asyncio.coroutine -def read_request(stream): +async def read_request(stream): """ Read an HTTP/1.1 GET request from ``stream``. @@ -76,7 +74,7 @@ def read_request(stream): # version and because path isn't checked. Since WebSocket software tends # to implement HTTP/1.1 strictly, there's little need for lenient parsing. - request_line = yield from read_line(stream) + request_line = await read_line(stream) # This may raise "ValueError: not enough values to unpack" method, path, version = request_line.split(b" ", 2) @@ -87,13 +85,12 @@ def read_request(stream): raise ValueError("Unsupported HTTP version: %r" % version) path = path.decode("ascii", "surrogateescape") - headers = yield from read_headers(stream) + headers = await read_headers(stream) return path, headers -@asyncio.coroutine -def read_response(stream): +async def read_response(stream): """ Read an HTTP/1.1 response from ``stream``. 
@@ -117,7 +114,7 @@ def read_response(stream): # As in read_request, parsing is simple because a fixed value is expected # for version, status_code is a 3-digit number, and reason can be ignored. - status_line = yield from read_line(stream) + status_line = await read_line(stream) # This may raise "ValueError: not enough values to unpack" version, status_code, reason = status_line.split(b" ", 2) @@ -132,13 +129,12 @@ def read_response(stream): raise ValueError("Invalid HTTP reason phrase: %r" % reason) reason = reason.decode() - headers = yield from read_headers(stream) + headers = await read_headers(stream) return status_code, reason, headers -@asyncio.coroutine -def read_headers(stream): +async def read_headers(stream): """ Read HTTP headers from ``stream``. @@ -155,7 +151,7 @@ def read_headers(stream): headers = Headers() for _ in range(MAX_HEADERS + 1): - line = yield from read_line(stream) + line = await read_line(stream) if line == b"": break @@ -177,8 +173,7 @@ def read_headers(stream): return headers -@asyncio.coroutine -def read_line(stream): +async def read_line(stream): """ Read a single line from ``stream``. @@ -188,7 +183,7 @@ def read_line(stream): """ # Security: this is bounded by the StreamReader's limit (default = 32kB). - line = yield from stream.readline() + line = await stream.readline() # Security: this guarantees header values are small (hard-coded = 4kB) if len(line) > MAX_LINE: raise ValueError("Line too long") diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 62845e0a8..7f20bed62 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -16,7 +16,6 @@ import random import struct import sys -import warnings from .exceptions import ( ConnectionClosed, @@ -33,15 +32,6 @@ logger = logging.getLogger(__name__) -# On Python ≥ 3.7, silence a deprecation warning that we can't address before -# dropping support for Python < 3.5. -warnings.filterwarnings( - action="ignore", - message=r"'with \(yield from lock\)' is deprecated use 'async with lock' instead", - category=DeprecationWarning, -) - - # A WebSocket connection goes through the following four states, in order: @@ -346,8 +336,7 @@ def closed(self): """ return self.state is State.CLOSED - @asyncio.coroutine - def wait_closed(self): + async def wait_closed(self): """ Wait until the connection is closed. @@ -357,10 +346,9 @@ def wait_closed(self): of its cause, in tasks that interact with the WebSocket connection. """ - yield from asyncio.shield(self.connection_lost_waiter) + await asyncio.shield(self.connection_lost_waiter) - @asyncio.coroutine - def recv(self): + async def recv(self): """ This coroutine receives the next message. @@ -392,7 +380,7 @@ def recv(self): "is already waiting for the next message" ) - # Don't yield from self.ensure_open() here: + # Don't await self.ensure_open() here: # - messages could be available in the queue even if the connection # is closed; # - messages could be received before the closing frame even if the @@ -406,7 +394,7 @@ def recv(self): try: # If asyncio.wait() is canceled, it doesn't cancel # pop_message_waiter and self.transfer_data_task. - yield from asyncio.wait( + await asyncio.wait( [pop_message_waiter, self.transfer_data_task], loop=self.loop, return_when=asyncio.FIRST_COMPLETED, @@ -424,7 +412,7 @@ def recv(self): assert self.state in [State.CLOSING, State.CLOSED] # Wait until the connection is closed to raise # ConnectionClosed with the correct code and reason. 
- yield from self.ensure_open() + await self.ensure_open() # Pop a message from the queue. message = self.messages.popleft() @@ -436,8 +424,7 @@ def recv(self): return message - @asyncio.coroutine - def send(self, data): + async def send(self, data): """ This coroutine sends a message. @@ -453,7 +440,7 @@ def send(self, data): It raises a :exc:`TypeError` for other inputs. """ - yield from self.ensure_open() + await self.ensure_open() # Unfragmented message -- this case must be handled first because # strings and bytes-like objects are iterable. @@ -464,7 +451,7 @@ def send(self, data): # Perhaps data is an iterator, see below. pass else: - yield from self.write_frame(True, opcode, data) + await self.write_frame(True, opcode, data) return # Fragmented message -- regular iterator. @@ -478,7 +465,7 @@ def send(self, data): except StopIteration: return opcode, data = prepare_data(data) - yield from self.write_frame(False, opcode, data) + await self.write_frame(False, opcode, data) # Other fragments. for data in iter_data: @@ -488,10 +475,10 @@ def send(self, data): # complete it. This makes the connection unusable. self.fail_connection(1011) raise TypeError("data contains inconsistent types") - yield from self.write_frame(False, OP_CONT, data) + await self.write_frame(False, OP_CONT, data) # Final fragment. - yield from self.write_frame(True, OP_CONT, b"") + await self.write_frame(True, OP_CONT, b"") # Fragmented message -- asynchronous iterator @@ -500,8 +487,7 @@ def send(self, data): else: raise TypeError("data must be bytes, str, or iterable") - @asyncio.coroutine - def close(self, code=1000, reason=""): + async def close(self, code=1000, reason=""): """ This coroutine performs the closing handshake. @@ -519,7 +505,7 @@ def close(self, code=1000, reason=""): """ try: - yield from asyncio.wait_for( + await asyncio.wait_for( self.write_close_frame(serialize_close(code, reason)), self.close_timeout, loop=self.loop, @@ -541,17 +527,16 @@ def close(self, code=1000, reason=""): # If close() is canceled during the wait, self.transfer_data_task # is canceled before the timeout elapses (on Python ≥ 3.4.3). # This helps closing connections when shutting down a server. - yield from asyncio.wait_for( + await asyncio.wait_for( self.transfer_data_task, self.close_timeout, loop=self.loop ) except (asyncio.TimeoutError, asyncio.CancelledError): pass # Wait for the close connection task to close the TCP connection. - yield from asyncio.shield(self.close_connection_task) + await asyncio.shield(self.close_connection_task) - @asyncio.coroutine - def ping(self, data=None): + async def ping(self, data=None): """ This coroutine sends a ping. @@ -570,7 +555,7 @@ def ping(self, data=None): (which will be encoded to UTF-8) or a bytes-like object. """ - yield from self.ensure_open() + await self.ensure_open() if data is not None: data = encode_data(data) @@ -585,12 +570,11 @@ def ping(self, data=None): self.pings[data] = asyncio.Future(loop=self.loop) - yield from self.write_frame(True, OP_PING, data) + await self.write_frame(True, OP_PING, data) return asyncio.shield(self.pings[data]) - @asyncio.coroutine - def pong(self, data=b""): + async def pong(self, data=b""): """ This coroutine sends a pong. @@ -601,16 +585,15 @@ def pong(self, data=b""): bytes-like object. """ - yield from self.ensure_open() + await self.ensure_open() data = encode_data(data) - yield from self.write_frame(True, OP_PONG, data) + await self.write_frame(True, OP_PONG, data) # Private methods - no guarantees. 
- @asyncio.coroutine - def ensure_open(self): + async def ensure_open(self): """ Check that the WebSocket connection is open. @@ -623,7 +606,7 @@ def ensure_open(self): # self.close_connection_task may be closing it, going straight # from OPEN to CLOSED. if self.transfer_data_task.done(): - yield from asyncio.shield(self.close_connection_task) + await asyncio.shield(self.close_connection_task) raise ConnectionClosed( self.close_code, self.close_reason ) from self.transfer_data_exc @@ -642,7 +625,7 @@ def ensure_open(self): # CLOSING state also occurs when failing the connection. In that # case self.close_connection_task will complete even faster. if self.close_code is None: - yield from asyncio.shield(self.close_connection_task) + await asyncio.shield(self.close_connection_task) raise ConnectionClosed( self.close_code, self.close_reason ) from self.transfer_data_exc @@ -651,8 +634,7 @@ def ensure_open(self): assert self.state is State.CONNECTING raise InvalidState("WebSocket connection isn't established yet") - @asyncio.coroutine - def transfer_data(self): + async def transfer_data(self): """ Read incoming messages and put them in a queue. @@ -661,7 +643,7 @@ def transfer_data(self): """ try: while True: - message = yield from self.read_message() + message = await self.read_message() # Exit the loop when receiving a close frame. if message is None: @@ -671,7 +653,7 @@ def transfer_data(self): while len(self.messages) >= self.max_queue: self._put_message_waiter = asyncio.Future(loop=self.loop) try: - yield from self._put_message_waiter + await self._put_message_waiter finally: self._put_message_waiter = None @@ -719,8 +701,7 @@ def transfer_data(self): self.transfer_data_exc = exc self.fail_connection(1011) - @asyncio.coroutine - def read_message(self): + async def read_message(self): """ Read a single message from the connection. @@ -729,7 +710,7 @@ def read_message(self): Return ``None`` when the closing handshake is started. """ - frame = yield from self.read_data_frame(max_size=self.max_size) + frame = await self.read_data_frame(max_size=self.max_size) # A close frame was received. if frame is None: @@ -781,7 +762,7 @@ def append(frame): append(frame) while not frame.fin: - frame = yield from self.read_data_frame(max_size=max_size) + frame = await self.read_data_frame(max_size=max_size) if frame is None: raise WebSocketProtocolError("Incomplete fragmented message") if frame.opcode != OP_CONT: @@ -790,8 +771,7 @@ def append(frame): return ("" if text else b"").join(chunks) - @asyncio.coroutine - def read_data_frame(self, max_size): + async def read_data_frame(self, max_size): """ Read a single data frame from the connection. @@ -802,7 +782,7 @@ def read_data_frame(self, max_size): """ # 6.2. Receiving Data while True: - frame = yield from self.read_frame(max_size) + frame = await self.read_frame(max_size) # 5.5. Control Frames if frame.opcode == OP_CLOSE: @@ -812,7 +792,7 @@ def read_data_frame(self, max_size): # Echo the original data instead of re-serializing it with # serialize_close() because that fails when the close frame is # empty and parse_close() synthetizes a 1005 close code. - yield from self.write_close_frame(frame.data) + await self.write_close_frame(frame.data) return elif frame.opcode == OP_PING: @@ -822,7 +802,7 @@ def read_data_frame(self, max_size): logger.debug( "%s - received ping, sending pong: %s", self.side, ping_hex ) - yield from self.pong(frame.data) + await self.pong(frame.data) elif frame.opcode == OP_PONG: # Acknowledge pings on solicited pongs. 
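As the hunks above show, the converted ``ping()`` still returns
``asyncio.shield(self.pings[data])``: an awaitable that resolves when the
matching pong arrives. A minimal sketch of how an application can use it to
measure round-trip time (the ``measure_latency`` helper is hypothetical, not
part of the library)::

    import asyncio

    import websockets

    async def measure_latency(uri):
        async with websockets.connect(uri) as ws:
            loop = asyncio.get_event_loop()
            start = loop.time()
            pong_waiter = await ws.ping()  # the ping frame is sent here
            await pong_waiter              # resolves when the pong is received
            return loop.time() - start
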
@@ -861,13 +841,12 @@ def read_data_frame(self, max_size): else: return frame - @asyncio.coroutine - def read_frame(self, max_size): + async def read_frame(self, max_size): """ Read a single frame from the connection. """ - frame = yield from Frame.read( + frame = await Frame.read( self.reader.readexactly, mask=not self.is_client, max_size=max_size, @@ -876,8 +855,7 @@ def read_frame(self, max_size): logger.debug("%s < %r", self.side, frame) return frame - @asyncio.coroutine - def write_frame(self, fin, opcode, data, *, _expected_state=State.OPEN): + async def write_frame(self, fin, opcode, data, *, _expected_state=State.OPEN): # Defensive assertion for protocol compliance. if self.state is not _expected_state: # pragma: no cover raise InvalidState( @@ -892,21 +870,21 @@ def write_frame(self, fin, opcode, data, *, _expected_state=State.OPEN): # Remove when dropping support for Python < 3.6. if self.writer.transport is not None: # pragma: no cover if self.writer_is_closing(): - yield + await asyncio.sleep(0) try: # drain() cannot be called concurrently by multiple coroutines: # http://bugs.python.org/issue29930. Remove this lock when no # version of Python where this bugs exists is supported anymore. - with (yield from self._drain_lock): + async with self._drain_lock: # Handle flow control automatically. - yield from self.writer.drain() + await self.writer.drain() except ConnectionError: # Terminate the connection if the socket died. self.fail_connection() # Wait until the connection is closed to raise ConnectionClosed # with the correct code and reason. - yield from self.ensure_open() + await self.ensure_open() def writer_is_closing(self): """ @@ -927,8 +905,7 @@ def writer_is_closing(self): except AttributeError: return transport._closed - @asyncio.coroutine - def write_close_frame(self, data=b""): + async def write_close_frame(self, data=b""): """ Write a close frame if and only if the connection state is OPEN. @@ -944,12 +921,9 @@ def write_close_frame(self, data=b""): logger.debug("%s - state = CLOSING", self.side) # 7.1.2. Start the WebSocket Closing Handshake - yield from self.write_frame( - True, OP_CLOSE, data, _expected_state=State.CLOSING - ) + await self.write_frame(True, OP_CLOSE, data, _expected_state=State.CLOSING) - @asyncio.coroutine - def keepalive_ping(self): + async def keepalive_ping(self): """ Send a Ping frame and wait for a Pong frame at regular intervals. @@ -964,18 +938,18 @@ def keepalive_ping(self): try: while True: - yield from asyncio.sleep(self.ping_interval, loop=self.loop) + await asyncio.sleep(self.ping_interval, loop=self.loop) # ping() cannot raise ConnectionClosed, only CancelledError: # - If the connection is CLOSING, keepalive_ping_task will be # canceled by close_connection() before ping() returns. # - If the connection is CLOSED, keepalive_ping_task must be # canceled already. - ping_waiter = yield from self.ping() + ping_waiter = await self.ping() if self.ping_timeout is not None: try: - yield from asyncio.wait_for( + await asyncio.wait_for( ping_waiter, self.ping_timeout, loop=self.loop ) except asyncio.TimeoutError: @@ -989,8 +963,7 @@ def keepalive_ping(self): except Exception: logger.warning("Unexpected exception in keepalive ping task", exc_info=True) - @asyncio.coroutine - def close_connection(self): + async def close_connection(self): """ 7.1.1. Close the WebSocket Connection @@ -1006,7 +979,7 @@ def close_connection(self): # Wait for the data transfer phase to complete. 
if self.transfer_data_task is not None: try: - yield from self.transfer_data_task + await self.transfer_data_task except asyncio.CancelledError: pass @@ -1016,7 +989,7 @@ def close_connection(self): # A client should wait for a TCP close from the server. if self.is_client and self.transfer_data_task is not None: - if (yield from self.wait_for_connection_lost()): + if await self.wait_for_connection_lost(): return logger.debug("%s ! timed out waiting for TCP close", self.side) @@ -1025,7 +998,7 @@ def close_connection(self): logger.debug("%s x half-closing TCP connection", self.side) self.writer.write_eof() - if (yield from self.wait_for_connection_lost()): + if await self.wait_for_connection_lost(): return logger.debug("%s ! timed out waiting for TCP close", self.side) @@ -1043,7 +1016,7 @@ def close_connection(self): logger.debug("%s x closing TCP connection", self.side) self.writer.close() - if (yield from self.wait_for_connection_lost()): + if await self.wait_for_connection_lost(): return logger.debug("%s ! timed out waiting for TCP close", self.side) @@ -1052,10 +1025,9 @@ def close_connection(self): self.writer.transport.abort() # connection_lost() is called quickly after aborting. - yield from self.wait_for_connection_lost() + await self.wait_for_connection_lost() - @asyncio.coroutine - def wait_for_connection_lost(self): + async def wait_for_connection_lost(self): """ Wait until the TCP connection is closed or ``self.close_timeout`` elapses. @@ -1064,7 +1036,7 @@ def wait_for_connection_lost(self): """ if not self.connection_lost_waiter.done(): try: - yield from asyncio.wait_for( + await asyncio.wait_for( asyncio.shield(self.connection_lost_waiter), self.close_timeout, loop=self.loop, diff --git a/src/websockets/server.py b/src/websockets/server.py index 424d08922..752170edf 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -89,8 +89,7 @@ def connection_made(self, transport): self.ws_server.register(self) self.handler_task = self.loop.create_task(self.handler()) - @asyncio.coroutine - def handler(self): + async def handler(self): """ Handle the lifecycle of a WebSocket connection. @@ -102,7 +101,7 @@ def handler(self): try: try: - path = yield from self.handshake( + path = await self.handshake( origins=self.origins, available_extensions=self.available_extensions, available_subprotocols=self.available_subprotocols, @@ -154,11 +153,11 @@ def handler(self): self.write_http_response(status, headers, body) self.fail_connection() - yield from self.wait_closed() + await self.wait_closed() return try: - yield from self.ws_handler(self, path) + await self.ws_handler(self, path) except Exception: logger.error("Error in connection handler", exc_info=True) if not self.closed: @@ -166,7 +165,7 @@ def handler(self): raise try: - yield from self.close() + await self.close() except ConnectionError: logger.debug("Connection error in closing handshake", exc_info=True) raise @@ -188,8 +187,7 @@ def handler(self): # connections before terminating. self.ws_server.unregister(self) - @asyncio.coroutine - def read_http_request(self): + async def read_http_request(self): """ Read request line and headers from the HTTP request. 
@@ -202,7 +200,7 @@ def read_http_request(self): """ try: - path, headers = yield from read_request(self.reader) + path, headers = await read_request(self.reader) except ValueError as exc: raise InvalidMessage("Malformed HTTP message") from exc @@ -426,8 +424,7 @@ def select_subprotocol(client_subprotocols, server_subprotocols): ) return sorted(subprotocols, key=priority)[0] - @asyncio.coroutine - def handshake( + async def handshake( self, origins=None, available_extensions=None, @@ -458,12 +455,12 @@ def handshake( Return the path of the URI of the request. """ - path, request_headers = yield from self.read_http_request() + path, request_headers = await self.read_http_request() # Hook for customizing request handling, for example checking # authentication or treating some paths as plain HTTP endpoints. if asyncio.iscoroutinefunction(self.process_request): - early_response = yield from self.process_request(path, request_headers) + early_response = await self.process_request(path, request_headers) else: early_response = self.process_request(path, request_headers) @@ -604,8 +601,7 @@ def close(self): if self.close_task is None: self.close_task = self.loop.create_task(self._close()) - @asyncio.coroutine - def _close(self): + async def _close(self): """ Implementation of :meth:`close`. @@ -618,11 +614,11 @@ def _close(self): self.server.close() # Wait until self.server.close() completes. - yield from self.server.wait_closed() + await self.server.wait_closed() # Wait until all accepted connections reach connection_made() and call # register(). See https://bugs.python.org/issue34852 for details. - yield from asyncio.sleep(0) + await asyncio.sleep(0) # Close open connections. fail_connection() will cancel the transfer # data task, which is expected to cause the handler task to terminate. @@ -637,7 +633,7 @@ def _close(self): # running tasks. # TODO: it would be nicer to wait only for the connection handler # and let the handler wait for the connection to close. - yield from asyncio.wait( + await asyncio.wait( [websocket.handler_task for websocket in self.websockets] + [ websocket.close_connection_task @@ -650,8 +646,7 @@ def _close(self): # Tell wait_closed() to return. self.closed_waiter.set_result(None) - @asyncio.coroutine - def wait_closed(self): + async def wait_closed(self): """ Wait until the server is closed and all connections are terminated. @@ -659,7 +654,7 @@ def wait_closed(self): there are no pending tasks left. """ - yield from asyncio.shield(self.closed_waiter) + await asyncio.shield(self.closed_waiter) @property def sockets(self): @@ -845,10 +840,8 @@ def __init__( self.ws_server = ws_server @asyncio.coroutine - def __iter__(self): # pragma: no cover - server = yield from self._creating_server - self.ws_server.wrap(server) - return self.ws_server + def __iter__(self): + return self.__await_impl__() async def __aenter__(self): return await self @@ -858,8 +851,6 @@ async def __aexit__(self, exc_type, exc_value, traceback): await self.ws_server.wait_closed() async def __await_impl__(self): - # Duplicated with __iter__ because Python 3.7 requires an async function - # (as explained in __await__ below) which Python 3.4 doesn't support. 
server = await self._creating_server self.ws_server.wrap(server) return self.ws_server @@ -895,8 +886,7 @@ def unix_serve(ws_handler, path, **kwargs): del Serve.__aexit__ del Serve.__await__ - @asyncio.coroutine - def serve(*args, **kwds): + async def serve(*args, **kwds): return Serve(*args, **kwds).__iter__() serve.__doc__ = Serve.__doc__ diff --git a/tests/test_client_server.py b/tests/test_client_server.py index 633e097bc..d155f7fae 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -49,26 +49,25 @@ testcert = bytes(pathlib.Path(__file__).with_name("test_localhost.pem")) -@asyncio.coroutine -def handler(ws, path): +async def handler(ws, path): if path == "/attributes": - yield from ws.send(repr((ws.host, ws.port, ws.secure))) + await ws.send(repr((ws.host, ws.port, ws.secure))) elif path == "/close_timeout": - yield from ws.send(repr(ws.close_timeout)) + await ws.send(repr(ws.close_timeout)) elif path == "/path": - yield from ws.send(str(ws.path)) + await ws.send(str(ws.path)) elif path == "/headers": - yield from ws.send(repr(ws.request_headers)) - yield from ws.send(repr(ws.response_headers)) + await ws.send(repr(ws.request_headers)) + await ws.send(repr(ws.response_headers)) elif path == "/extensions": - yield from ws.send(repr(ws.extensions)) + await ws.send(repr(ws.extensions)) elif path == "/subprotocol": - yield from ws.send(repr(ws.subprotocol)) + await ws.send(repr(ws.subprotocol)) elif path == "/slow_stop": - yield from ws.wait_closed() - yield from asyncio.sleep(2 * MS) + await ws.wait_closed() + await asyncio.sleep(2 * MS) else: - yield from ws.send((yield from ws.recv())) + await ws.send((await ws.recv())) @contextlib.contextmanager @@ -162,31 +161,27 @@ def get_server_uri(server, secure=False, resource_name="/", user_info=None): class UnauthorizedServerProtocol(WebSocketServerProtocol): - @asyncio.coroutine - def process_request(self, path, request_headers): + async def process_request(self, path, request_headers): # Test returning headers as a Headers instance (1/3) return http.HTTPStatus.UNAUTHORIZED, Headers([("X-Access", "denied")]), b"" class ForbiddenServerProtocol(WebSocketServerProtocol): - @asyncio.coroutine - def process_request(self, path, request_headers): + async def process_request(self, path, request_headers): # Test returning headers as a dict (2/3) return http.HTTPStatus.FORBIDDEN, {"X-Access": "denied"}, b"" class HealthCheckServerProtocol(WebSocketServerProtocol): - @asyncio.coroutine - def process_request(self, path, request_headers): + async def process_request(self, path, request_headers): # Test returning headers as a list of pairs (3/3) if path == "/__health__/": return http.HTTPStatus.OK, [("X-Access", "OK")], b"status = green\n" class SlowServerProtocol(WebSocketServerProtocol): - @asyncio.coroutine - def process_request(self, path, request_headers): - yield from asyncio.sleep(10 * MS) + async def process_request(self, path, request_headers): + await asyncio.sleep(10 * MS) class FooClientProtocol(WebSocketClientProtocol): @@ -957,9 +952,8 @@ def wrong_build_response(headers, key): @with_server() @unittest.mock.patch("websockets.client.read_response") def test_server_does_not_switch_protocols(self, _read_response): - @asyncio.coroutine - def wrong_read_response(stream): - status_code, reason, headers = yield from read_response(stream) + async def wrong_read_response(stream): + status_code, reason, headers = await read_response(stream) return 400, "Bad Request", headers _read_response.side_effect = wrong_read_response 
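The server keeps accepting both forms of ``process_request``: it awaits the
result only when ``asyncio.iscoroutinefunction()`` reports a coroutine
function, which is what the converted test protocols above rely on. A minimal
sketch with a hypothetical health-check endpoint (not taken verbatim from the
test suite)::

    import http

    from websockets.server import WebSocketServerProtocol

    class SyncHealthCheckProtocol(WebSocketServerProtocol):
        # Plain function: called directly during the opening handshake.
        def process_request(self, path, request_headers):
            if path == "/healthz":
                return http.HTTPStatus.OK, [("Content-Type", "text/plain")], b"OK\n"
            # Returning None lets the WebSocket handshake proceed.

    class AsyncHealthCheckProtocol(WebSocketServerProtocol):
        # Coroutine: detected with asyncio.iscoroutinefunction() and awaited.
        async def process_request(self, path, request_headers):
            if path == "/healthz":
                return http.HTTPStatus.OK, [("Content-Type", "text/plain")], b"OK\n"

    # Either class can be passed to serve() via its create_protocol argument.
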
diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 896c0fe4b..70c2be0bd 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -117,10 +117,9 @@ def make_drain_slow(self, delay=MS): original_drain = self.protocol.writer.drain - @asyncio.coroutine - def delayed_drain(): - yield from asyncio.sleep(delay, loop=self.loop) - yield from original_drain() + async def delayed_drain(): + await asyncio.sleep(delay, loop=self.loop) + await original_drain() self.protocol.writer.drain = delayed_drain @@ -474,8 +473,7 @@ def test_recv_queue_full(self): self.assertEqual(list(self.protocol.messages), []) def test_recv_other_error(self): - @asyncio.coroutine - def read_message(): + async def read_message(): raise Exception("BOOM") self.protocol.read_message = read_message @@ -1034,8 +1032,7 @@ def test_keepalive_ping_with_no_ping_timeout(self): def test_keepalive_ping_unexpected_error(self): self.restart_protocol_with_keepalive_ping() - @asyncio.coroutine - def ping(): + async def ping(): raise Exception("BOOM") self.protocol.ping = ping From f77ab68e23e61658548e2a624683ec07fe816b91 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 29 Dec 2018 17:29:14 +0100 Subject: [PATCH 030/281] Miscellaneous cleanups. --- setup.py | 10 ++-------- src/websockets/protocol.py | 5 ++--- src/websockets/server.py | 2 +- tests/test_client_server.py | 8 +------- 4 files changed, 6 insertions(+), 19 deletions(-) diff --git a/setup.py b/setup.py index 78d6f7af4..2956058a4 100644 --- a/setup.py +++ b/setup.py @@ -8,15 +8,9 @@ description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" -# When dropping Python < 3.5, change to: -# long_description = (root_dir / 'README.rst').read_text(encoding='utf-8') -with (root_dir / 'README.rst').open(encoding='utf-8') as f: - long_description = f.read() +long_description = (root_dir / 'README.rst').read_text(encoding='utf-8') -# When dropping Python < 3.5, change to: -# exec((root_dir / 'src' / 'websockets' / 'version.py').read_text(encoding='utf-8')) -with (root_dir / 'src' / 'websockets' / 'version.py').open(encoding='utf-8') as f: - exec(f.read()) +exec((root_dir / 'src' / 'websockets' / 'version.py').read_text(encoding='utf-8')) py_version = sys.version_info[:2] diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 7f20bed62..5c60348aa 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -525,7 +525,7 @@ async def close(self, code=1000, reason=""): try: # If close() is canceled during the wait, self.transfer_data_task - # is canceled before the timeout elapses (on Python ≥ 3.4.3). + # is canceled before the timeout elapses. # This helps closing connections when shutting down a server. await asyncio.wait_for( self.transfer_data_task, self.close_timeout, loop=self.loop @@ -797,8 +797,7 @@ async def read_data_frame(self, max_size): elif frame.opcode == OP_PING: # Answer pings. - # Replace by frame.data.hex() when dropping Python < 3.5. - ping_hex = binascii.hexlify(frame.data).decode() or "[empty]" + ping_hex = frame.data.hex() or "[empty]" logger.debug( "%s - received ping, sending pong: %s", self.side, ping_hex ) diff --git a/src/websockets/server.py b/src/websockets/server.py index 752170edf..839b3c861 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -677,7 +677,7 @@ class Serve: :meth:`~websockets.server.WebSocketServer.wait_closed` methods for terminating the server and cleaning up its resources. 
- On Python ≥ 3.5, :func:`serve` can also be used as an asynchronous context + On Python ≥ 3.5.1, :func:`serve` can also be used as an asynchronous context manager. In this case, the server is shut down when exiting the context. :func:`serve` is a wrapper around the event loop's diff --git a/tests/test_client_server.py b/tests/test_client_server.py index d155f7fae..6b80c7f6e 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -1133,13 +1133,7 @@ def start_client(self, path="/", **kwds): @with_server() def test_ws_uri_is_rejected(self): with self.assertRaises(ValueError): - client = connect( - get_server_uri(self.server, secure=False), ssl=self.client_context - ) - # With Python ≥ 3.5, the exception is raised by connect() even - # before awaiting. However, with Python 3.4 the exception is - # raised only when awaiting. - self.loop.run_until_complete(client) # pragma: no cover + connect(get_server_uri(self.server, secure=False), ssl=self.client_context) @with_server() def test_redirect_insecure(self): From 22a4604cfdaedc27141fef8048c55cdf2c899185 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 29 Dec 2018 17:34:31 +0100 Subject: [PATCH 031/281] Remove documentation for Python 3.4. --- docs/intro.rst | 25 ------------------------- example/old_client.py | 11 +++++------ example/old_server.py | 7 +++---- 3 files changed, 8 insertions(+), 35 deletions(-) diff --git a/docs/intro.rst b/docs/intro.rst index b153d2f5d..376b7d9ca 100644 --- a/docs/intro.rst +++ b/docs/intro.rst @@ -233,7 +233,6 @@ Python < 3.6 This documentation takes advantage of several features that aren't available in Python < 3.6: -- ``await`` and ``async`` were added in Python 3.5; - Asynchronous context managers didn't work well until Python 3.5.1; - Asynchronous iterators were added in Python 3.6; - f-strings were introduced in Python 3.6 (this is unrelated to :mod:`asyncio` @@ -242,34 +241,10 @@ in Python < 3.6: Here's how to adapt the basic server example. .. literalinclude:: ../example/old_server.py - :emphasize-lines: 8-9,18 And here's the basic client example. .. literalinclude:: ../example/old_client.py - :emphasize-lines: 8-11,13,22-23 - -``await`` and ``async`` -....................... - -If you're using Python < 3.5, you must substitute:: - - async def ... - -with:: - - @asyncio.coroutine - def ... - -and:: - - await ... - -with:: - - yield from ... - -Otherwise you will encounter a :exc:`SyntaxError`. Asynchronous context managers ............................. diff --git a/example/old_client.py b/example/old_client.py index c44d6edff..be34f14be 100755 --- a/example/old_client.py +++ b/example/old_client.py @@ -5,21 +5,20 @@ import asyncio import websockets -@asyncio.coroutine -def hello(): - websocket = yield from websockets.connect( +async def hello(): + websocket = await websockets.connect( 'ws://localhost:8765/') try: name = input("What's your name? 
") - yield from websocket.send(name) + await websocket.send(name) print("> {}".format(name)) - greeting = yield from websocket.recv() + greeting = await websocket.recv() print("< {}".format(greeting)) finally: - yield from websocket.close() + await websocket.close() asyncio.get_event_loop().run_until_complete(hello()) diff --git a/example/old_server.py b/example/old_server.py index bb19bdabc..8c63e33e6 100755 --- a/example/old_server.py +++ b/example/old_server.py @@ -5,14 +5,13 @@ import asyncio import websockets -@asyncio.coroutine -def hello(websocket, path): - name = yield from websocket.recv() +async def hello(websocket, path): + name = await websocket.recv() print("< {}".format(name)) greeting = "Hello {}!".format(name) - yield from websocket.send(greeting) + await websocket.send(greeting) print("> {}".format(greeting)) start_server = websockets.serve(hello, 'localhost', 8765) From 40adef93ae4cc74fef34d8ad4e72648a361799e8 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 29 Dec 2018 22:33:24 +0100 Subject: [PATCH 032/281] Add documentation for extensions. Fix #255. --- docs/api.rst | 9 ++ docs/changelog.rst | 2 + docs/extensions.rst | 85 +++++++++++++++++++ docs/index.rst | 1 + src/websockets/extensions/base.py | 72 ++++++++++------ .../extensions/permessage_deflate.py | 50 ++++++++--- 6 files changed, 180 insertions(+), 39 deletions(-) create mode 100644 docs/extensions.rst diff --git a/docs/api.rst b/docs/api.rst index 80d64e254..e480604bb 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -82,6 +82,15 @@ Shared .. autoattribute:: open .. autoattribute:: closed +Per-Message Deflate Extension +............................. + +.. automodule:: websockets.extensions.permessage_deflate + + .. autoclass:: ServerPerMessageDeflateFactory + + .. autoclass:: ClientPerMessageDeflateFactory + Exceptions .......... diff --git a/docs/changelog.rst b/docs/changelog.rst index 87e2e0ac8..e4fd55fb4 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -28,6 +28,8 @@ Also: * :func:`~client.connect()` handles redirects from the server during the handshake. +* Added documentation for extensions. + 7.0 ... diff --git a/docs/extensions.rst b/docs/extensions.rst new file mode 100644 index 000000000..3a5885009 --- /dev/null +++ b/docs/extensions.rst @@ -0,0 +1,85 @@ +Extensions +========== + +.. currentmodule:: websockets + +The WebSocket protocol supports extensions_. + +At the time of writing, there's only one `registered extension`_, WebSocket +Per-Message Deflate, specified in :rfc:`7692`. + +.. _extensions: https://tools.ietf.org/html/rfc6455#section-9 +.. _registered extension: https://www.iana.org/assignments/websocket/websocket.xhtml#extension-name + +Per-Message Deflate +------------------- + +:func:`~server.serve()` and :func:`~client.connect()` enable the Per-Message +Deflate extension by default. You can disable this with ``compression=None``. + +You can also configure the Per-Message Deflate extension explicitly if you +want to customize its parameters. 
+ +Here's an example on the server side:: + + import websockets + from websockets.extensions import permessage_deflate + + websockets.serve( + ..., + extensions=[ + permessage_deflate.ServerPerMessageDeflateFactory( + server_max_window_bits=11, + client_max_window_bits=11, + compress_settings={'memLevel': 4}, + ), + ], + ) + +Here's an example on the client side:: + + import websockets + from websockets.extensions import permessage_deflate + + websockets.connect( + ..., + extensions=[ + permessage_deflate.ClientPerMessageDeflateFactory( + server_max_window_bits=11, + client_max_window_bits=11, + compress_settings={'memLevel': 4}, + ), + ], + ) + +Refer to the API documentation of +:class:`~extensions.permessage_deflate.ServerPerMessageDeflateFactory` and +:class:`~extensions.permessage_deflate.ClientPerMessageDeflateFactory` for +details. + +Writing an extension +-------------------- + +During the opening handshake, WebSocket clients and servers negotiate which +extensions will be used with which parameters. Then each frame is processed by +extensions before it's sent and after it's received. + +As a consequence writing an extension requires implementing several classes: + +1. Extension Factory: it negotiates parameters and instanciates the extension. + Clients and servers require separate extension factories with distict APIs. + +2. Extension: it decodes incoming frames and encodes outgoing frames. If the + extension is symmetrical, clients and servers can use the same class. + +``websockets`` provides abstract base classes for extension factories and +extensions. + +.. autoclass:: websockets.extensions.base.ServerExtensionFactory + :members: + +.. autoclass:: websockets.extensions.base.ClientExtensionFactory + :members: + +.. autoclass:: websockets.extensions.base.Extension + :members: diff --git a/docs/index.rst b/docs/index.rst index 7ccd9463e..040d41598 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -60,6 +60,7 @@ These guides will help you build and deploy a ``websockets`` application. cheatsheet deployment + extensions Reference --------- diff --git a/src/websockets/extensions/base.py b/src/websockets/extensions/base.py index 69b55b3f8..cf3f9a2ec 100644 --- a/src/websockets/extensions/base.py +++ b/src/websockets/extensions/base.py @@ -1,7 +1,8 @@ """ -The :mod:`websockets.extensions.base` defines abstract classes for extensions. +The :mod:`websockets.extensions.base` module defines abstract classes for +implementing extensions as specified in `section 9 of RFC 6455`_. -See https://tools.ietf.org/html/rfc6455#section-9. +.. _section 9 of RFC 6455: http://tools.ietf.org/html/rfc6455#section-9 """ @@ -10,11 +11,14 @@ class ClientExtensionFactory: """ Abstract class for client-side extension factories. - Extension factories handle configuration and negotiation. - """ - name = ... + @property + def name(self): + """ + Extension identifier. + + """ def get_request_params(self): """ @@ -25,18 +29,17 @@ def get_request_params(self): """ def process_response_params(self, params, accepted_extensions): - """" - Process response parameters. + """ + Process response parameters received from the server. - ``params`` are a list of (name, value) pairs. + ``params`` is a list of (name, value) pairs. - ``accepted_extensions`` is a list of previously accepted extensions, - represented by extension instances. + ``accepted_extensions`` is a list of previously accepted extensions. - Return an extension instance (an instance of a subclass of - :class:`Extension`) if these parameters are acceptable. 
+ If parameters are acceptable, return an extension: an instance of a + subclass of :class:`Extension`. - Raise :exc:`~websockets.exceptions.NegotiationError` if they aren't. + If they aren't, raise :exc:`~websockets.exceptions.NegotiationError`. """ @@ -45,24 +48,30 @@ class ServerExtensionFactory: """ Abstract class for server-side extension factories. - Extension factories handle configuration and negotiation. - """ - name = ... + @property + def name(self): + """ + Extension identifier. + + """ def process_request_params(self, params, accepted_extensions): - """" - Process request parameters. + """ + Process request parameters received from the client. + + ``params`` is a list of (name, value) pairs. - ``accepted_extensions`` is a list of previously accepted extensions, - represented by extension instances. + ``accepted_extensions`` is a list of previously accepted extensions. - Return response params (a list of (name, value) pairs) and an - extension instance (an instance of a subclass of :class:`Extension`) - to accept this extension. + To accept the offer, return a 2-uple containing: - Raise :exc:`~websockets.exceptions.NegotiationError` to reject it. + - response parameters: a list of (name, value) pairs + - an extension: an instance of a subclass of :class:`Extension` + + To reject the offer, raise + :exc:`~websockets.exceptions.NegotiationError`. """ @@ -73,13 +82,21 @@ class Extension: """ - name = ... + @property + def name(self): + """ + Extension identifier. + + """ def decode(self, frame, *, max_size=None): """ Decode an incoming frame. - Return a frame. + The ``frame`` parameter and the return value are + :class:`~websockets.framing.Frame` instances. + + """ @@ -87,6 +104,7 @@ def encode(self, frame): """ Encode an outgoing frame. - Return a frame. + The ``frame`` parameter and the return value are + :class:`~websockets.framing.Frame` instances. """ diff --git a/src/websockets/extensions/permessage_deflate.py b/src/websockets/extensions/permessage_deflate.py index dad6f1ec1..167746021 100644 --- a/src/websockets/extensions/permessage_deflate.py +++ b/src/websockets/extensions/permessage_deflate.py @@ -113,7 +113,22 @@ def _extract_parameters(params, *, is_server): class ClientPerMessageDeflateFactory: """ - Client-side extension factory for permessage-deflate extension. + Client-side extension factory for Per-Message Deflate extension. + + These parameters behave as described in `section 7.1 of RFC 7692`_: + + - ``server_no_context_takeover`` + - ``client_no_context_takeover`` + - ``server_max_window_bits`` + - ``client_max_window_bits`` + + Set them to ``True`` to include them in the negotiation offer without a + value or to an integer value to include them with this value. + + .. _section 7.1 of RFC 7692: https://tools.ietf.org/html/rfc7692#section-7.1 + + ``compress_settings`` is an optional :class:`dict` of keyword arguments + for :func:`zlib.compressobj`, excluding ``wbits``. """ @@ -128,9 +143,7 @@ def __init__( compress_settings=None, ): """ - Configure permessage-deflate extension factory. - - See https://tools.ietf.org/html/rfc7692#section-7.1. + Configure the Per-Message Deflate extension factory. """ if not (server_max_window_bits is None or 8 <= server_max_window_bits <= 15): @@ -166,7 +179,7 @@ def get_request_params(self): ) def process_response_params(self, params, accepted_extensions): - """" + """ Process response parameters. Return an extension instance. 
@@ -269,7 +282,22 @@ def process_response_params(self, params, accepted_extensions): class ServerPerMessageDeflateFactory: """ - Server-side extension factory for permessage-deflate extension. + Server-side extension factory for the Per-Message Deflate extension. + + These parameters behave as described in `section 7.1 of RFC 7692`_: + + - ``server_no_context_takeover`` + - ``client_no_context_takeover`` + - ``server_max_window_bits`` + - ``client_max_window_bits`` + + Set them to ``True`` to include them in the negotiation offer without a + value or to an integer value to include them with this value. + + .. _section 7.1 of RFC 7692: https://tools.ietf.org/html/rfc7692#section-7.1 + + ``compress_settings`` is an optional :class:`dict` of keyword arguments + for :func:`zlib.compressobj`, excluding ``wbits``. """ @@ -284,9 +312,7 @@ def __init__( compress_settings=None, ): """ - Configure permessage-deflate extension factory. - - See https://tools.ietf.org/html/rfc7692#section-7.1. + Configure the Per-Message Deflate extension factory. """ if not (server_max_window_bits is None or 8 <= server_max_window_bits <= 15): @@ -306,7 +332,7 @@ def __init__( self.compress_settings = compress_settings def process_request_params(self, params, accepted_extensions): - """" + """ Process request parameters. Return response params and an extension instance. @@ -416,7 +442,7 @@ def process_request_params(self, params, accepted_extensions): class PerMessageDeflate: """ - permessage-deflate extension. + Per-Message Deflate extension. """ @@ -431,7 +457,7 @@ def __init__( compress_settings=None, ): """ - Configure permessage-deflate extension. + Configure the Per-Message Deflate extension. """ if compress_settings is None: From 8bcfd9aacd4b93b9df687a6d1e171b03ea3727c9 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 30 Dec 2018 11:57:38 +0100 Subject: [PATCH 033/281] Remove obsolete description. That behavior changed in 7.0. --- docs/deployment.rst | 4 ---- 1 file changed, 4 deletions(-) diff --git a/docs/deployment.rst b/docs/deployment.rst index 0f571520d..7eb350606 100644 --- a/docs/deployment.rst +++ b/docs/deployment.rst @@ -30,10 +30,6 @@ with the object returned by :func:`~server.serve`: - calling its ``close()`` method, then waiting for its ``wait_closed()`` method to complete. -Tasks that handle connections will be canceled. For example, if the handler -is awaiting :meth:`~protocol.WebSocketCommonProtocol.recv`, that call will -raise :exc:`~asyncio.CancelledError`. - On Unix systems, shutdown is usually triggered by sending a signal. Here's a full example (Unix-only): From 4034bc768f1adec08274ef28f62ea6e401d4e88e Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 30 Dec 2018 11:59:56 +0100 Subject: [PATCH 034/281] Remove documentation for Python 3.4. Missed from 22a4604c. --- docs/deployment.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/deployment.rst b/docs/deployment.rst index 7eb350606..6758e6afd 100644 --- a/docs/deployment.rst +++ b/docs/deployment.rst @@ -37,9 +37,9 @@ Here's a full example (Unix-only): .. literalinclude:: ../example/shutdown.py :emphasize-lines: 13,17-19 -``async`` and ``await`` were introduced in Python 3.5. websockets supports -asynchronous context managers on Python ≥ 3.5.1. ``async for`` was introduced -in Python 3.6. Here's the equivalent for older Python versions: +websockets supports asynchronous context managers on Python ≥ 3.5.1. ``async +for`` was introduced in Python 3.6. 
Here's the equivalent for older Python +versions: .. literalinclude:: ../example/old_shutdown.py :emphasize-lines: 22-25 From 3da06faadec19d50cc068f62e10e2ca456396f53 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Mon, 31 Dec 2018 10:16:15 +0100 Subject: [PATCH 035/281] Document how to optimize memory usage. Include benchmarking scripts. Also improve neighboring docs. Fix #272. --- docs/changelog.rst | 2 + docs/deployment.rst | 101 ++++++++++++++++++++++++++++++++++--- docs/design.rst | 10 ++-- docs/extensions.rst | 6 ++- docs/intro.rst | 2 + docs/security.rst | 33 +++++------- docs/spelling_wordlist.txt | 4 +- performance/mem_client.py | 54 ++++++++++++++++++++ performance/mem_server.py | 63 +++++++++++++++++++++++ src/websockets/http.py | 4 +- src/websockets/protocol.py | 8 +-- 11 files changed, 247 insertions(+), 40 deletions(-) create mode 100644 performance/mem_client.py create mode 100644 performance/mem_server.py diff --git a/docs/changelog.rst b/docs/changelog.rst index e4fd55fb4..320300f64 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -30,6 +30,8 @@ Also: * Added documentation for extensions. +* Documented how to optimize memory usage. + 7.0 ... diff --git a/docs/deployment.rst b/docs/deployment.rst index 6758e6afd..f8bc7f94b 100644 --- a/docs/deployment.rst +++ b/docs/deployment.rst @@ -50,15 +50,102 @@ projects try to help with this problem. If your server doesn't run in the main thread, look at :func:`~asyncio.AbstractEventLoop.call_soon_threadsafe`. -Memory use ----------- +Memory usage +------------ + +.. _memory-usage: + +In most cases, memory usage of a WebSocket server is proportional to the +number of open connections. When a server handles thousands of connections, +memory usage can become a bottleneck. + +Memory usage of a single connection is the sum of: + +1. the baseline amount of memory ``websockets`` requires for each connection, +2. the amount of data held in buffers before the application processes it, +3. any additional memory allocated by the application itself. + +Baseline +........ + +Compression settings are the main factor affecting the baseline amount of +memory used by each connection. + +By default ``websockets`` maximizes compression rate at the expense of memory +usage. If memory usage is an issue, lowering compression settings can help: + +- Context Takeover is necessary to get good performance for almost all + applications. It should remain enabled. +- Window Bits is a trade-off between memory usage and compression rate. + It defaults to 15 and can be lowered. The default value isn't optimal + for small, repetitive messages which are typical of WebSocket servers. +- Memory Level is a trade-off between memory usage and compression speed. + It defaults to 8 and can be lowered. A lower memory level can actually + increase speed thanks to memory locality, even if the CPU does more work! + +See this :ref:`example ` for how to +configure compression settings. + +Here's how various compression settings affect memory usage of a single +connection on a 64-bit system, as well a benchmark_ of compressed size and +compression time for a corpus of small JSON documents. + ++-------------+-------------+--------------+--------------+------------------+------------------+ +| Compression | Window Bits | Memory Level | Memory usage | Size vs. default | Time vs. 
default |
++=============+=============+==============+==============+==================+==================+
+| *default*   | 15          | 8            | 325 KiB      | +0%              | +0%              |
++-------------+-------------+--------------+--------------+------------------+------------------+
+|             | 14          | 7            | 181 KiB      | +1.5%            | -5.3%            |
++-------------+-------------+--------------+--------------+------------------+------------------+
+|             | 13          | 6            | 110 KiB      | +2.8%            | -7.5%            |
++-------------+-------------+--------------+--------------+------------------+------------------+
+|             | 12          | 5            | 73 KiB       | +4.4%            | -18.9%           |
++-------------+-------------+--------------+--------------+------------------+------------------+
+|             | 11          | 4            | 55 KiB       | +8.5%            | -18.8%           |
++-------------+-------------+--------------+--------------+------------------+------------------+
+| *disabled*  | N/A         | N/A          | 22 KiB       | N/A              | N/A              |
++-------------+-------------+--------------+--------------+------------------+------------------+
+
+*Don't assume this example is representative! Compressed size and compression
+time depend heavily on the kind of messages exchanged by the application!*
+
+You can run the same benchmark for your application by creating a list of
+typical messages and passing it to the ``_benchmark`` function_.
+
+.. _benchmark: https://gist.github.com/aaugustin/fbea09ce8b5b30c4e56458eb081fe599
+.. _function: https://gist.github.com/aaugustin/fbea09ce8b5b30c4e56458eb081fe599#file-compression-py-L48-L144
+
+This `blog post by Ilya Grigorik`_ provides more details about how compression
+settings affect memory usage and how to optimize them.
+
+.. _blog post by Ilya Grigorik: https://www.igvita.com/2013/11/27/configuring-and-optimizing-websocket-compression/
+
+This `experiment by Peter Thorson`_ suggests Window Bits = 11, Memory Level =
+4 as a sweet spot for optimizing memory usage.
+
+.. _experiment by Peter Thorson: https://www.ietf.org/mail-archive/web/hybi/current/msg10222.html
+
+Buffers
+.......
+
+Under normal circumstances, buffers are almost always empty.
+
+Under high load, if a server receives more messages than it can process,
+bufferbloat can result in excessive memory use.
+
+By default ``websockets`` has generous limits. It is strongly recommended to
+adapt them to your application. When you call :func:`~server.serve()`:
+
+- Set ``max_size`` (default: 1 MiB, UTF-8 encoded) to the maximum size of
+  messages your application generates.
+- Set ``max_queue`` (default: 32) to the maximum number of messages your
+  application expects to receive faster than it can process them. The queue
+  provides burst tolerance without slowing down the TCP connection.
 
-In order to avoid excessive memory use caused by buffer bloat, it is strongly
-recommended to :ref:`tune buffer sizes <buffers>`.
+Furthermore, you can lower ``read_limit`` and ``write_limit`` (default:
+64 KiB) to reduce the size of buffers for incoming and outgoing data.
 
-Most importantly ``max_size`` should be lowered according to the expected size
-of messages. It is also suggested to lower ``max_queue``, ``read_limit`` and
-``write_limit`` if memory use is a concern.
+The design document provides :ref:`more details about buffers`.
 
 Port sharing
 ------------
diff --git a/docs/design.rst b/docs/design.rst
index 03f1ec163..c6097f724 100644
--- a/docs/design.rst
+++ b/docs/design.rst
@@ -513,21 +513,21 @@ Bufferbloat can happen at every level in the stack where there is a buffer.
 For each connection, the receiving side contains these buffers:
 
 - OS buffers: tuning them is an advanced optimization.
-- :class:`~asyncio.StreamReader` bytes buffer: the default limit is 64kB. +- :class:`~asyncio.StreamReader` bytes buffer: the default limit is 64 KiB. You can set another limit by passing a ``read_limit`` keyword argument to :func:`~client.connect()` or :func:`~server.serve()`. - Incoming messages :class:`~collections.deque`: its size depends both on the size and the number of messages it contains. By default the maximum - UTF-8 encoded size is 1MB and the maximum number is 32. In the worst case, - after UTF-8 decoding, a single message could take up to 4MB of memory and - the overall memory consumption could reach 128MB. You should adjust these + UTF-8 encoded size is 1 MiB and the maximum number is 32. In the worst case, + after UTF-8 decoding, a single message could take up to 4 MiB of memory and + the overall memory consumption could reach 128 MiB. You should adjust these limits by setting the ``max_size`` and ``max_queue`` keyword arguments of :func:`~client.connect()` or :func:`~server.serve()` according to your application's requirements. For each connection, the sending side contains these buffers: -- :class:`~asyncio.StreamWriter` bytes buffer: the default size is 64kB. +- :class:`~asyncio.StreamWriter` bytes buffer: the default size is 64 KiB. You can set another limit by passing a ``write_limit`` keyword argument to :func:`~client.connect()` or :func:`~server.serve()`. - OS buffers: tuning them is an advanced optimization. diff --git a/docs/extensions.rst b/docs/extensions.rst index 3a5885009..7c282ffd0 100644 --- a/docs/extensions.rst +++ b/docs/extensions.rst @@ -20,6 +20,8 @@ Deflate extension by default. You can disable this with ``compression=None``. You can also configure the Per-Message Deflate extension explicitly if you want to customize its parameters. +.. _per-message-deflate-configuration-example: + Here's an example on the server side:: import websockets @@ -66,8 +68,8 @@ extensions before it's sent and after it's received. As a consequence writing an extension requires implementing several classes: -1. Extension Factory: it negotiates parameters and instanciates the extension. - Clients and servers require separate extension factories with distict APIs. +1. Extension Factory: it negotiates parameters and instantiates the extension. + Clients and servers require separate extension factories with distinct APIs. 2. Extension: it decodes incoming frames and encodes outgoing frames. If the extension is symmetrical, clients and servers can use the same class. diff --git a/docs/intro.rst b/docs/intro.rst index 376b7d9ca..dea152ab1 100644 --- a/docs/intro.rst +++ b/docs/intro.rst @@ -50,6 +50,8 @@ Here's a corresponding WebSocket client example. Using :func:`connect` as an asynchronous context manager ensures the connection is closed before exiting the ``hello`` coroutine. +.. _secure-server-example: + Secure example -------------- diff --git a/docs/security.rst b/docs/security.rst index f0d1deee3..e9acf0629 100644 --- a/docs/security.rst +++ b/docs/security.rst @@ -1,8 +1,17 @@ Security ======== +Encryption +---------- + +For production use, a server should require encrypted connections. + +See this example of :ref:`encrypting connections with TLS +`. + Memory use ---------- + .. warning:: An attacker who can open an arbitrary number of connections will be able @@ -10,27 +19,13 @@ Memory use by denial of service attacks, you must reject suspicious connections before they reach ``websockets``, typically in a reverse proxy. 
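To make the memory guidance above concrete, here is a minimal sketch of a memory-conscious server configuration. It is illustrative only: the echo handler, host, and port are stand-ins, the limits restate the documented defaults, and the compression parameters follow the Window Bits = 11 / Memory Level = 4 sweet spot suggested above (the factory arguments match those used by the benchmarking scripts below).

.. code:: python

    import asyncio
    import websockets
    from websockets.extensions import permessage_deflate

    async def handler(websocket, path):
        # Stand-in application logic: echo every incoming message.
        async for message in websocket:
            await websocket.send(message)

    start_server = websockets.serve(
        handler, "localhost", 8765,
        # Bound memory held in buffers (values shown are the defaults).
        max_size=2 ** 20,     # 1 MiB per incoming message
        max_queue=32,         # at most 32 queued messages
        read_limit=2 ** 16,   # 64 KiB incoming bytes buffer
        write_limit=2 ** 16,  # 64 KiB outgoing bytes buffer
        # Lower the per-connection compression baseline.
        extensions=[
            permessage_deflate.ServerPerMessageDeflateFactory(
                server_max_window_bits=11,
                client_max_window_bits=11,
                compress_settings={"memLevel": 4},
            )
        ],
    )

    asyncio.get_event_loop().run_until_complete(start_server)
    asyncio.get_event_loop().run_forever()

Lowering these knobs trades some compression rate and burst tolerance for a smaller, more predictable per-connection footprint.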
-The baseline memory use for a connection is about 20kB. - -The incoming bytes buffer, incoming messages queue and outgoing bytes buffer -contribute to the memory use of a connection. By default, each bytes buffer -takes up to 64kB and the messages queue up to 128MB, which is very large. - -Most applications use small messages. Setting ``max_size`` according to the -application's requirements is strongly recommended. See :ref:`buffers` for -details about tuning buffers. - -When compression is enabled, additional memory may be allocated for carrying -the compression context across messages, depending on the context takeover and -window size parameters. With the default configuration, this adds 320kB to the -memory use for a connection. +With the default settings, opening a connection uses 325 KiB of memory. -You can reduce this amount by configuring the ``PerMessageDeflate`` extension -with lower ``server_max_window_bits`` and ``client_max_window_bits`` values. -These parameters default is 15. Lowering them to 11 is a good choice. +Sending some highly compressed messages could use up to 128 MiB of memory +with an amplification factor of 1000 between network traffic and memory use. -Finally, memory consumed by your application code also counts towards the -memory use of a connection. +Configuring a server to :ref:`optimize memory usage ` will +improve security in addition to improving performance. Other limits ------------ diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index ba30efd99..c2988ead5 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -14,10 +14,11 @@ cryptocurrency daemonize fractalideas iterable -kB keepalive +KiB lifecycle Lifecycle +MiB nginx permessage pong @@ -28,6 +29,7 @@ subprotocol subprotocols TLS Unparse +uple websocket WebSocket websockets diff --git a/performance/mem_client.py b/performance/mem_client.py new file mode 100644 index 000000000..890216edf --- /dev/null +++ b/performance/mem_client.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python + +import asyncio +import statistics +import tracemalloc + +import websockets +from websockets.extensions import permessage_deflate + + +CLIENTS = 10 +INTERVAL = 1 / 10 # seconds + +MEM_SIZE = [] + + +async def mem_client(client): + # Space out connections to make them sequential. + await asyncio.sleep(client * INTERVAL) + + tracemalloc.start() + + async with websockets.connect( + "ws://localhost:8765", + extensions=[ + permessage_deflate.ClientPerMessageDeflateFactory( + server_max_window_bits=10, + client_max_window_bits=10, + compress_settings={"memLevel": 3}, + ) + ], + ) as ws: + await ws.send("hello") + await ws.recv() + + await ws.send(b"hello") + await ws.recv() + + MEM_SIZE.append(tracemalloc.get_traced_memory()[0]) + tracemalloc.stop() + + # Hold connection open until the end of the test. + await asyncio.sleep(CLIENTS * INTERVAL) + + +asyncio.get_event_loop().run_until_complete( + asyncio.gather(*[mem_client(client) for client in range(CLIENTS + 1)]) +) + +# First connection incurs non-representative setup costs. 
+del MEM_SIZE[0] + +print(f"µ = {statistics.mean(MEM_SIZE) / 1024:.1f} KiB") +print(f"σ = {statistics.stdev(MEM_SIZE) / 1024:.1f} KiB") diff --git a/performance/mem_server.py b/performance/mem_server.py new file mode 100644 index 000000000..6c8cef2ec --- /dev/null +++ b/performance/mem_server.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python + +import asyncio +import signal +import statistics +import tracemalloc + +import websockets +from websockets.extensions import permessage_deflate + + +CLIENTS = 10 +INTERVAL = 1 / 10 # seconds + +MEM_SIZE = [] + + +async def handler(ws, path): + msg = await ws.recv() + await ws.send(msg) + + msg = await ws.recv() + await ws.send(msg) + + MEM_SIZE.append(tracemalloc.get_traced_memory()[0]) + tracemalloc.stop() + + tracemalloc.start() + + # Hold connection open until the end of the test. + await asyncio.sleep(CLIENTS * INTERVAL) + + +async def mem_server(stop): + async with websockets.serve( + handler, + "localhost", + 8765, + extensions=[ + permessage_deflate.ServerPerMessageDeflateFactory( + server_max_window_bits=10, + client_max_window_bits=10, + compress_settings={"memLevel": 3}, + ) + ], + ): + await stop + + +loop = asyncio.get_event_loop() + +stop = asyncio.Future() +loop.add_signal_handler(signal.SIGINT, stop.set_result, None) + +tracemalloc.start() + +loop.run_until_complete(mem_server(stop)) + +# First connection incurs non-representative setup costs. +del MEM_SIZE[0] + +print(f"µ = {statistics.mean(MEM_SIZE) / 1024:.1f} KiB") +print(f"σ = {statistics.stdev(MEM_SIZE) / 1024:.1f} KiB") diff --git a/src/websockets/http.py b/src/websockets/http.py index 5e04e53bd..e28acac9f 100644 --- a/src/websockets/http.py +++ b/src/websockets/http.py @@ -182,9 +182,9 @@ async def read_line(stream): Return :class:`bytes` without CRLF. """ - # Security: this is bounded by the StreamReader's limit (default = 32kB). + # Security: this is bounded by the StreamReader's limit (default = 32 KiB). line = await stream.readline() - # Security: this guarantees header values are small (hard-coded = 4kB) + # Security: this guarantees header values are small (hard-coded = 4 KiB) if len(line) > MAX_LINE: raise ValueError("Line too long") # Not mandatory but safe - https://tools.ietf.org/html/rfc7230#section-3.5 diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 5c60348aa..981c0975c 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -99,7 +99,7 @@ class WebSocketCommonProtocol(asyncio.StreamReaderProtocol): To apply a timeout to any other API, wrap it in :func:`~asyncio.wait_for`. The ``max_size`` parameter enforces the maximum size for incoming messages - in bytes. The default value is 1MB. ``None`` disables the limit. If a + in bytes. The default value is 1 MiB. ``None`` disables the limit. If a message larger than the maximum size is received, :meth:`recv()` will raise :exc:`~websockets.exceptions.ConnectionClosed` and the connection will be closed with status code 1009. @@ -117,17 +117,17 @@ class WebSocketCommonProtocol(asyncio.StreamReaderProtocol): Since Python can use up to 4 bytes of memory to represent a single character, each websocket connection may use up to ``4 * max_size * max_queue`` bytes of memory to store incoming messages. By default, - this is 128MB. You may want to lower the limits, depending on your + this is 128 MiB. You may want to lower the limits, depending on your application's requirements. The ``read_limit`` argument sets the high-water limit of the buffer for incoming bytes. 
The low-water limit is half the high-water limit. The - default value is 64kB, half of asyncio's default (based on the current + default value is 64 KiB, half of asyncio's default (based on the current implementation of :class:`~asyncio.StreamReader`). The ``write_limit`` argument sets the high-water limit of the buffer for outgoing bytes. The low-water limit is a quarter of the high-water limit. - The default value is 64kB, equal to asyncio's default (based on the + The default value is 64 KiB, equal to asyncio's default (based on the current implementation of ``FlowControlMixin``). As soon as the HTTP request and response in the opening handshake are From 7d72dabd100b65bb05580f3e0163e3b7ce3dc787 Mon Sep 17 00:00:00 2001 From: q Date: Tue, 1 Jan 2019 09:55:26 +0800 Subject: [PATCH 036/281] Enable GNU Readline for interactive client --- src/websockets/__main__.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/websockets/__main__.py b/src/websockets/__main__.py index 350fc06e8..078733912 100644 --- a/src/websockets/__main__.py +++ b/src/websockets/__main__.py @@ -152,6 +152,11 @@ def main(): ) sys.stderr.flush() + try: + import readline # noqa + except ImportError: # Windows has no `readline` normally + pass + # Parse command line arguments. parser = argparse.ArgumentParser( prog="python -m websockets", From 04336426f894374012da0933ee370d5c20abeeda Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Tue, 1 Jan 2019 17:06:27 +0100 Subject: [PATCH 037/281] Indicate which functions are coroutines. Also fix indentation issues in the API docs. Thanks @cjerdonek for the idea and @njsmith for sphinxcontrib-trio. Fix #295 (assuming RTD builds properly). --- .readthedocs.yml | 7 +++++++ docs/api.rst | 34 +++++++++++++++++++--------------- docs/conf.py | 7 ++++++- src/websockets/server.py | 3 +-- 4 files changed, 33 insertions(+), 18 deletions(-) create mode 100644 .readthedocs.yml diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 000000000..e5e224afd --- /dev/null +++ b/.readthedocs.yml @@ -0,0 +1,7 @@ +build: + image: latest + +python: + version: 3.6 + +requirements_file: docs/requirements.txt diff --git a/docs/api.rst b/docs/api.rst index e480604bb..ce6529d1d 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -32,21 +32,24 @@ Server .. automodule:: websockets.server - .. autofunction:: serve(ws_handler, host=None, port=None, *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origins=None, extensions=None, subprotocols=None, extra_headers=None, process_request=None, select_subprotocol=None, **kwds) + .. autofunction:: serve(ws_handler, host=None, port=None, *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origins=None, extensions=None, subprotocols=None, extra_headers=None, process_request=None, select_subprotocol=None, **kwds) + :async: - .. autofunction:: unix_serve(ws_handler, path, *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origins=None, extensions=None, subprotocols=None, extra_headers=None, process_request=None, select_subprotocol=None, **kwds) + .. 
autofunction:: unix_serve(ws_handler, path, *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origins=None, extensions=None, subprotocols=None, extra_headers=None, process_request=None, select_subprotocol=None, **kwds) + :async: - .. autoclass:: WebSocketServer + .. autoclass:: WebSocketServer .. automethod:: close() .. automethod:: wait_closed() .. autoattribute:: sockets - .. autoclass:: WebSocketServerProtocol(ws_handler, ws_server, *, host=None, port=None, secure=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, origins=None, extensions=None, subprotocols=None, extra_headers=None, process_request=None, select_subprotocol=None) + .. autoclass:: WebSocketServerProtocol(ws_handler, ws_server, *, host=None, port=None, secure=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, origins=None, extensions=None, subprotocols=None, extra_headers=None, process_request=None, select_subprotocol=None) .. automethod:: handshake(origins=None, available_extensions=None, available_subprotocols=None, extra_headers=None) .. automethod:: process_request(path, request_headers) + :async: .. automethod:: select_subprotocol(client_subprotocols, server_subprotocols) Client @@ -54,9 +57,10 @@ Client .. automodule:: websockets.client - .. autofunction:: connect(uri, *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origin=None, extensions=None, subprotocols=None, extra_headers=None, **kwds) + .. autofunction:: connect(uri, *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origin=None, extensions=None, subprotocols=None, extra_headers=None, **kwds) + :async: - .. autoclass:: WebSocketClientProtocol(*, host=None, port=None, secure=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, origin=None, extensions=None, subprotocols=None, extra_headers=None) + .. autoclass:: WebSocketClientProtocol(*, host=None, port=None, secure=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, origin=None, extensions=None, subprotocols=None, extra_headers=None) .. automethod:: handshake(wsuri, origin=None, available_extensions=None, available_subprotocols=None, extra_headers=None) @@ -65,7 +69,7 @@ Shared .. automodule:: websockets.protocol - .. autoclass:: WebSocketCommonProtocol(*, host=None, port=None, secure=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None) + .. autoclass:: WebSocketCommonProtocol(*, host=None, port=None, secure=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None) .. automethod:: close(code=1000, reason='') .. automethod:: wait_closed() @@ -87,15 +91,15 @@ Per-Message Deflate Extension .. automodule:: websockets.extensions.permessage_deflate - .. 
autoclass:: ServerPerMessageDeflateFactory + .. autoclass:: ServerPerMessageDeflateFactory - .. autoclass:: ClientPerMessageDeflateFactory + .. autoclass:: ClientPerMessageDeflateFactory Exceptions .......... .. automodule:: websockets.exceptions - :members: + :members: Low-level --------- @@ -104,25 +108,25 @@ Opening handshake ................. .. automodule:: websockets.handshake - :members: + :members: Data transfer ............. .. automodule:: websockets.framing - :members: + :members: URI parser .......... .. automodule:: websockets.uri - :members: + :members: Utilities ......... .. automodule:: websockets.headers - :members: + :members: .. automodule:: websockets.http - :members: + :members: diff --git a/docs/conf.py b/docs/conf.py index 1a5448f7b..4ad4ad4b7 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -25,7 +25,12 @@ # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.viewcode'] +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.intersphinx', + 'sphinx.ext.viewcode', + 'sphinxcontrib_trio', + ] # Spelling check needs an additional module that is not installed by default. # Add it only if spelling check is requested so docs can be generated without it. diff --git a/src/websockets/server.py b/src/websockets/server.py index 839b3c861..c0dc29dc3 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -395,8 +395,7 @@ def process_subprotocol(self, headers, available_subprotocols): return subprotocol - @staticmethod - def select_subprotocol(client_subprotocols, server_subprotocols): + def select_subprotocol(self, client_subprotocols, server_subprotocols): """ Pick a subprotocol among those offered by the client. From 76d739dfcf85b3739181c11dcbaab4d4b542e354 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Tue, 1 Jan 2019 17:13:52 +0100 Subject: [PATCH 038/281] Add RTD requirements. (forgotten in previous commits) --- docs/requirements.txt | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 docs/requirements.txt diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 000000000..954e8c755 --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,3 @@ +sphinx +sphinxcontrib-spelling +sphinxcontrib-trio From 8fc78fee48d52bb3c690e925bad0825613319296 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Tue, 1 Jan 2019 19:45:03 +0100 Subject: [PATCH 039/281] Send fragmented messages from async iterators. Fix #477. --- src/websockets/protocol.py | 35 ++++++++++++++--- tests/test_protocol.py | 80 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 110 insertions(+), 5 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 981c0975c..8dacbf4ce 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -432,10 +432,11 @@ async def send(self, data): object (:class:`bytes`, :class:`bytearray`, or :class:`memoryview`) as a binary frame. - It also accepts an iterable of strings or bytes-like objects. Each - item is treated as a message fragment and sent in its own frame. All - items must be of the same type, or else :meth:`send` will raise a - :exc:`TypeError` and the connection will be closed. + It also accepts an iterable or an asynchronous iterator of strings or + bytes-like objects. Each item is treated as a message fragment and + sent in its own frame. 
All items must be of the same type, or else + :meth:`send` will raise a :exc:`TypeError` and the connection will be + closed. It raises a :exc:`TypeError` for other inputs. @@ -482,7 +483,31 @@ async def send(self, data): # Fragmented message -- asynchronous iterator - # To be implemented after dropping support for Python 3.4. + elif isinstance(data, collections.abc.AsyncIterable): + # aiter_data = aiter(data) without aiter + aiter_data = type(data).__aiter__(data) + + # First fragment. + try: + # data = anext(aiter_data) without anext + data = await type(aiter_data).__anext__(aiter_data) + except StopAsyncIteration: + return + opcode, data = prepare_data(data) + await self.write_frame(False, opcode, data) + + # Other fragments. + async for data in aiter_data: + confirm_opcode, data = prepare_data(data) + if confirm_opcode != opcode: + # We're half-way through a fragmented message and we can't + # complete it. This makes the connection unusable. + self.fail_connection(1011) + raise TypeError("data contains inconsistent types") + await self.write_frame(False, OP_CONT, data) + + # Final fragment. + await self.write_frame(True, OP_CONT, b"") else: raise TypeError("data must be bytes, str, or iterable") diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 70c2be0bd..7a8b0a69a 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -27,6 +27,27 @@ MS = max(MS, 2.5 * time.get_clock_info("monotonic").resolution) +class async_iterable: + + # In Python ≥ 3.6, this can be simplified to: + + # async def async_iterable(iterable): + # for item in iterable: + # yield item + + def __init__(self, iterable): + self.iterator = iter(iterable) + + def __aiter__(self): + return self + + async def __anext__(self): + try: + return next(self.iterator) + except StopIteration: + raise StopAsyncIteration + + class TransportMock(unittest.mock.Mock): """ Transport mock to control the protocol's inputs and outputs in tests. 
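As a usage sketch for the feature this patch introduces (not part of the patch itself), a fragmented message can now be produced lazily by an asynchronous generator and passed straight to ``send()``. Here ``websocket`` stands for any connected protocol instance and the chunk values are made up:

.. code:: python

    async def chunks():
        # Each yielded item becomes one fragment of a single message;
        # all items must be of the same type (here, str).
        for part in ["3", "2", "1", "Fire!"]:
            yield part

    async def producer(websocket):
        # send() writes each fragment as it is produced, so the full
        # message never has to be assembled in memory.
        await websocket.send(chunks())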
@@ -599,6 +620,65 @@ def test_send_iterable_mixed_type_error(self): (True, OP_CLOSE, serialize_close(1011, "")), ) + def test_send_async_iterable_text(self): + self.loop.run_until_complete(self.protocol.send(async_iterable(["ca", "fé"]))) + self.assertFramesSent( + (False, OP_TEXT, "ca".encode("utf-8")), + (False, OP_CONT, "fé".encode("utf-8")), + (True, OP_CONT, "".encode("utf-8")), + ) + + def test_send_async_iterable_binary(self): + self.loop.run_until_complete(self.protocol.send(async_iterable([b"te", b"a"]))) + self.assertFramesSent( + (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") + ) + + def test_send_async_iterable_binary_from_bytearray(self): + self.loop.run_until_complete( + self.protocol.send(async_iterable([bytearray(b"te"), bytearray(b"a")])) + ) + self.assertFramesSent( + (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") + ) + + def test_send_async_iterable_binary_from_memoryview(self): + self.loop.run_until_complete( + self.protocol.send(async_iterable([memoryview(b"te"), memoryview(b"a")])) + ) + self.assertFramesSent( + (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") + ) + + def test_send_async_iterable_binary_from_non_contiguous_memoryview(self): + self.loop.run_until_complete( + self.protocol.send( + async_iterable([memoryview(b"ttee")[::2], memoryview(b"aa")[::2]]) + ) + ) + self.assertFramesSent( + (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") + ) + + def test_send_empty_async_iterable(self): + self.loop.run_until_complete(self.protocol.send(async_iterable([]))) + self.assertNoFrameSent() + + def test_send_async_iterable_type_error(self): + with self.assertRaises(TypeError): + self.loop.run_until_complete(self.protocol.send(async_iterable([42]))) + self.assertNoFrameSent() + + def test_send_async_iterable_mixed_type_error(self): + with self.assertRaises(TypeError): + self.loop.run_until_complete( + self.protocol.send(async_iterable(["café", b"tea"])) + ) + self.assertFramesSent( + (False, OP_TEXT, "café".encode("utf-8")), + (True, OP_CLOSE, serialize_close(1011, "")), + ) + def test_send_on_closing_connection_local(self): close_task = self.half_close_connection_local() From 207518d813347b42d5f2fb9f50b09bd101016a24 Mon Sep 17 00:00:00 2001 From: Thierry Parmentelat Date: Fri, 18 Jan 2019 13:07:06 +0100 Subject: [PATCH 040/281] typo in documentation --- src/websockets/protocol.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 8dacbf4ce..457e37b80 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -330,7 +330,7 @@ def closed(self): """ This property is ``True`` once the connection is closed. - Be aware that both :attr:`open` and :attr`closed` are ``False`` during + Be aware that both :attr:`open` and :attr:`closed` are ``False`` during the opening and closing sequences. """ From ec2e589b22146c394c2adfd47f5995db76ca1184 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 26 Jan 2019 09:58:53 +0100 Subject: [PATCH 041/281] Update `make clean` after introducing the src dir. --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 0863f8578..9fa5c2422 100644 --- a/Makefile +++ b/Makefile @@ -18,4 +18,4 @@ coverage: clean: find . -name '*.pyc' -o -name '*.so' -delete find . 
-name __pycache__ -delete - rm -rf .coverage build compliance/reports dist docs/_build htmlcov MANIFEST README websockets.egg-info + rm -rf .coverage build compliance/reports dist docs/_build htmlcov MANIFEST README src/websockets.egg-info From ed8d800304b0b2f0959060a3a83086131e208ed0 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Fri, 25 Jan 2019 22:25:30 +0100 Subject: [PATCH 042/281] Drop support for Python 3.5. --- .appveyor.yml | 4 ++-- .circleci/config.yml | 12 ------------ .travis.yml | 4 ++-- README.rst | 2 +- docs/changelog.rst | 2 +- setup.cfg | 2 +- setup.py | 7 +++---- tox.ini | 2 +- 8 files changed, 11 insertions(+), 24 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index 5109200b4..7954ee4be 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -5,8 +5,8 @@ branches: skip_branch_with_pr: true environment: -# websockets only works on Python >= 3.5. - CIBW_SKIP: cp27-* cp33-* cp34-* +# websockets only works on Python >= 3.6. + CIBW_SKIP: cp27-* cp33-* cp34-* cp35-* CIBW_TEST_COMMAND: python -W default -m unittest WEBSOCKETS_TESTS_TIMEOUT_FACTOR: 100 diff --git a/.circleci/config.yml b/.circleci/config.yml index 5ec5b5103..8a7df9ac6 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -11,15 +11,6 @@ jobs: - run: sudo pip install tox codecov - run: tox -e coverage,black,flake8,isort - run: codecov - py35: - docker: - - image: circleci/python:3.5 - steps: - # Remove IPv6 entry for localhost in Circle CI containers because it doesn't work anyway. - - run: sudo cp /etc/hosts /tmp; sudo sed -i '/::1/d' /tmp/hosts; sudo cp /tmp/hosts /etc - - checkout - - run: sudo pip install tox - - run: tox -e py35 py36: docker: - image: circleci/python:3.6 @@ -44,9 +35,6 @@ workflows: build: jobs: - main - - py35: - requires: - - main - py36: requires: - main diff --git a/.travis.yml b/.travis.yml index c0f11357e..030693759 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,7 @@ env: global: - # websockets only works on Python >= 3.5. - - CIBW_SKIP="cp27-* cp33-* cp34-*" + # websockets only works on Python >= 3.6. + - CIBW_SKIP="cp27-* cp33-* cp34-* cp35-*" - CIBW_TEST_COMMAND="python3 -W default -m unittest" - WEBSOCKETS_TESTS_TIMEOUT_FACTOR=100 diff --git a/README.rst b/README.rst index 572647a15..0da52524e 100644 --- a/README.rst +++ b/README.rst @@ -124,7 +124,7 @@ Why shouldn't I use ``websockets``? and :rfc:`7692`: Compression Extensions for WebSocket. Its support for HTTP is minimal — just enough for a HTTP health check. * If you want to use Python 2: ``websockets`` builds upon ``asyncio`` which - only works on Python 3. ``websockets`` requires Python ≥ 3.5. + only works on Python 3. ``websockets`` requires Python ≥ 3.6. What else? ---------- diff --git a/docs/changelog.rst b/docs/changelog.rst index 320300f64..b53080501 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -10,7 +10,7 @@ Changelog .. warning:: - **Version 8.0 drops compatibility with Python 3.4.** + **Version 8.0 drops compatibility with Python 3.4 and 3.5.** .. 
warning:: diff --git a/setup.cfg b/setup.cfg index 88b9b1a33..c306b2d4f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,5 +1,5 @@ [bdist_wheel] -python-tag = py35.py36.py37 +python-tag = py36.py37 [metadata] license_file = LICENSE diff --git a/setup.py b/setup.py index 2956058a4..1fe71a4f0 100644 --- a/setup.py +++ b/setup.py @@ -14,8 +14,8 @@ py_version = sys.version_info[:2] -if py_version < (3, 5): - raise Exception("websockets requires Python >= 3.5.") +if py_version < (3, 6): + raise Exception("websockets requires Python >= 3.6.") packages = ['websockets', 'websockets/extensions'] @@ -47,7 +47,6 @@ 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', ], @@ -56,6 +55,6 @@ ext_modules=ext_modules, include_package_data=True, zip_safe=True, - python_requires='>=3.5', + python_requires='>=3.6', test_loader='unittest:TestLoader', ) diff --git a/tox.ini b/tox.ini index de0f285d0..238fcd649 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py35,py36,py37,coverage,black,flake8,isort +envlist = py36,py37,coverage,black,flake8,isort [testenv] commands = python -W default -m unittest {posargs} From 67434cf4d996c259d82b21bb3bcfd1ce0d19c74e Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 26 Jan 2019 09:29:46 +0100 Subject: [PATCH 043/281] =?UTF-8?q?Update=20documentation=20for=20Python?= =?UTF-8?q?=20=E2=89=A5=203.6.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.rst | 4 +- docs/cheatsheet.rst | 10 ++--- docs/deployment.rst | 7 --- docs/intro.rst | 87 ++------------------------------------ example/old_client.py | 24 ----------- example/old_server.py | 20 --------- example/old_shutdown.py | 29 ------------- src/websockets/client.py | 4 +- src/websockets/protocol.py | 3 +- src/websockets/server.py | 4 +- 10 files changed, 15 insertions(+), 177 deletions(-) delete mode 100755 example/old_client.py delete mode 100755 example/old_server.py delete mode 100755 example/old_shutdown.py diff --git a/README.rst b/README.rst index 0da52524e..ae47c7a48 100644 --- a/README.rst +++ b/README.rst @@ -36,7 +36,7 @@ Python with a focus on correctness and simplicity. Built on top of ``asyncio``, Python's standard asynchronous I/O framework, it provides an elegant coroutine-based API. -Here's how a client sends and receives messages (Python ≥ 3.6): +Here's how a client sends and receives messages: .. copy-pasted because GitHub doesn't support the include directive @@ -55,7 +55,7 @@ Here's how a client sends and receives messages (Python ≥ 3.6): asyncio.get_event_loop().run_until_complete( hello('ws://localhost:8765')) -And here's an echo server (Python ≥ 3.6): +And here's an echo server: .. code:: python diff --git a/docs/cheatsheet.rst b/docs/cheatsheet.rst index 3b8993a8c..15a731084 100644 --- a/docs/cheatsheet.rst +++ b/docs/cheatsheet.rst @@ -27,9 +27,8 @@ Server needed in general. * Create a server with :func:`~server.serve` which is similar to asyncio's - :meth:`~asyncio.AbstractEventLoop.create_server`. - - * On Python ≥ 3.5.1, you can also use it as an asynchronous context manager. + :meth:`~asyncio.AbstractEventLoop.create_server`. You can also use it as an + asynchronous context manager. 
* The server takes care of establishing connections, then lets the handler execute the application logic, and finally closes the connection after the @@ -43,9 +42,8 @@ Client ------ * Create a client with :func:`~client.connect` which is similar to asyncio's - :meth:`~asyncio.BaseEventLoop.create_connection`. - - * On Python ≥ 3.5.1, you can also use it as an asynchronous context manager. + :meth:`~asyncio.BaseEventLoop.create_connection`. You can also use it as an + asynchronous context manager. * For advanced customization, you may subclass :class:`~server.WebSocketClientProtocol` and pass either this subclass or diff --git a/docs/deployment.rst b/docs/deployment.rst index f8bc7f94b..b0c05dd73 100644 --- a/docs/deployment.rst +++ b/docs/deployment.rst @@ -37,13 +37,6 @@ Here's a full example (Unix-only): .. literalinclude:: ../example/shutdown.py :emphasize-lines: 13,17-19 -websockets supports asynchronous context managers on Python ≥ 3.5.1. ``async -for`` was introduced in Python 3.6. Here's the equivalent for older Python -versions: - -.. literalinclude:: ../example/old_shutdown.py - :emphasize-lines: 22-25 - It's more difficult to achieve the same effect on Windows. Some third-party projects try to help with this problem. diff --git a/docs/intro.rst b/docs/intro.rst index dea152ab1..389896ef4 100644 --- a/docs/intro.rst +++ b/docs/intro.rst @@ -6,17 +6,12 @@ Getting started Requirements ------------ -``websockets`` requires Python ≥ 3.5. +``websockets`` requires Python ≥ 3.6. You should use the latest version of Python if possible. If you're using an older version, be aware that for each minor version (3.x), only the latest bugfix release (3.x.y) is officially supported. -.. warning:: - - This documentation is written for Python ≥ 3.6. If you're using an older - Python version, you need to :ref:`adapt the code samples `. - Installation ------------ @@ -61,16 +56,13 @@ because they reduce the risk of interference by bad proxies. The WSS protocol is to WS what HTTPS is to HTTP: the connection is encrypted with TLS. WSS requires TLS certificates like HTTPS. -Here's how to adapt the server example to provide secure connections, using -APIs available in Python ≥ 3.6. - -Refer to the documentation of the :mod:`ssl` module for configuring the -context securely or adapting the code to older Python versions. +Here's how to adapt the server example to provide secure connections. See the +documentation of the :mod:`ssl` module for configuring the context securely. .. literalinclude:: ../example/secure_server.py :emphasize-lines: 19,23-24 -Here's how to adapt the client, also on Python ≥ 3.6. +Here's how to adapt the client. .. literalinclude:: ../example/secure_client.py :emphasize-lines: 10,15-16 @@ -137,18 +129,6 @@ messages received on the WebSocket connection. Iteration terminates when the client disconnects. -Asynchronous iteration was introduced in Python 3.6; here's the same code for -earlier Python versions:: - - async def consumer_handler(websocket, path): - while True: - message = await websocket.recv() - await consumer(message) - -:meth:`~protocol.WebSocketCommonProtocol.recv` raises a -:exc:`~exceptions.ConnectionClosed` exception when the client disconnects, -which breaks out of the ``while True`` loop. - Producer ........ @@ -226,62 +206,3 @@ One more thing... ``websockets`` provides an interactive client:: $ python -m websockets wss://echo.websocket.org/ - -.. 
_python-lt-36: - -Python < 3.6 ------------- - -This documentation takes advantage of several features that aren't available -in Python < 3.6: - -- Asynchronous context managers didn't work well until Python 3.5.1; -- Asynchronous iterators were added in Python 3.6; -- f-strings were introduced in Python 3.6 (this is unrelated to :mod:`asyncio` - and :mod:`websockets`). - -Here's how to adapt the basic server example. - -.. literalinclude:: ../example/old_server.py - -And here's the basic client example. - -.. literalinclude:: ../example/old_client.py - -Asynchronous context managers -............................. - -Asynchronous context managers were added in Python 3.5. However, -``websockets`` only supports them on Python ≥ 3.5.1, where -:func:`~asyncio.ensure_future` accepts any awaitable. - -If you're using Python < 3.5.1, instead of:: - - with websockets.connect(...) as client: - ... - -you must write:: - - client = yield from websockets.connect(...) - try: - ... - finally: - yield from client.close() - -Asynchronous iterators -...................... - -If you're using Python < 3.6, you must replace:: - - async for message in websocket: - ... - -with:: - - while True: - message = yield from websocket.recv() - ... - -The latter will always raise a :exc:`~exceptions.ConnectionClosed` exception -when the connection is closed, while the former will only raise that exception -if the connection terminates with an error. diff --git a/example/old_client.py b/example/old_client.py deleted file mode 100755 index be34f14be..000000000 --- a/example/old_client.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python - -# WS client example for old Python versions - -import asyncio -import websockets - -async def hello(): - websocket = await websockets.connect( - 'ws://localhost:8765/') - - try: - name = input("What's your name? ") - - await websocket.send(name) - print("> {}".format(name)) - - greeting = await websocket.recv() - print("< {}".format(greeting)) - - finally: - await websocket.close() - -asyncio.get_event_loop().run_until_complete(hello()) diff --git a/example/old_server.py b/example/old_server.py deleted file mode 100755 index 8c63e33e6..000000000 --- a/example/old_server.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python - -# WS server example for old Python versions - -import asyncio -import websockets - -async def hello(websocket, path): - name = await websocket.recv() - print("< {}".format(name)) - - greeting = "Hello {}!".format(name) - - await websocket.send(greeting) - print("> {}".format(greeting)) - -start_server = websockets.serve(hello, 'localhost', 8765) - -asyncio.get_event_loop().run_until_complete(start_server) -asyncio.get_event_loop().run_forever() diff --git a/example/old_shutdown.py b/example/old_shutdown.py deleted file mode 100755 index 180da9059..000000000 --- a/example/old_shutdown.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python - -import asyncio -import signal -import websockets - -async def echo(websocket, path): - while True: - try: - msg = await websocket.recv() - except websockets.ConnectionClosed: - break - else: - await websocket.send(msg) - -loop = asyncio.get_event_loop() - -# Create the server. -start_server = websockets.serve(echo, 'localhost', 8765) -server = loop.run_until_complete(start_server) - -# Run the server until receiving SIGTERM. -stop = asyncio.Future() -loop.add_signal_handler(signal.SIGTERM, stop.set_result, None) -loop.run_until_complete(stop) - -# Shut down the server. 
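For reference, the pattern that replaces these old-style examples, ``serve()`` used as an asynchronous context manager together with a stop future, looks roughly like this. It is a sketch modeled on ``example/shutdown.py`` (updated later in this series); the echo handler is a stand-in and the approach is Unix-only because of the signal handler.

.. code:: python

    import asyncio
    import signal
    import websockets

    async def echo(websocket, path):
        async for message in websocket:
            await websocket.send(message)

    async def echo_server(stop):
        # The server is shut down and cleaned up when exiting the context.
        async with websockets.serve(echo, "localhost", 8765):
            await stop

    loop = asyncio.get_event_loop()

    # The stop condition is set when receiving SIGTERM.
    stop = loop.create_future()
    loop.add_signal_handler(signal.SIGTERM, stop.set_result, None)

    # Run the server until the stop condition is met.
    loop.run_until_complete(echo_server(stop))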
-server.close() -loop.run_until_complete(server.wait_closed()) diff --git a/src/websockets/client.py b/src/websockets/client.py index 46dd1b447..5e504969b 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -316,8 +316,8 @@ class Connect: :class:`WebSocketClientProtocol` which can then be used to send and receive messages. - On Python ≥ 3.5.1, :func:`connect` can be used as a asynchronous context - manager. In that case, the connection is closed when exiting the context. + :func:`connect` can also be used as a asynchronous context manager. In + that case, the connection is closed when exiting the context. :func:`connect` is a wrapper around the event loop's :meth:`~asyncio.BaseEventLoop.create_connection` method. Unknown keyword diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 457e37b80..1e0814dcf 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -57,8 +57,7 @@ class WebSocketCommonProtocol(asyncio.StreamReaderProtocol): control frames automatically. It sends outgoing data frames and performs the closing handshake. - On Python ≥ 3.6, :class:`WebSocketCommonProtocol` instances support - asynchronous iteration:: + :class:`WebSocketCommonProtocol` supports asynchronous iteration:: async for message in websocket: await process(message) diff --git a/src/websockets/server.py b/src/websockets/server.py index c0dc29dc3..17b13aec2 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -676,8 +676,8 @@ class Serve: :meth:`~websockets.server.WebSocketServer.wait_closed` methods for terminating the server and cleaning up its resources. - On Python ≥ 3.5.1, :func:`serve` can also be used as an asynchronous context - manager. In this case, the server is shut down when exiting the context. + :func:`serve` can also be used as an asynchronous context manager. In + this case, the server is shut down when exiting the context. :func:`serve` is a wrapper around the event loop's :meth:`~asyncio.AbstractEventLoop.create_server` method. Internally, it From 17cb6949f40f84acc505c5b13f10837f5cb327e4 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 26 Jan 2019 09:55:52 +0100 Subject: [PATCH 044/281] =?UTF-8?q?Update=20tests=20for=20Python=20?= =?UTF-8?q?=E2=89=A5=203.6.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/test_client_server.py | 9 ++------- tests/test_protocol.py | 22 +++------------------- 2 files changed, 5 insertions(+), 26 deletions(-) diff --git a/tests/test_client_server.py b/tests/test_client_server.py index 6b80c7f6e..20cef5925 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -1104,19 +1104,14 @@ class SSLClientServerTests(ClientServerTests): @property def server_context(self): - # Change to ssl.PROTOCOL_TLS_SERVER when dropping Python < 3.6. - ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) ssl_context.load_cert_chain(testcert) return ssl_context @property def client_context(self): - # Change to ssl.PROTOCOL_TLS_CLIENT when dropping Python < 3.6. - # Then remove verify_mode and check_hostname below. 
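For context on the ``PROTOCOL_TLS_CLIENT`` switch in these tests, application code would build the same kind of context and pass it to ``connect()``; keyword arguments are forwarded to ``create_connection()``, so ``ssl`` enables TLS. A sketch with a made-up certificate path:

.. code:: python

    import asyncio
    import ssl
    import websockets

    ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    ssl_context.load_verify_locations("localhost.pem")  # hypothetical certificate file

    async def hello(uri):
        async with websockets.connect(uri, ssl=ssl_context) as websocket:
            await websocket.send("Hello, TLS!")
            reply = await websocket.recv()
            print(reply)

    asyncio.get_event_loop().run_until_complete(
        hello("wss://localhost:8765"))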
- ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) ssl_context.load_verify_locations(testcert) - ssl_context.verify_mode = ssl.CERT_REQUIRED - ssl_context.check_hostname = True return ssl_context def start_server(self, **kwds): diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 7a8b0a69a..9e9d40393 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -27,25 +27,9 @@ MS = max(MS, 2.5 * time.get_clock_info("monotonic").resolution) -class async_iterable: - - # In Python ≥ 3.6, this can be simplified to: - - # async def async_iterable(iterable): - # for item in iterable: - # yield item - - def __init__(self, iterable): - self.iterator = iter(iterable) - - def __aiter__(self): - return self - - async def __anext__(self): - try: - return next(self.iterator) - except StopIteration: - raise StopAsyncIteration +async def async_iterable(iterable): + for item in iterable: + yield item class TransportMock(unittest.mock.Mock): From 31106eb42c846e50e1956043e9f2564e398ccd6c Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Fri, 25 Jan 2019 22:30:44 +0100 Subject: [PATCH 045/281] Remove conditional code for Python < 3.6. --- setup.py | 3 - src/websockets/client.py | 19 +----- src/websockets/protocol.py | 25 +++++-- src/websockets/py36/__init__.py | 2 - src/websockets/py36/protocol.py | 20 ------ src/websockets/server.py | 23 +------ tests/py36/__init__.py | 0 tests/py36/_test_client_server.py | 105 ------------------------------ tests/test_client_server.py | 100 +++++++++++++++++++++++----- 9 files changed, 107 insertions(+), 190 deletions(-) delete mode 100644 src/websockets/py36/__init__.py delete mode 100644 src/websockets/py36/protocol.py delete mode 100644 tests/py36/__init__.py delete mode 100644 tests/py36/_test_client_server.py diff --git a/setup.py b/setup.py index 1fe71a4f0..d4fadb240 100644 --- a/setup.py +++ b/setup.py @@ -19,9 +19,6 @@ packages = ['websockets', 'websockets/extensions'] -if py_version >= (3, 6): - packages.append('websockets/py36') - ext_modules = [ setuptools.Extension( 'websockets.speedups', diff --git a/src/websockets/client.py b/src/websockets/client.py index 5e504969b..9babbb412 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -6,7 +6,6 @@ import asyncio import collections.abc import logging -import sys from .exceptions import ( InvalidHandshake, @@ -520,20 +519,4 @@ def __await__(self): return self.__await_impl__().__await__() -# We can't define __await__ on Python < 3.5.1 because asyncio.ensure_future -# didn't accept arbitrary awaitables until Python 3.5.1. We don't define -# __aenter__ and __aexit__ either on Python < 3.5.1 to keep things simple. -if sys.version_info[:3] < (3, 5, 1): # pragma: no cover - - del Connect.__aenter__ - del Connect.__aexit__ - del Connect.__await__ - - async def connect(*args, **kwds): - return Connect(*args, **kwds).__iter__() - - connect.__doc__ = Connect.__doc__ - -else: - - connect = Connect +connect = Connect diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 1e0814dcf..f87f40086 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -15,7 +15,6 @@ import logging import random import struct -import sys from .exceptions import ( ConnectionClosed, @@ -347,6 +346,24 @@ async def wait_closed(self): """ await asyncio.shield(self.connection_lost_waiter) + async def __aiter__(self): + """ + Iterate on received messages. 
+ + Exit normally when the connection is closed with code 1000 or 1001. + + Raise an exception in other cases. + + """ + try: + while True: + yield await self.recv() + except ConnectionClosed as exc: + if exc.code == 1000 or exc.code == 1001: + return + else: + raise + async def recv(self): """ This coroutine receives the next message. @@ -1225,9 +1242,3 @@ def connection_lost(self, exc): # - it must never be canceled. self.connection_lost_waiter.set_result(None) super().connection_lost(exc) - - -if sys.version_info[:2] >= (3, 6): # pragma: no cover - from .py36.protocol import __aiter__ - - WebSocketCommonProtocol.__aiter__ = __aiter__ diff --git a/src/websockets/py36/__init__.py b/src/websockets/py36/__init__.py deleted file mode 100644 index b9211bf87..000000000 --- a/src/websockets/py36/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# This package contains code using async iteration added in Python 3.6. -# It cannot be imported on Python < 3.6 because it triggers syntax errors. diff --git a/src/websockets/py36/protocol.py b/src/websockets/py36/protocol.py deleted file mode 100644 index f0784de05..000000000 --- a/src/websockets/py36/protocol.py +++ /dev/null @@ -1,20 +0,0 @@ -from ..exceptions import ConnectionClosed - - -async def __aiter__(self): - """ - Iterate on received messages. - - Exit normally when the connection is closed with code 1000. - - Raise an exception in other cases. - - """ - try: - while True: - yield await self.recv() - except ConnectionClosed as exc: - if exc.code == 1000 or exc.code == 1001: - return - else: - raise diff --git a/src/websockets/server.py b/src/websockets/server.py index 17b13aec2..979fbcd1b 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -8,7 +8,6 @@ import email.utils import http import logging -import sys import warnings from .exceptions import ( @@ -861,6 +860,9 @@ def __await__(self): return self.__await_impl__().__await__() +serve = Serve + + def unix_serve(ws_handler, path, **kwargs): """ Similar to :func:`serve()`, but for listening on Unix sockets. @@ -874,22 +876,3 @@ def unix_serve(ws_handler, path, **kwargs): """ return serve(ws_handler, path=path, **kwargs) - - -# We can't define __await__ on Python < 3.5.1 because asyncio.ensure_future -# didn't accept arbitrary awaitables until Python 3.5.1. We don't define -# __aenter__ and __aexit__ either on Python < 3.5.1 to keep things simple. -if sys.version_info[:3] < (3, 5, 1): # pragma: no cover - - del Serve.__aenter__ - del Serve.__aexit__ - del Serve.__await__ - - async def serve(*args, **kwds): - return Serve(*args, **kwds).__iter__() - - serve.__doc__ = Serve.__doc__ - -else: - - serve = Serve diff --git a/tests/py36/__init__.py b/tests/py36/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/py36/_test_client_server.py b/tests/py36/_test_client_server.py deleted file mode 100644 index 10b135cc9..000000000 --- a/tests/py36/_test_client_server.py +++ /dev/null @@ -1,105 +0,0 @@ -# Tests containing Python 3.6+ syntax, extracted from test_client_server.py. - -import asyncio -import sys -import unittest - -from websockets.client import * -from websockets.exceptions import ConnectionClosed -from websockets.server import * - -from ..test_client_server import get_server_uri - - -# Fail at import time, not just at run time, to prevent test -# discovery. 
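Here is a quick sketch of the ``unix_serve()`` helper documented above, useful when a reverse proxy forwards traffic to a Unix socket. The socket path and handler are made up:

.. code:: python

    import asyncio
    from websockets.server import unix_serve

    async def handler(websocket, path):
        message = await websocket.recv()
        await websocket.send(message)

    # Listen on a Unix domain socket instead of a TCP host/port.
    start_server = unix_serve(handler, "/tmp/websockets.sock")

    asyncio.get_event_loop().run_until_complete(start_server)
    asyncio.get_event_loop().run_forever()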
-if sys.version_info[:2] < (3, 6): # pragma: no cover - raise ImportError("Python 3.6+ only") - - -MESSAGES = ["3", "2", "1", "Fire!"] - - -class AsyncIteratorTests(unittest.TestCase): - - # This is a protocol-level feature, but since it's a high-level API, it is - # much easier to exercise at the client or server level. - - def setUp(self): - self.loop = asyncio.new_event_loop() - asyncio.set_event_loop(self.loop) - - def tearDown(self): - self.loop.close() - - def test_iterate_on_messages(self): - async def handler(ws, path): - for message in MESSAGES: - await ws.send(message) - - start_server = serve(handler, "localhost", 0) - server = self.loop.run_until_complete(start_server) - - messages = [] - - async def run_client(): - nonlocal messages - async with connect(get_server_uri(server)) as ws: - async for message in ws: - messages.append(message) - - self.loop.run_until_complete(run_client()) - - self.assertEqual(messages, MESSAGES) - - server.close() - self.loop.run_until_complete(server.wait_closed()) - - def test_iterate_on_messages_going_away_exit_ok(self): - async def handler(ws, path): - for message in MESSAGES: - await ws.send(message) - await ws.close(1001) - - start_server = serve(handler, "localhost", 0) - server = self.loop.run_until_complete(start_server) - - messages = [] - - async def run_client(): - nonlocal messages - async with connect(get_server_uri(server)) as ws: - async for message in ws: - messages.append(message) - - self.loop.run_until_complete(run_client()) - - self.assertEqual(messages, MESSAGES) - - server.close() - self.loop.run_until_complete(server.wait_closed()) - - def test_iterate_on_messages_internal_error_exit_not_ok(self): - async def handler(ws, path): - for message in MESSAGES: - await ws.send(message) - await ws.close(1011) - - start_server = serve(handler, "localhost", 0) - server = self.loop.run_until_complete(start_server) - - messages = [] - - async def run_client(): - nonlocal messages - async with connect(get_server_uri(server)) as ws: - async for message in ws: - messages.append(message) - - with self.assertRaises(ConnectionClosed): - self.loop.run_until_complete(run_client()) - - self.assertEqual(messages, MESSAGES) - - server.close() - self.loop.run_until_complete(server.wait_closed()) diff --git a/tests/test_client_server.py b/tests/test_client_server.py index 20cef5925..cbac7a24c 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -7,7 +7,6 @@ import random import socket import ssl -import sys import tempfile import unittest import unittest.mock @@ -1306,10 +1305,6 @@ def setUp(self): def tearDown(self): self.loop.close() - # Asynchronous context managers are only enabled on Python ≥ 3.5.1. - @unittest.skipIf( - sys.version_info[:3] <= (3, 5, 0), "this test requires Python 3.5.1+" - ) def test_client(self): start_server = serve(handler, "localhost", 0) server = self.loop.run_until_complete(start_server) @@ -1327,10 +1322,6 @@ async def run_client(): server.close() self.loop.run_until_complete(server.wait_closed()) - # Asynchronous context managers are only enabled on Python ≥ 3.5.1. - @unittest.skipIf( - sys.version_info[:3] <= (3, 5, 0), "this test requires Python 3.5.1+" - ) def test_server(self): async def run_server(): # Use serve as an asynchronous context manager. @@ -1342,10 +1333,6 @@ async def run_server(): self.loop.run_until_complete(run_server()) - # Asynchronous context managers are only enabled on Python ≥ 3.5.1. 
- @unittest.skipIf( - sys.version_info[:3] <= (3, 5, 0), "this test requires Python 3.5.1+" - ) @unittest.skipUnless(hasattr(socket, "AF_UNIX"), "this test requires Unix sockets") def test_unix_server(self): async def run_server(path): @@ -1360,5 +1347,88 @@ async def run_server(path): self.loop.run_until_complete(run_server(path)) -if sys.version_info[:2] >= (3, 6): # pragma: no cover - from .py36._test_client_server import AsyncIteratorTests # noqa +class AsyncIteratorTests(unittest.TestCase): + + # This is a protocol-level feature, but since it's a high-level API, it is + # much easier to exercise at the client or server level. + + MESSAGES = ["3", "2", "1", "Fire!"] + + def setUp(self): + self.loop = asyncio.new_event_loop() + asyncio.set_event_loop(self.loop) + + def tearDown(self): + self.loop.close() + + def test_iterate_on_messages(self): + async def handler(ws, path): + for message in self.MESSAGES: + await ws.send(message) + + start_server = serve(handler, "localhost", 0) + server = self.loop.run_until_complete(start_server) + + messages = [] + + async def run_client(): + nonlocal messages + async with connect(get_server_uri(server)) as ws: + async for message in ws: + messages.append(message) + + self.loop.run_until_complete(run_client()) + + self.assertEqual(messages, self.MESSAGES) + + server.close() + self.loop.run_until_complete(server.wait_closed()) + + def test_iterate_on_messages_going_away_exit_ok(self): + async def handler(ws, path): + for message in self.MESSAGES: + await ws.send(message) + await ws.close(1001) + + start_server = serve(handler, "localhost", 0) + server = self.loop.run_until_complete(start_server) + + messages = [] + + async def run_client(): + nonlocal messages + async with connect(get_server_uri(server)) as ws: + async for message in ws: + messages.append(message) + + self.loop.run_until_complete(run_client()) + + self.assertEqual(messages, self.MESSAGES) + + server.close() + self.loop.run_until_complete(server.wait_closed()) + + def test_iterate_on_messages_internal_error_exit_not_ok(self): + async def handler(ws, path): + for message in self.MESSAGES: + await ws.send(message) + await ws.close(1011) + + start_server = serve(handler, "localhost", 0) + server = self.loop.run_until_complete(start_server) + + messages = [] + + async def run_client(): + nonlocal messages + async with connect(get_server_uri(server)) as ws: + async for message in ws: + messages.append(message) + + with self.assertRaises(ConnectionClosed): + self.loop.run_until_complete(run_client()) + + self.assertEqual(messages, self.MESSAGES) + + server.close() + self.loop.run_until_complete(server.wait_closed()) From 5c08626717e29e55b3e0180050c1292833ceef44 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 26 Jan 2019 10:01:22 +0100 Subject: [PATCH 046/281] Remove workarounds for bugs fixed in Python 3.6. --- src/websockets/protocol.py | 25 ------------------------- 1 file changed, 25 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index f87f40086..ec80ecbd9 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -906,12 +906,6 @@ async def write_frame(self, fin, opcode, data, *, _expected_state=State.OPEN): logger.debug("%s > %r", self.side, frame) frame.write(self.writer.write, mask=self.is_client, extensions=self.extensions) - # Backport of https://github.com/python/asyncio/pull/280. - # Remove when dropping support for Python < 3.6. 
- if self.writer.transport is not None: # pragma: no cover - if self.writer_is_closing(): - await asyncio.sleep(0) - try: # drain() cannot be called concurrently by multiple coroutines: # http://bugs.python.org/issue29930. Remove this lock when no @@ -926,25 +920,6 @@ async def write_frame(self, fin, opcode, data, *, _expected_state=State.OPEN): # with the correct code and reason. await self.ensure_open() - def writer_is_closing(self): - """ - Backport of https://github.com/python/asyncio/pull/291. - - Replace with ``self.writer.transport.is_closing()`` when dropping - support for Python < 3.6 and with ``self.writer.is_closing()`` when - https://bugs.python.org/issue31491 is fixed. - - """ - transport = self.writer.transport - try: - return transport.is_closing() - except AttributeError: # pragma: no cover - # This emulates what is_closing would return if it existed. - try: - return transport._closing - except AttributeError: - return transport._closed - async def write_close_frame(self, data=b""): """ Write a close frame if and only if the connection state is OPEN. From d836f8b107f040ce3877c21d19a029c6a534343b Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Fri, 25 Jan 2019 22:37:51 +0100 Subject: [PATCH 047/281] Take advantage of loop.create_future(). It's the best practice for creating futures in asyncio since Python 3.5.2. Fix #504. --- example/shutdown.py | 2 +- performance/mem_server.py | 2 +- src/websockets/__main__.py | 2 +- src/websockets/protocol.py | 8 ++++---- src/websockets/server.py | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/example/shutdown.py b/example/shutdown.py index dd3e8f6a4..6d75af192 100755 --- a/example/shutdown.py +++ b/example/shutdown.py @@ -15,7 +15,7 @@ async def echo_server(stop): loop = asyncio.get_event_loop() # The stop condition is set when receiving SIGTERM. -stop = asyncio.Future() +stop = loop.create_future() loop.add_signal_handler(signal.SIGTERM, stop.set_result, None) # Run the server until the stop condition is met. diff --git a/performance/mem_server.py b/performance/mem_server.py index 6c8cef2ec..0a4a29f76 100644 --- a/performance/mem_server.py +++ b/performance/mem_server.py @@ -49,7 +49,7 @@ async def mem_server(stop): loop = asyncio.get_event_loop() -stop = asyncio.Future() +stop = loop.create_future() loop.add_signal_handler(signal.SIGINT, stop.set_result, None) tracemalloc.start() diff --git a/src/websockets/__main__.py b/src/websockets/__main__.py index 078733912..4303ce22f 100644 --- a/src/websockets/__main__.py +++ b/src/websockets/__main__.py @@ -173,7 +173,7 @@ def main(): inputs = asyncio.Queue(loop=loop) # Create a stop condition when receiving SIGINT or SIGTERM. - stop = asyncio.Future(loop=loop) + stop = loop.create_future() # Schedule the task that will manage the connection. asyncio.ensure_future(run_client(args.uri, loop, inputs, stop), loop=loop) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index ec80ecbd9..3e02f8465 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -229,7 +229,7 @@ def __init__( # :meth:`connection_lost()` callback to a :class:`~asyncio.Future` # that can be awaited. (Other :class:`~asyncio.Protocol` callbacks are # translated by ``self.stream_reader``). - self.connection_lost_waiter = asyncio.Future(loop=loop) + self.connection_lost_waiter = loop.create_future() # Queue of received messages. 
         self.messages = collections.deque()
@@ -405,7 +405,7 @@ async def recv(self):
         # Wait until there's a message in the queue (if necessary) or the
         # connection is closed.
         while len(self.messages) <= 0:
-            pop_message_waiter = asyncio.Future(loop=self.loop)
+            pop_message_waiter = self.loop.create_future()
             self._pop_message_waiter = pop_message_waiter
             try:
                 # If asyncio.wait() is canceled, it doesn't cancel
@@ -609,7 +609,7 @@ async def ping(self, data=None):
         while data is None or data in self.pings:
             data = struct.pack("!I", random.getrandbits(32))
 
-        self.pings[data] = asyncio.Future(loop=self.loop)
+        self.pings[data] = self.loop.create_future()
 
         await self.write_frame(True, OP_PING, data)
 
@@ -692,7 +692,7 @@ async def transfer_data(self):
 
             # Wait until there's room in the queue (if necessary).
             while len(self.messages) >= self.max_queue:
-                self._put_message_waiter = asyncio.Future(loop=self.loop)
+                self._put_message_waiter = self.loop.create_future()
                 try:
                     await self._put_message_waiter
                 finally:
diff --git a/src/websockets/server.py b/src/websockets/server.py
index 979fbcd1b..a59107b24 100644
--- a/src/websockets/server.py
+++ b/src/websockets/server.py
@@ -546,7 +546,7 @@ def __init__(self, loop):
         self.close_task = None
 
         # Completed when the server is closed and connections are terminated.
-        self.closed_waiter = asyncio.Future(loop=loop)
+        self.closed_waiter = loop.create_future()
 
     def wrap(self, server):
         """

From 123d471bc94998651808343db69391930900a1f5 Mon Sep 17 00:00:00 2001
From: Aymeric Augustin
Date: Sat, 26 Jan 2019 13:03:20 +0100
Subject: [PATCH 048/281] Update compliance testing scripts for Python 3.6.

---
 compliance/test_client.py | 31 +++++++++++++------------------
 compliance/test_server.py | 14 ++++++--------
 2 files changed, 19 insertions(+), 26 deletions(-)

diff --git a/compliance/test_client.py b/compliance/test_client.py
index 1c1d4416a..5fd0f4b4f 100644
--- a/compliance/test_client.py
+++ b/compliance/test_client.py
@@ -12,41 +12,36 @@
 # logging.getLogger('websockets').setLevel(logging.DEBUG)
 
-SERVER = 'ws://127.0.0.1:8642'
-AGENT = 'websockets'
+SERVER = "ws://127.0.0.1:8642"
+AGENT = "websockets"
 
 
 async def get_case_count(server):
-    uri = server + '/getCaseCount'
-    ws = await websockets.connect(uri)
-    msg = await ws.recv()
-    await ws.close()
+    uri = f"{server}/getCaseCount"
+    async with websockets.connect(uri) as ws:
+        msg = await ws.recv()
     return json.loads(msg)
 
 
 async def run_case(server, case, agent):
-    uri = server + '/runCase?case={}&agent={}'.format(case, agent)
-    ws = await websockets.connect(uri, max_size=2 ** 25, max_queue=1)
-    while True:
-        try:
-            msg = await ws.recv()
+    uri = f"{server}/runCase?case={case}&agent={agent}"
+    async with websockets.connect(uri, max_size=2 ** 25, max_queue=1) as ws:
+        async for msg in ws:
             await ws.send(msg)
-        except websockets.ConnectionClosed:
-            break
 
 
 async def update_reports(server, agent):
-    uri = server + '/updateReports?agent={}'.format(agent)
-    ws = await websockets.connect(uri)
-    await ws.close()
+    uri = f"{server}/updateReports?agent={agent}"
+    async with websockets.connect(uri):
+        pass
 
 
 async def run_tests(server, agent):
     cases = await get_case_count(server)
     for case in range(1, cases + 1):
-        print("Running test case {} out of {}".format(case, cases), end="\r")
+        print(f"Running test case {case} out of {cases}", end="\r")
         await run_case(server, case, agent)
-    print("Ran {} test cases ".format(cases))
+    print(f"Ran {cases} test cases ")
     await update_reports(server, agent)
 
 
diff --git a/compliance/test_server.py b/compliance/test_server.py
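
A minimal sketch of the loop.create_future() pattern adopted in PATCH 047 above; the coroutine name and the 0.1-second delay are invented for illustration and are not part of the patch:

    import asyncio

    async def wait_for_stop() -> None:
        loop = asyncio.get_event_loop()
        # Preferred since Python 3.5.2: the loop can supply an optimized Future
        # subclass, unlike instantiating asyncio.Future(loop=loop) directly.
        stop = loop.create_future()
        # Something else (e.g. a signal handler) eventually calls set_result().
        loop.call_later(0.1, stop.set_result, None)
        await stop

    asyncio.get_event_loop().run_until_complete(wait_for_stop())
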
index ac5990d16..8020f68d3 100644 --- a/compliance/test_server.py +++ b/compliance/test_server.py @@ -10,17 +10,15 @@ # logging.getLogger('websockets').setLevel(logging.DEBUG) +HOST, PORT = "127.0.0.1", 8642 + + async def echo(ws, path): - while True: - try: - msg = await ws.recv() - await ws.send(msg) - except websockets.ConnectionClosed: - break + async for msg in ws: + await ws.send(msg) -start_server = websockets.serve( - echo, '127.0.0.1', 8642, max_size=2 ** 25, max_queue=1) +start_server = websockets.serve(echo, HOST, PORT, max_size=2 ** 25, max_queue=1) try: asyncio.get_event_loop().run_until_complete(start_server) From 8c86ca1d1e9b8b947c5512f71a78afd5d36bd40c Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 27 Jan 2019 20:57:11 +0100 Subject: [PATCH 049/281] Switch to f-strings for string formatting. --- docs/conf.py | 2 +- src/websockets/__main__.py | 60 ++++++++----------- src/websockets/client.py | 24 ++++---- src/websockets/exceptions.py | 24 ++++---- .../extensions/permessage_deflate.py | 23 +++---- src/websockets/framing.py | 6 +- src/websockets/headers.py | 4 +- src/websockets/http.py | 9 +-- src/websockets/protocol.py | 4 +- src/websockets/server.py | 8 +-- src/websockets/uri.py | 2 +- tests/test_client_server.py | 4 +- tests/test_http.py | 6 +- tests/test_protocol.py | 6 +- 14 files changed, 77 insertions(+), 105 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 4ad4ad4b7..504656afc 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -51,7 +51,7 @@ # General information about the project. project = 'websockets' -copyright = '2013-{}, Aymeric Augustin'.format(datetime.date.today().year) +copyright = f'2013-{datetime.date.today().year}, Aymeric Augustin and contributors' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the diff --git a/src/websockets/__main__.py b/src/websockets/__main__.py index 4303ce22f..f438750c9 100644 --- a/src/websockets/__main__.py +++ b/src/websockets/__main__.py @@ -53,36 +53,32 @@ def exit_from_event_loop_thread(loop, stop): def print_during_input(string): sys.stdout.write( - ( - # Save cursor position - "\N{ESC}7" - # Add a new line - "\N{LINE FEED}" - # Move cursor up - "\N{ESC}[A" - # Insert blank line, scroll last line down - "\N{ESC}[L" - # Print string in the inserted blank line - "{string}\N{LINE FEED}" - # Restore cursor position - "\N{ESC}8" - # Move cursor down - "\N{ESC}[B" - ).format(string=string) + # Save cursor position + "\N{ESC}7" + # Add a new line + "\N{LINE FEED}" + # Move cursor up + "\N{ESC}[A" + # Insert blank line, scroll last line down + "\N{ESC}[L" + # Print string in the inserted blank line + f"{string}\N{LINE FEED}" + # Restore cursor position + "\N{ESC}8" + # Move cursor down + "\N{ESC}[B" ) sys.stdout.flush() def print_over_input(string): sys.stdout.write( - ( - # Move cursor to beginning of line - "\N{CARRIAGE RETURN}" - # Delete current line - "\N{ESC}[K" - # Print string - "{string}\N{LINE FEED}" - ).format(string=string) + # Move cursor to beginning of line + "\N{CARRIAGE RETURN}" + # Delete current line + "\N{ESC}[K" + # Print string + f"{string}\N{LINE FEED}" ) sys.stdout.flush() @@ -91,11 +87,11 @@ async def run_client(uri, loop, inputs, stop): try: websocket = await websockets.connect(uri) except Exception as exc: - print_over_input("Failed to connect to {}: {}.".format(uri, exc)) + print_over_input(f"Failed to connect to {uri}: {exc}.") exit_from_event_loop_thread(loop, stop) return else: - 
print_during_input("Connected to {}.".format(uri)) + print_during_input(f"Connected to {uri}.") try: while True: @@ -130,9 +126,7 @@ async def run_client(uri, loop, inputs, stop): await websocket.close() close_status = format_close(websocket.close_code, websocket.close_reason) - print_over_input( - "Connection closed: {close_status}.".format(close_status=close_status) - ) + print_over_input(f"Connection closed: {close_status}.") exit_from_event_loop_thread(loop, stop) @@ -144,11 +138,9 @@ def main(): win_enable_vt100() except RuntimeError as exc: sys.stderr.write( - ( - "Unable to set terminal to VT100 mode. This is only " - "supported since Win10 anniversary update. Expect " - "weird symbols on the terminal.\nError: {exc!s}\n" - ).format(exc=exc) + f"Unable to set terminal to VT100 mode. This is only " + f"supported since Win10 anniversary update. Expect " + f"weird symbols on the terminal.\nError: {exc}\n" ) sys.stderr.flush() diff --git a/src/websockets/client.py b/src/websockets/client.py index 9babbb412..57fd33b25 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -52,7 +52,7 @@ def __init__( extensions=None, subprotocols=None, extra_headers=None, - **kwds + **kwds, ): self.origin = origin self.available_extensions = extensions @@ -73,7 +73,7 @@ def write_http_request(self, path, headers): # Since the path and headers only contain ASCII characters, # we can keep this simple. - request = "GET {path} HTTP/1.1\r\n".format(path=path) + request = f"GET {path} HTTP/1.1\r\n" request += str(headers) self.writer.write(request.encode()) @@ -170,9 +170,8 @@ def process_extensions(headers, available_extensions): # matched what the server sent. Fail the connection. else: raise NegotiationError( - "Unsupported extension: name = {}, params = {}".format( - name, response_params - ) + f"Unsupported extension: " + f"name = {name}, params = {response_params}" ) return accepted_extensions @@ -205,16 +204,13 @@ def process_subprotocol(headers, available_subprotocols): ) if len(parsed_header_values) > 1: - raise InvalidHandshake( - "Multiple subprotocols: {}".format(", ".join(parsed_header_values)) - ) + subprotocols = ", ".join(parsed_header_values) + raise InvalidHandshake(f"Multiple subprotocols: {subprotocols}") subprotocol = parsed_header_values[0] if subprotocol not in available_subprotocols: - raise NegotiationError( - "Unsupported subprotocol: {}".format(subprotocol) - ) + raise NegotiationError(f"Unsupported subprotocol: {subprotocol}") return subprotocol @@ -251,7 +247,7 @@ async def handshake( if wsuri.port == (443 if wsuri.secure else 80): # pragma: no cover request_headers["Host"] = wsuri.host else: - request_headers["Host"] = "{}:{}".format(wsuri.host, wsuri.port) + request_headers["Host"] = f"{wsuri.host}:{wsuri.port}" if wsuri.user_info: request_headers["Authorization"] = build_basic_auth(*wsuri.user_info) @@ -382,7 +378,7 @@ def __init__( extensions=None, subprotocols=None, extra_headers=None, - **kwds + **kwds, ): if loop is None: loop = asyncio.get_event_loop() @@ -417,7 +413,7 @@ def __init__( ClientPerMessageDeflateFactory(client_max_window_bits=True) ) elif compression is not None: - raise ValueError("Unsupported compression: {}".format(compression)) + raise ValueError(f"Unsupported compression: {compression}") self._create_protocol = create_protocol self._ping_interval = ping_interval diff --git a/src/websockets/exceptions.py b/src/websockets/exceptions.py index 611e68188..50f3ab373 100644 --- a/src/websockets/exceptions.py +++ b/src/websockets/exceptions.py @@ 
-37,9 +37,7 @@ def __init__(self, status, headers, body=b""): self.status = status self.headers = headers self.body = body - message = "HTTP {}, {} headers, {} bytes".format( - status, len(headers), len(body) - ) + message = f"HTTP {status}, {len(headers)} headers, {len(body)} bytes" super().__init__(message) @@ -68,11 +66,11 @@ class InvalidHeader(InvalidHandshake): def __init__(self, name, value=None): if value is None: - message = "Missing {} header".format(name) + message = f"Missing {name} header" elif value == "": - message = "Empty {} header".format(name) + message = f"Empty {name} header" else: - message = "Invalid {} header: {}".format(name, value) + message = f"Invalid {name} header: {value}" super().__init__(message) @@ -83,7 +81,7 @@ class InvalidHeaderFormat(InvalidHeader): """ def __init__(self, name, error, string, pos): - error = "{} at {} in {}".format(error, pos, string) + error = f"{error} at {pos} in {string}" super().__init__(name, error) @@ -121,7 +119,7 @@ class InvalidStatusCode(InvalidHandshake): def __init__(self, status_code): self.status_code = status_code - message = "Status code not 101: {}".format(status_code) + message = f"Status code not 101: {status_code}" super().__init__(message) @@ -140,7 +138,7 @@ class InvalidParameterName(NegotiationError): def __init__(self, name): self.name = name - message = "Invalid parameter name: {}".format(name) + message = f"Invalid parameter name: {name}" super().__init__(message) @@ -153,7 +151,7 @@ class InvalidParameterValue(NegotiationError): def __init__(self, name, value): self.name = name self.value = value - message = "Invalid value for parameter {}: {}".format(name, value) + message = f"Invalid value for parameter {name}: {value}" super().__init__(message) @@ -165,7 +163,7 @@ class DuplicateParameter(NegotiationError): def __init__(self, name): self.name = name - message = "Duplicate parameter: {}".format(name) + message = f"Duplicate parameter: {name}" super().__init__(message) @@ -205,10 +203,10 @@ def format_close(code, reason): explanation = "private use" else: explanation = CLOSE_CODES.get(code, "unknown") - result = "code = {} ({}), ".format(code, explanation) + result = f"code = {code} ({explanation}), " if reason: - result += "reason = {}".format(reason) + result += f"reason = {reason}" else: result += "no reason" diff --git a/src/websockets/extensions/permessage_deflate.py b/src/websockets/extensions/permessage_deflate.py index 167746021..2c2be49bd 100644 --- a/src/websockets/extensions/permessage_deflate.py +++ b/src/websockets/extensions/permessage_deflate.py @@ -186,7 +186,7 @@ def process_response_params(self, params, accepted_extensions): """ if any(other.name == self.name for other in accepted_extensions): - raise NegotiationError("Received duplicate {}".format(self.name)) + raise NegotiationError(f"Received duplicate {self.name}") # Request parameters are available in instance variables. @@ -339,7 +339,7 @@ def process_request_params(self, params, accepted_extensions): """ if any(other.name == self.name for other in accepted_extensions): - raise NegotiationError("Skipped duplicate {}".format(self.name)) + raise NegotiationError(f"Skipped duplicate {self.name}") # Load request parameters in local variables. 
( @@ -491,16 +491,11 @@ def __init__( def __repr__(self): return ( - "PerMessageDeflate(" - "remote_no_context_takeover={}, " - "local_no_context_takeover={}, " - "remote_max_window_bits={}, " - "local_max_window_bits={})" - ).format( - self.remote_no_context_takeover, - self.local_no_context_takeover, - self.remote_max_window_bits, - self.local_max_window_bits, + f"PerMessageDeflate(" + f"remote_no_context_takeover={self.remote_no_context_takeover}, " + f"local_no_context_takeover={self.local_no_context_takeover}, " + f"remote_max_window_bits={self.remote_max_window_bits}, " + f"local_max_window_bits={self.local_max_window_bits})" ) def decode(self, frame, *, max_size=None): @@ -544,9 +539,7 @@ def decode(self, frame, *, max_size=None): data = self.decoder.decompress(data, max_length) if self.decoder.unconsumed_tail: raise PayloadTooBig( - "Uncompressed payload length exceeds size limit (? > {} bytes)".format( - max_size - ) + f"Uncompressed payload length exceeds size limit (? > {max_size} bytes)" ) # Allow garbage collection of the decoder if it won't be reused. diff --git a/src/websockets/framing.py b/src/websockets/framing.py index c6b5564f5..0abe8f8db 100644 --- a/src/websockets/framing.py +++ b/src/websockets/framing.py @@ -117,9 +117,7 @@ async def read(cls, reader, *, mask, max_size=None, extensions=None): length, = struct.unpack("!Q", data) if max_size is not None and length > max_size: raise PayloadTooBig( - "Payload length exceeds size limit ({} > {} bytes)".format( - length, max_size - ) + f"Payload length exceeds size limit ({length} > {max_size} bytes)" ) if mask: mask_bits = await reader(4) @@ -231,7 +229,7 @@ def check(frame): if not frame.fin: raise WebSocketProtocolError("Fragmented control frame") else: - raise WebSocketProtocolError("Invalid opcode: {}".format(frame.opcode)) + raise WebSocketProtocolError(f"Invalid opcode: {frame.opcode}") def prepare_data(data): diff --git a/src/websockets/headers.py b/src/websockets/headers.py index 6151b16db..73f11edce 100644 --- a/src/websockets/headers.py +++ b/src/websockets/headers.py @@ -302,7 +302,7 @@ def build_extension(name, parameters): [name] + [ # Quoted strings aren't necessary because values are always tokens. - name if value is None else "{}={}".format(name, value) + name if value is None else f"{name}={value}" for name, value in parameters ] ) @@ -347,6 +347,6 @@ def build_basic_auth(username, password): """ # https://tools.ietf.org/html/rfc7617#section-2 assert ":" not in username - user_pass = "{}:{}".format(username, password) + user_pass = f"{username}:{password}" basic_credentials = base64.b64encode(user_pass.encode()).decode() return "Basic " + basic_credentials diff --git a/src/websockets/http.py b/src/websockets/http.py index e28acac9f..ab74614af 100644 --- a/src/websockets/http.py +++ b/src/websockets/http.py @@ -25,7 +25,7 @@ MAX_HEADERS = 256 MAX_LINE = 4096 -USER_AGENT = "Python/{} websockets/{}".format(sys.version[:3], websockets_version) +USER_AGENT = f"Python/{sys.version[:3]} websockets/{websockets_version}" # See https://tools.ietf.org/html/rfc7230#appendix-B. 
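
As a quick illustration of the header helper converted a little above, build_basic_auth() produces an RFC 7617 Authorization value; the credentials below are made-up examples:

    from websockets.headers import build_basic_auth

    # "user:pass" base64-encodes to "dXNlcjpwYXNz".
    assert build_basic_auth("user", "pass") == "Basic dXNlcjpwYXNz"
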
@@ -252,13 +252,10 @@ def __init__(self, *args, **kwargs): self.update(*args, **kwargs) def __str__(self): - return ( - "".join("{}: {}\r\n".format(key, value) for key, value in self._list) - + "\r\n" - ) + return "".join(f"{key}: {value}\r\n" for key, value in self._list) + "\r\n" def __repr__(self): - return "{}({})".format(self.__class__.__name__, repr(self._list)) + return f"{self.__class__.__name__}({self._list!r})" def copy(self): copy = self.__class__() diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 3e02f8465..f0b126934 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -167,7 +167,7 @@ def __init__( write_limit=2 ** 16, loop=None, legacy_recv=False, - timeout=10 + timeout=10, ): # Backwards-compatibility: close_timeout used to be called timeout. # If both are specified, timeout is ignored. @@ -899,7 +899,7 @@ async def write_frame(self, fin, opcode, data, *, _expected_state=State.OPEN): # Defensive assertion for protocol compliance. if self.state is not _expected_state: # pragma: no cover raise InvalidState( - "Cannot write to a WebSocket in the {} state".format(self.state.name) + f"Cannot write to a WebSocket in the {self.state.name} state" ) frame = Frame(fin, opcode, data) diff --git a/src/websockets/server.py b/src/websockets/server.py index a59107b24..007ebd725 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -57,7 +57,7 @@ def __init__( extra_headers=None, process_request=None, select_subprotocol=None, - **kwds + **kwds, ): # For backwards-compatibility with 6.0 or earlier. if origins is not None and "" in origins: @@ -225,7 +225,7 @@ def write_http_response(self, status, headers, body=None): # Since the status line and headers only contain ASCII characters, # we can keep this simple. - response = "HTTP/1.1 {status.value} {status.phrase}\r\n".format(status=status) + response = f"HTTP/1.1 {status.value} {status.phrase}\r\n" response += str(headers) self.writer.write(response.encode()) @@ -775,7 +775,7 @@ def __init__( extra_headers=None, process_request=None, select_subprotocol=None, - **kwds + **kwds, ): # Backwards-compatibility: close_timeout used to be called timeout. # If both are specified, timeout is ignored. 
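
A small sketch of the Headers serialization whose __str__ was rewritten earlier in this patch; the header names and values are illustrative only:

    from websockets.http import Headers

    h = Headers([("Connection", "Upgrade"), ("Upgrade", "websocket")])
    # Each (name, value) pair becomes "name: value\r\n", followed by a blank line.
    assert str(h) == "Connection: Upgrade\r\nUpgrade: websocket\r\n\r\n"
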
@@ -803,7 +803,7 @@ def __init__( ): extensions.append(ServerPerMessageDeflateFactory()) elif compression is not None: - raise ValueError("Unsupported compression: {}".format(compression)) + raise ValueError(f"Unsupported compression: {compression}") factory = lambda: create_protocol( ws_handler, diff --git a/src/websockets/uri.py b/src/websockets/uri.py index b6e1ad0ce..730adf54e 100644 --- a/src/websockets/uri.py +++ b/src/websockets/uri.py @@ -47,7 +47,7 @@ def parse_uri(uri): assert uri.fragment == "" assert uri.hostname is not None except AssertionError as exc: - raise InvalidURI("{} isn't a valid URI".format(uri)) from exc + raise InvalidURI(f"{uri} isn't a valid URI") from exc secure = uri.scheme == "wss" host = uri.hostname diff --git a/tests/test_client_server.py b/tests/test_client_server.py index cbac7a24c..fc88b3139 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -147,7 +147,7 @@ def get_server_uri(server, secure=False, resource_name="/", user_info=None): if server_socket.family == socket.AF_INET6: # pragma: no cover host, port = server_socket.getsockname()[:2] # (no IPv6 on CI) - host = "[{}]".format(host) + host = f"[{host}]" elif server_socket.family == socket.AF_INET: host, port = server_socket.getsockname() elif server_socket.family == socket.AF_UNIX: @@ -156,7 +156,7 @@ def get_server_uri(server, secure=False, resource_name="/", user_info=None): else: # pragma: no cover raise ValueError("Expected an IPv6, IPv4, or Unix socket") - return "{}://{}{}:{}{}".format(proto, user_info, host, port, resource_name) + return f"{proto}://{user_info}{host}:{port}{resource_name}" class UnauthorizedServerProtocol(WebSocketServerProtocol): diff --git a/tests/test_http.py b/tests/test_http.py index b28bed6ce..a3a8cd403 100644 --- a/tests/test_http.py +++ b/tests/test_http.py @@ -108,15 +108,13 @@ def setUp(self): def test_str(self): self.assertEqual( - str(self.headers), - "Connection: Upgrade\r\nServer: {}\r\n\r\n".format(USER_AGENT), + str(self.headers), f"Connection: Upgrade\r\nServer: {USER_AGENT}\r\n\r\n" ) def test_repr(self): self.assertEqual( repr(self.headers), - "Headers([('Connection', 'Upgrade'), " - "('Server', '{}')])".format(USER_AGENT), + f"Headers([('Connection', 'Upgrade'), " f"('Server', '{USER_AGENT}')])", ) def test_multiple_values_error_str(self): diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 9e9d40393..154948e43 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -321,8 +321,8 @@ def assertCompletesWithin(self, min_time, max_time): yield t1 = self.loop.time() dt = t1 - t0 - self.assertGreaterEqual(dt, min_time, "Too fast: {} < {}".format(dt, min_time)) - self.assertLess(dt, max_time, "Too slow: {} >= {}".format(dt, max_time)) + self.assertGreaterEqual(dt, min_time, f"Too fast: {dt} < {min_time}") + self.assertLess(dt, max_time, f"Too slow: {dt} >= {max_time}") # Test public attributes. @@ -499,7 +499,7 @@ def test_recv_canceled(self): def test_recv_canceled_race_condition(self): recv = self.loop.create_task( - asyncio.wait_for(self.protocol.recv(), timeout=0.000001) + asyncio.wait_for(self.protocol.recv(), timeout=0.000_001) ) self.loop.call_soon( self.receive_frame, Frame(True, OP_TEXT, "café".encode("utf-8")) From 7bff03bbd74ab10a08237efbfb61cc831fa67de8 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Fri, 18 Jan 2019 22:47:33 +0100 Subject: [PATCH 050/281] Get a clean, non-strict mypy run. 
--- .circleci/config.yml | 2 +- .gitignore | 1 + Makefile | 1 + src/websockets/protocol.py | 10 +++++----- src/websockets/server.py | 11 +++++++---- src/websockets/speedups.pyi | 1 + tox.ini | 6 +++++- 7 files changed, 21 insertions(+), 11 deletions(-) create mode 100644 src/websockets/speedups.pyi diff --git a/.circleci/config.yml b/.circleci/config.yml index 8a7df9ac6..a6c85d237 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -9,7 +9,7 @@ jobs: - run: sudo cp /etc/hosts /tmp; sudo sed -i '/::1/d' /tmp/hosts; sudo cp /tmp/hosts /etc - checkout - run: sudo pip install tox codecov - - run: tox -e coverage,black,flake8,isort + - run: tox -e coverage,black,flake8,isort,mypy - run: codecov py36: docker: diff --git a/.gitignore b/.gitignore index 4dc1216b7..ef0d16520 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ *.pyc *.so .coverage +.mypy_cache .tox build/ compliance/reports/ diff --git a/Makefile b/Makefile index 9fa5c2422..f94a9103c 100644 --- a/Makefile +++ b/Makefile @@ -5,6 +5,7 @@ style: isort --recursive src tests black src tests flake8 src tests + mypy src test: python -W default -m unittest diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index f0b126934..5cc1bcc90 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -146,11 +146,11 @@ class WebSocketCommonProtocol(asyncio.StreamReaderProtocol): """ - # There are only two differences between the client-side and the server- - # side behavior: masking the payload and closing the underlying TCP - # connection. Set is_client and side to pick a side. - is_client = None - side = "undefined" + # There are only two differences between the client-side and server-side + # behavior: masking the payload and closing the underlying TCP connection. + # Set is_client = True/False and side = "client"/"server" to pick a side. + is_client: bool + side: str = "undefined" def __init__( self, diff --git a/src/websockets/server.py b/src/websockets/server.py index 007ebd725..7fd32ba1e 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -69,10 +69,8 @@ def __init__( self.available_extensions = extensions self.available_subprotocols = subprotocols self.extra_headers = extra_headers - if process_request is not None: - self.process_request = process_request - if select_subprotocol is not None: - self.select_subprotocol = select_subprotocol + self._process_request = process_request + self._select_subprotocol = select_subprotocol super().__init__(**kwds) def connection_made(self, transport): @@ -264,6 +262,8 @@ def process_request(self, path, request_headers): function. """ + if self._process_request is not None: + return self._process_request(path, request_headers) @staticmethod def process_origin(headers, origins=None): @@ -414,6 +414,9 @@ def select_subprotocol(self, client_subprotocols, server_subprotocols): :func:`serve` function. """ + if self._select_subprotocol is not None: + return self._select_subprotocol(client_subprotocols, server_subprotocols) + subprotocols = set(client_subprotocols) & set(server_subprotocols) if not subprotocols: return None diff --git a/src/websockets/speedups.pyi b/src/websockets/speedups.pyi new file mode 100644 index 000000000..821438a06 --- /dev/null +++ b/src/websockets/speedups.pyi @@ -0,0 +1 @@ +def apply_mask(data: bytes, mask: bytes) -> bytes: ... 
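
The new stub file is what lets mypy type-check callers of the C extension; a rough sketch of what it enables, assuming the speedups module is compiled (the argument values are arbitrary):

    # Resolved against speedups.pyi by mypy; at runtime this needs the C extension.
    from websockets.speedups import apply_mask

    masked: bytes = apply_mask(b"data", b"\x00\x01\x02\x03")  # bytes -> bytes: OK
    # apply_mask("data", b"\x00\x01\x02\x03")  # str argument: flagged by mypy
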
diff --git a/tox.ini b/tox.ini
index 238fcd649..4d085f56c 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
 [tox]
-envlist = py36,py37,coverage,black,flake8,isort
+envlist = py36,py37,coverage,black,flake8,isort,mypy
 
 [testenv]
 commands = python -W default -m unittest {posargs}
@@ -22,3 +22,7 @@ deps = flake8
 [testenv:isort]
 commands = isort --check-only --recursive src tests
 deps = isort
+
+[testenv:mypy]
+commands = mypy src
+deps = mypy

From 94945fec3280c1c6d04aa4c9dc41a794f1ef1a42 Mon Sep 17 00:00:00 2001
From: Aymeric Augustin
Date: Sat, 19 Jan 2019 21:00:06 +0100
Subject: [PATCH 051/281] Annotate source code with type hints.

Fix a few minor bugs revealed by static typing.

This required adjustments where the code wasn't statically typable, usually
because a variable was reused with a different type.
---
 Makefile                                      |   2 +-
 docs/changelog.rst                            |   2 +
 src/websockets/__main__.py                    |  79 ++--
 src/websockets/client.py                      | 121 +++--
 src/websockets/exceptions.py                  |  45 +-
 src/websockets/extensions/base.py             |  88 ++--
 .../extensions/permessage_deflate.py          | 347 +++++++-------
 src/websockets/framing.py                     |  82 +++-
 src/websockets/handshake.py                   |  20 +-
 src/websockets/headers.py                     |  55 ++-
 src/websockets/http.py                        |  94 ++--
 src/websockets/protocol.py                    | 237 +++++-----
 src/websockets/server.py                      | 243 ++++++----
 src/websockets/uri.py                         |  50 +-
 src/websockets/utils.py                       |   2 +-
 tests/extensions/test_permessage_deflate.py   | 438 +++++++++---------
 tests/test_exceptions.py                      |   2 +-
 tests/test_http.py                            |   3 +
 tox.ini                                       |   2 +-
 19 files changed, 1088 insertions(+), 824 deletions(-)

diff --git a/Makefile b/Makefile
index f94a9103c..30dbfd9c1 100644
--- a/Makefile
+++ b/Makefile
@@ -5,7 +5,7 @@ style:
 	isort --recursive src tests
 	black src tests
 	flake8 src tests
-	mypy src
+	mypy --strict src
 
 test:
 	python -W default -m unittest
diff --git a/docs/changelog.rst b/docs/changelog.rst
index b53080501..1c4b1bc96 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -28,6 +28,8 @@ Also:
 * :func:`~client.connect()` handles redirects from the server during the
   handshake.
 
+* Added type hints (:pep:`484`).
+
 * Added documentation for extensions.
 
 * Documented how to optimize memory usage.
diff --git a/src/websockets/__main__.py b/src/websockets/__main__.py
index f438750c9..604caa5e4 100644
--- a/src/websockets/__main__.py
+++ b/src/websockets/__main__.py
@@ -4,42 +4,47 @@
 import signal
 import sys
 import threading
+from typing import Any, Set
 
 import websockets
 from websockets.exceptions import format_close
 
 
-def win_enable_vt100():
-    """
-    Enable VT-100 for console output on Windows.
+if sys.platform == "win32":
 
-    See also https://bugs.python.org/issue29059.
+    def win_enable_vt100() -> None:
+        """
+        Enable VT-100 for console output on Windows.
 
-    """
-    import ctypes
+        See also https://bugs.python.org/issue29059.
 
- STD_OUTPUT_HANDLE = ctypes.c_uint(-11) - INVALID_HANDLE_VALUE = ctypes.c_uint(-1) - ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x004 + """ + import ctypes - handle = ctypes.windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE) - if handle == INVALID_HANDLE_VALUE: - raise RuntimeError("Unable to obtain stdout handle") + STD_OUTPUT_HANDLE = ctypes.c_uint(-11) + INVALID_HANDLE_VALUE = ctypes.c_uint(-1) + ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x004 - cur_mode = ctypes.c_uint() - if ctypes.windll.kernel32.GetConsoleMode(handle, ctypes.byref(cur_mode)) == 0: - raise RuntimeError("Unable to query current console mode") + handle = ctypes.windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE) + if handle == INVALID_HANDLE_VALUE: + raise RuntimeError("Unable to obtain stdout handle") - # ctypes ints lack support for the required bit-OR operation. - # Temporarily convert to Py int, do the OR and convert back. - py_int_mode = int.from_bytes(cur_mode, sys.byteorder) - new_mode = ctypes.c_uint(py_int_mode | ENABLE_VIRTUAL_TERMINAL_PROCESSING) + cur_mode = ctypes.c_uint() + if ctypes.windll.kernel32.GetConsoleMode(handle, ctypes.byref(cur_mode)) == 0: + raise RuntimeError("Unable to query current console mode") - if ctypes.windll.kernel32.SetConsoleMode(handle, new_mode) == 0: - raise RuntimeError("Unable to set console mode") + # ctypes ints lack support for the required bit-OR operation. + # Temporarily convert to Py int, do the OR and convert back. + py_int_mode = int.from_bytes(cur_mode, sys.byteorder) + new_mode = ctypes.c_uint(py_int_mode | ENABLE_VIRTUAL_TERMINAL_PROCESSING) + if ctypes.windll.kernel32.SetConsoleMode(handle, new_mode) == 0: + raise RuntimeError("Unable to set console mode") -def exit_from_event_loop_thread(loop, stop): + +def exit_from_event_loop_thread( + loop: asyncio.AbstractEventLoop, stop: asyncio.Future[None] +) -> None: loop.stop() if not stop.done(): # When exiting the thread that runs the event loop, raise @@ -51,7 +56,7 @@ def exit_from_event_loop_thread(loop, stop): os.kill(os.getpid(), ctrl_c) -def print_during_input(string): +def print_during_input(string: str) -> None: sys.stdout.write( # Save cursor position "\N{ESC}7" @@ -71,7 +76,7 @@ def print_during_input(string): sys.stdout.flush() -def print_over_input(string): +def print_over_input(string: str) -> None: sys.stdout.write( # Move cursor to beginning of line "\N{CARRIAGE RETURN}" @@ -83,7 +88,12 @@ def print_over_input(string): sys.stdout.flush() -async def run_client(uri, loop, inputs, stop): +async def run_client( + uri: str, + loop: asyncio.AbstractEventLoop, + inputs: asyncio.Queue[str], + stop: asyncio.Future[None], +) -> None: try: websocket = await websockets.connect(uri) except Exception as exc: @@ -95,8 +105,10 @@ async def run_client(uri, loop, inputs, stop): try: while True: - incoming = asyncio.ensure_future(websocket.recv()) - outgoing = asyncio.ensure_future(inputs.get()) + incoming: asyncio.Future[Any] = asyncio.ensure_future(websocket.recv()) + outgoing: asyncio.Future[Any] = asyncio.ensure_future(inputs.get()) + done: Set[asyncio.Future[Any]] + pending: Set[asyncio.Future[Any]] done, pending = await asyncio.wait( [incoming, outgoing, stop], return_when=asyncio.FIRST_COMPLETED ) @@ -113,7 +125,10 @@ async def run_client(uri, loop, inputs, stop): except websockets.ConnectionClosed: break else: - print_during_input("< " + message) + if isinstance(message, str): + print_during_input("< " + message) + else: + print_during_input("< (binary) " + message.hex()) if outgoing in done: message = outgoing.result() @@ -131,9 
+146,9 @@ async def run_client(uri, loop, inputs, stop): exit_from_event_loop_thread(loop, stop) -def main(): +def main() -> None: # If we're on Windows, enable VT100 terminal support. - if os.name == "nt": + if sys.platform == "win32": try: win_enable_vt100() except RuntimeError as exc: @@ -162,10 +177,10 @@ def main(): loop = asyncio.new_event_loop() # Create a queue of user inputs. There's no need to limit its size. - inputs = asyncio.Queue(loop=loop) + inputs: asyncio.Queue[str] = asyncio.Queue(loop=loop) # Create a stop condition when receiving SIGINT or SIGTERM. - stop = loop.create_future() + stop: asyncio.Future[None] = loop.create_future() # Schedule the task that will manage the connection. asyncio.ensure_future(run_client(args.uri, loop, inputs, stop), loop=loop) diff --git a/src/websockets/client.py b/src/websockets/client.py index 57fd33b25..40c5b0073 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -6,6 +6,8 @@ import asyncio import collections.abc import logging +from types import TracebackType +from typing import Any, Generator, List, Optional, Tuple, Type, cast from .exceptions import ( InvalidHandshake, @@ -14,18 +16,20 @@ NegotiationError, RedirectHandshake, ) +from .extensions.base import ClientExtensionFactory, Extension from .extensions.permessage_deflate import ClientPerMessageDeflateFactory from .handshake import build_request, check_response from .headers import ( + ExtensionHeader, build_basic_auth, build_extension_list, build_subprotocol_list, parse_extension_list, parse_subprotocol_list, ) -from .http import USER_AGENT, Headers, read_response +from .http import USER_AGENT, Headers, HeadersLike, read_response from .protocol import WebSocketCommonProtocol -from .uri import parse_uri +from .uri import WebSocketURI, parse_uri __all__ = ["connect", "WebSocketClientProtocol"] @@ -48,19 +52,19 @@ class WebSocketClientProtocol(WebSocketCommonProtocol): def __init__( self, *, - origin=None, - extensions=None, - subprotocols=None, - extra_headers=None, - **kwds, - ): + origin: Optional[str] = None, + extensions: Optional[List[ClientExtensionFactory]] = None, + subprotocols: Optional[List[str]] = None, + extra_headers: Optional[HeadersLike] = None, + **kwds: Any, + ) -> None: self.origin = origin self.available_extensions = extensions self.available_subprotocols = subprotocols self.extra_headers = extra_headers super().__init__(**kwds) - def write_http_request(self, path, headers): + def write_http_request(self, path: str, headers: Headers) -> None: """ Write request line and headers to the HTTP request. @@ -78,7 +82,7 @@ def write_http_request(self, path, headers): self.writer.write(request.encode()) - async def read_http_response(self): + async def read_http_response(self) -> Tuple[int, Headers]: """ Read status line and headers from the HTTP response. @@ -103,7 +107,9 @@ async def read_http_response(self): return status_code, self.response_headers @staticmethod - def process_extensions(headers, available_extensions): + def process_extensions( + headers: Headers, available_extensions: Optional[List[ClientExtensionFactory]] + ) -> List[Extension]: """ Handle the Sec-WebSocket-Extensions HTTP response header. @@ -130,7 +136,7 @@ def process_extensions(headers, available_extensions): order of extensions, may be implemented by overriding this method. 
""" - accepted_extensions = [] + accepted_extensions: List[Extension] = [] header_values = headers.get_all("Sec-WebSocket-Extensions") @@ -139,7 +145,7 @@ def process_extensions(headers, available_extensions): if available_extensions is None: raise InvalidHandshake("No extensions supported") - parsed_header_values = sum( + parsed_header_values: List[ExtensionHeader] = sum( [parse_extension_list(header_value) for header_value in header_values], [], ) @@ -177,7 +183,9 @@ def process_extensions(headers, available_extensions): return accepted_extensions @staticmethod - def process_subprotocol(headers, available_subprotocols): + def process_subprotocol( + headers: Headers, available_subprotocols: Optional[List[str]] + ) -> Optional[str]: """ Handle the Sec-WebSocket-Protocol HTTP response header. @@ -186,7 +194,7 @@ def process_subprotocol(headers, available_subprotocols): Return the selected subprotocol. """ - subprotocol = None + subprotocol: Optional[str] = None header_values = headers.get_all("Sec-WebSocket-Protocol") @@ -195,7 +203,7 @@ def process_subprotocol(headers, available_subprotocols): if available_subprotocols is None: raise InvalidHandshake("No subprotocols supported") - parsed_header_values = sum( + parsed_header_values: List[str] = sum( [ parse_subprotocol_list(header_value) for header_value in header_values @@ -216,12 +224,12 @@ def process_subprotocol(headers, available_subprotocols): async def handshake( self, - wsuri, - origin=None, - available_extensions=None, - available_subprotocols=None, - extra_headers=None, - ): + wsuri: WebSocketURI, + origin: Optional[str] = None, + available_extensions: Optional[List[ClientExtensionFactory]] = None, + available_subprotocols: Optional[List[str]] = None, + extra_headers: Optional[HeadersLike] = None, + ) -> None: """ Perform the client side of the opening handshake. 
@@ -359,26 +367,26 @@ class Connect: def __init__( self, - uri, + uri: str, *, - create_protocol=None, - ping_interval=20, - ping_timeout=20, - close_timeout=None, - max_size=2 ** 20, - max_queue=2 ** 5, - read_limit=2 ** 16, - write_limit=2 ** 16, - loop=None, - legacy_recv=False, - klass=WebSocketClientProtocol, - timeout=10, - compression="deflate", - origin=None, - extensions=None, - subprotocols=None, - extra_headers=None, - **kwds, + create_protocol: Optional[Type[WebSocketClientProtocol]] = None, + ping_interval: float = 20, + ping_timeout: float = 20, + close_timeout: Optional[float] = None, + max_size: int = 2 ** 20, + max_queue: int = 2 ** 5, + read_limit: int = 2 ** 16, + write_limit: int = 2 ** 16, + loop: Optional[asyncio.AbstractEventLoop] = None, + legacy_recv: bool = False, + klass: Type[WebSocketClientProtocol] = WebSocketClientProtocol, + timeout: float = 10, + compression: Optional[str] = "deflate", + origin: Optional[str] = None, + extensions: Optional[List[ClientExtensionFactory]] = None, + subprotocols: Optional[List[str]] = None, + extra_headers: Optional[HeadersLike] = None, + **kwds: Any, ): if loop is None: loop = asyncio.get_event_loop() @@ -434,7 +442,9 @@ def __init__( self._extra_headers = extra_headers self._kwds = kwds - def _creating_connection(self): + async def _creating_connection( + self + ) -> Tuple[asyncio.Transport, WebSocketClientProtocol]: if self._wsuri.secure: self._kwds.setdefault("ssl", True) @@ -457,6 +467,8 @@ def _creating_connection(self): extra_headers=self._extra_headers, ) + host: Optional[str] + port: Optional[int] if self._kwds.get("sock") is None: host, port = self._wsuri.host, self._wsuri.port else: @@ -467,19 +479,30 @@ def _creating_connection(self): self._origin = self._origin # This is a coroutine object. - return self._loop.create_connection(factory, host, port, **self._kwds) + # https://github.com/python/typeshed/pull/2756 + transport, protocol = await self._loop.create_connection( # type: ignore + factory, host, port, **self._kwds + ) + transport = cast(asyncio.Transport, transport) + protocol = cast(WebSocketClientProtocol, protocol) + return transport, protocol @asyncio.coroutine - def __iter__(self): - return self.__await_impl__() + def __iter__(self) -> Generator[Any, None, WebSocketClientProtocol]: + return (yield from self.__await__()) - async def __aenter__(self): + async def __aenter__(self) -> WebSocketClientProtocol: return await self - async def __aexit__(self, exc_type, exc_value, traceback): + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: await self.ws_client.close() - async def __await_impl__(self): + async def __await_impl__(self) -> WebSocketClientProtocol: for redirects in range(self.MAX_REDIRECTS_ALLOWED): transport, protocol = await self._creating_connection() @@ -508,7 +531,7 @@ async def __await_impl__(self): self.ws_client = protocol return protocol - def __await__(self): + def __await__(self) -> Generator[Any, None, WebSocketClientProtocol]: # __await__() must return a type that I don't know how to obtain except # by calling __await__() on the return value of an async function. # I'm not finding a better way to take advantage of PEP 492. 
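
The annotations above reflect that connect() can either be awaited directly or used as an async context manager; a brief sketch of both forms (the URI is a placeholder):

    import asyncio
    import websockets

    async def demo() -> None:
        # Awaitable form: returns a WebSocketClientProtocol.
        ws = await websockets.connect("ws://localhost:8765")
        await ws.close()

        # Async context manager form: closes the connection on exit.
        async with websockets.connect("ws://localhost:8765") as ws:
            await ws.send("Hello world!")

    asyncio.get_event_loop().run_until_complete(demo())
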
diff --git a/src/websockets/exceptions.py b/src/websockets/exceptions.py index 50f3ab373..9999527ef 100644 --- a/src/websockets/exceptions.py +++ b/src/websockets/exceptions.py @@ -1,3 +1,15 @@ +import http +from typing import TYPE_CHECKING, Any, Optional + +from .http import Headers, HeadersLike + + +if TYPE_CHECKING: # pragma: no cover + from .uri import WebSocketURI +else: + WebSocketURI = Any + + __all__ = [ "AbortHandshake", "ConnectionClosed", @@ -33,11 +45,13 @@ class AbortHandshake(InvalidHandshake): """ - def __init__(self, status, headers, body=b""): + def __init__( + self, status: http.HTTPStatus, headers: HeadersLike, body: bytes = b"" + ) -> None: self.status = status - self.headers = headers + self.headers = Headers(headers) self.body = body - message = f"HTTP {status}, {len(headers)} headers, {len(body)} bytes" + message = f"HTTP {status}, {len(self.headers)} headers, {len(body)} bytes" super().__init__(message) @@ -47,7 +61,7 @@ class RedirectHandshake(InvalidHandshake): """ - def __init__(self, wsuri): + def __init__(self, wsuri: WebSocketURI) -> None: self.wsuri = wsuri @@ -64,7 +78,7 @@ class InvalidHeader(InvalidHandshake): """ - def __init__(self, name, value=None): + def __init__(self, name: str, value: Optional[str] = None) -> None: if value is None: message = f"Missing {name} header" elif value == "": @@ -80,7 +94,7 @@ class InvalidHeaderFormat(InvalidHeader): """ - def __init__(self, name, error, string, pos): + def __init__(self, name: str, error: str, string: str, pos: int) -> None: error = f"{error} at {pos} in {string}" super().__init__(name, error) @@ -105,7 +119,7 @@ class InvalidOrigin(InvalidHeader): """ - def __init__(self, origin): + def __init__(self, origin: Optional[str]) -> None: super().__init__("Origin", origin) @@ -117,7 +131,7 @@ class InvalidStatusCode(InvalidHandshake): """ - def __init__(self, status_code): + def __init__(self, status_code: int) -> None: self.status_code = status_code message = f"Status code not 101: {status_code}" super().__init__(message) @@ -136,7 +150,7 @@ class InvalidParameterName(NegotiationError): """ - def __init__(self, name): + def __init__(self, name: str) -> None: self.name = name message = f"Invalid parameter name: {name}" super().__init__(message) @@ -148,7 +162,7 @@ class InvalidParameterValue(NegotiationError): """ - def __init__(self, name, value): + def __init__(self, name: str, value: Optional[str]) -> None: self.name = name self.value = value message = f"Invalid value for parameter {name}: {value}" @@ -161,7 +175,7 @@ class DuplicateParameter(NegotiationError): """ - def __init__(self, name): + def __init__(self, name: str) -> None: self.name = name message = f"Duplicate parameter: {name}" super().__init__(message) @@ -191,7 +205,7 @@ class InvalidState(Exception): } -def format_close(code, reason): +def format_close(code: int, reason: str) -> str: """ Display a human-readable version of the close code and reason. 
@@ -222,7 +236,7 @@ class ConnectionClosed(InvalidState): """ - def __init__(self, code, reason): + def __init__(self, code: int, reason: str) -> None: self.code = code self.reason = reason message = "WebSocket connection is closed: " @@ -236,6 +250,11 @@ class InvalidURI(Exception): """ + def __init__(self, uri: str) -> None: + self.uri = uri + message = "{} isn't a valid URI".format(uri) + super().__init__(message) + class PayloadTooBig(Exception): """ diff --git a/src/websockets/extensions/base.py b/src/websockets/extensions/base.py index cf3f9a2ec..707e9317a 100644 --- a/src/websockets/extensions/base.py +++ b/src/websockets/extensions/base.py @@ -6,6 +6,46 @@ """ +from typing import List, Optional, Tuple + +from ..framing import Frame +from ..headers import ExtensionParameters + + +__all__ = ["Extension", "ClientExtensionFactory", "ServerExtensionFactory"] + + +class Extension: + """ + Abstract class for extensions. + + """ + + @property + def name(self) -> str: + """ + Extension identifier. + + """ + + def decode(self, frame: Frame, *, max_size: Optional[int] = None) -> Frame: + """ + Decode an incoming frame. + + The ``frame`` parameter and the return value are + :class:`~websockets.framing.Frame` instances. + + """ + + def encode(self, frame: Frame) -> Frame: + """ + Encode an outgoing frame. + + The ``frame`` parameter and the return value are + :class:`~websockets.framing.Frame` instances. + + """ + class ClientExtensionFactory: """ @@ -14,13 +54,13 @@ class ClientExtensionFactory: """ @property - def name(self): + def name(self) -> str: """ Extension identifier. """ - def get_request_params(self): + def get_request_params(self) -> ExtensionParameters: """ Build request parameters. @@ -28,7 +68,9 @@ def get_request_params(self): """ - def process_response_params(self, params, accepted_extensions): + def process_response_params( + self, params: ExtensionParameters, accepted_extensions: List[Extension] + ) -> Extension: """ Process response parameters received from the server. @@ -51,13 +93,15 @@ class ServerExtensionFactory: """ @property - def name(self): + def name(self) -> str: """ Extension identifier. """ - def process_request_params(self, params, accepted_extensions): + def process_request_params( + self, params: ExtensionParameters, accepted_extensions: List[Extension] + ) -> Tuple[ExtensionParameters, Extension]: """ Process request parameters received from the client. @@ -74,37 +118,3 @@ def process_request_params(self, params, accepted_extensions): :exc:`~websockets.exceptions.NegotiationError`. """ - - -class Extension: - """ - Abstract class for extensions. - - """ - - @property - def name(self): - """ - Extension identifier. - - """ - - def decode(self, frame, *, max_size=None): - """ - Decode an incoming frame. - - The ``frame`` parameter and the return value are - :class:`~websockets.framing.Frame` instances. - - - - """ - - def encode(self, frame): - """ - Encode an outgoing frame. - - The ``frame`` parameter and the return value are - :class:`~websockets.framing.Frame` instances. 
- - """ diff --git a/src/websockets/extensions/permessage_deflate.py b/src/websockets/extensions/permessage_deflate.py index 2c2be49bd..93698a363 100644 --- a/src/websockets/extensions/permessage_deflate.py +++ b/src/websockets/extensions/permessage_deflate.py @@ -5,6 +5,7 @@ """ import zlib +from typing import Any, Dict, List, Optional, Tuple, Union from ..exceptions import ( DuplicateParameter, @@ -13,13 +14,15 @@ NegotiationError, PayloadTooBig, ) -from ..framing import CTRL_OPCODES, OP_CONT +from ..framing import CTRL_OPCODES, OP_CONT, Frame +from ..headers import ExtensionParameters +from .base import ClientExtensionFactory, Extension, ServerExtensionFactory __all__ = [ + "PerMessageDeflate", "ClientPerMessageDeflateFactory", "ServerPerMessageDeflateFactory", - "PerMessageDeflate", ] _EMPTY_UNCOMPRESSED_BLOCK = b"\x00\x00\xff\xff" @@ -27,17 +30,156 @@ _MAX_WINDOW_BITS_VALUES = [str(bits) for bits in range(8, 16)] +class PerMessageDeflate(Extension): + """ + Per-Message Deflate extension. + + """ + + name = "permessage-deflate" + + def __init__( + self, + remote_no_context_takeover: bool, + local_no_context_takeover: bool, + remote_max_window_bits: int, + local_max_window_bits: int, + compress_settings: Optional[Dict[Any, Any]] = None, + ): + """ + Configure the Per-Message Deflate extension. + + """ + if compress_settings is None: + compress_settings = {} + + assert remote_no_context_takeover in [False, True] + assert local_no_context_takeover in [False, True] + assert 8 <= remote_max_window_bits <= 15 + assert 8 <= local_max_window_bits <= 15 + assert "wbits" not in compress_settings + + self.remote_no_context_takeover = remote_no_context_takeover + self.local_no_context_takeover = local_no_context_takeover + self.remote_max_window_bits = remote_max_window_bits + self.local_max_window_bits = local_max_window_bits + self.compress_settings = compress_settings + + if not self.remote_no_context_takeover: + self.decoder = zlib.decompressobj(wbits=-self.remote_max_window_bits) + + if not self.local_no_context_takeover: + self.encoder = zlib.compressobj( + wbits=-self.local_max_window_bits, **self.compress_settings + ) + + # To handle continuation frames properly, we must keep track of + # whether that initial frame was encoded. + self.decode_cont_data = False + # There's no need for self.encode_cont_data because we always encode + # outgoing frames, so it would always be True. + + def __repr__(self) -> str: + return ( + f"PerMessageDeflate(" + f"remote_no_context_takeover={self.remote_no_context_takeover}, " + f"local_no_context_takeover={self.local_no_context_takeover}, " + f"remote_max_window_bits={self.remote_max_window_bits}, " + f"local_max_window_bits={self.local_max_window_bits})" + ) + + def decode(self, frame: Frame, *, max_size: Optional[int] = None) -> Frame: + """ + Decode an incoming frame. + + """ + # Skip control frames. + if frame.opcode in CTRL_OPCODES: + return frame + + # Handle continuation data frames: + # - skip if the initial data frame wasn't encoded + # - reset "decode continuation data" flag if it's a final frame + if frame.opcode == OP_CONT: + if not self.decode_cont_data: + return frame + if frame.fin: + self.decode_cont_data = False + + # Handle text and binary data frames: + # - skip if the frame isn't encoded + # - set "decode continuation data" flag if it's a non-final frame + else: + if not frame.rsv1: + return frame + if not frame.fin: # frame.rsv1 is True at this point + self.decode_cont_data = True + + # Re-initialize per-message decoder. 
+ if self.remote_no_context_takeover: + self.decoder = zlib.decompressobj(wbits=-self.remote_max_window_bits) + + # Uncompress compressed frames. Protect against zip bombs by + # preventing zlib from decompressing more than max_length bytes + # (except when the limit is disabled with max_size = None). + data = frame.data + if frame.fin: + data += _EMPTY_UNCOMPRESSED_BLOCK + max_length = 0 if max_size is None else max_size + data = self.decoder.decompress(data, max_length) + if self.decoder.unconsumed_tail: + raise PayloadTooBig( + f"Uncompressed payload length exceeds size limit (? > {max_size} bytes)" + ) + + # Allow garbage collection of the decoder if it won't be reused. + if frame.fin and self.remote_no_context_takeover: + del self.decoder + + return frame._replace(data=data, rsv1=False) + + def encode(self, frame: Frame) -> Frame: + """ + Encode an outgoing frame. + + """ + # Skip control frames. + if frame.opcode in CTRL_OPCODES: + return frame + + # Since we always encode and never fragment messages, there's no logic + # similar to decode() here at this time. + + if frame.opcode != OP_CONT: + # Re-initialize per-message decoder. + if self.local_no_context_takeover: + self.encoder = zlib.compressobj( + wbits=-self.local_max_window_bits, **self.compress_settings + ) + + # Compress data frames. + data = self.encoder.compress(frame.data) + self.encoder.flush(zlib.Z_SYNC_FLUSH) + if frame.fin and data.endswith(_EMPTY_UNCOMPRESSED_BLOCK): + data = data[:-4] + + # Allow garbage collection of the encoder if it won't be reused. + if frame.fin and self.local_no_context_takeover: + del self.encoder + + return frame._replace(data=data, rsv1=True) + + def _build_parameters( - server_no_context_takeover, - client_no_context_takeover, - server_max_window_bits, - client_max_window_bits, -): + server_no_context_takeover: bool, + client_no_context_takeover: bool, + server_max_window_bits: Optional[int], + client_max_window_bits: Optional[Union[int, bool]], +) -> ExtensionParameters: """ Build a list of ``(name, value)`` pairs for some compression parameters. """ - params = [] + params: ExtensionParameters = [] if server_no_context_takeover: params.append(("server_no_context_takeover", None)) if client_no_context_takeover: @@ -51,7 +193,9 @@ def _build_parameters( return params -def _extract_parameters(params, *, is_server): +def _extract_parameters( + params: ExtensionParameters, *, is_server: bool +) -> Tuple[bool, bool, Optional[int], Optional[Union[int, bool]]]: """ Extract compression parameters from a list of ``(name, value)`` pairs. @@ -59,10 +203,10 @@ def _extract_parameters(params, *, is_server): without a value. This is only allow in handshake requests. """ - server_no_context_takeover = False - client_no_context_takeover = False - server_max_window_bits = None - client_max_window_bits = None + server_no_context_takeover: bool = False + client_no_context_takeover: bool = False + server_max_window_bits: Optional[int] = None + client_max_window_bits: Optional[Union[int, bool]] = None for name, value in params: @@ -111,7 +255,7 @@ def _extract_parameters(params, *, is_server): ) -class ClientPerMessageDeflateFactory: +class ClientPerMessageDeflateFactory(ClientExtensionFactory): """ Client-side extension factory for Per-Message Deflate extension. 
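
A hedged configuration sketch for the client-side factory typed above; the parameter values are arbitrary examples, not recommendations from the patch:

    from websockets.extensions.permessage_deflate import ClientPerMessageDeflateFactory

    factory = ClientPerMessageDeflateFactory(
        server_no_context_takeover=True,
        client_max_window_bits=12,
        compress_settings={"memLevel": 4},
    )
    # get_request_params() returns the (name, value) pairs advertised in the
    # Sec-WebSocket-Extensions request header.
    print(factory.get_request_params())
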
@@ -136,11 +280,11 @@ class ClientPerMessageDeflateFactory: def __init__( self, - server_no_context_takeover=False, - client_no_context_takeover=False, - server_max_window_bits=None, - client_max_window_bits=None, - compress_settings=None, + server_no_context_takeover: bool = False, + client_no_context_takeover: bool = False, + server_max_window_bits: Optional[int] = None, + client_max_window_bits: Optional[Union[int, bool]] = None, + compress_settings: Optional[Dict[Any, Any]] = None, ): """ Configure the Per-Message Deflate extension factory. @@ -166,7 +310,7 @@ def __init__( self.client_max_window_bits = client_max_window_bits self.compress_settings = compress_settings - def get_request_params(self): + def get_request_params(self) -> ExtensionParameters: """ Build request parameters. @@ -178,7 +322,11 @@ def get_request_params(self): self.client_max_window_bits, ) - def process_response_params(self, params, accepted_extensions): + def process_response_params( + self, + params: List[Tuple[str, Optional[str]]], + accepted_extensions: List["Extension"], + ) -> PerMessageDeflate: """ Process response parameters. @@ -280,7 +428,7 @@ def process_response_params(self, params, accepted_extensions): ) -class ServerPerMessageDeflateFactory: +class ServerPerMessageDeflateFactory(ServerExtensionFactory): """ Server-side extension factory for the Per-Message Deflate extension. @@ -305,11 +453,11 @@ class ServerPerMessageDeflateFactory: def __init__( self, - server_no_context_takeover=False, - client_no_context_takeover=False, - server_max_window_bits=None, - client_max_window_bits=None, - compress_settings=None, + server_no_context_takeover: bool = False, + client_no_context_takeover: bool = False, + server_max_window_bits: Optional[int] = None, + client_max_window_bits: Optional[int] = None, + compress_settings: Optional[Dict[Any, Any]] = None, ): """ Configure the Per-Message Deflate extension factory. @@ -331,7 +479,11 @@ def __init__( self.client_max_window_bits = client_max_window_bits self.compress_settings = compress_settings - def process_request_params(self, params, accepted_extensions): + def process_request_params( + self, + params: List[Tuple[str, Optional[str]]], + accepted_extensions: List["Extension"], + ) -> Tuple[ExtensionParameters, PerMessageDeflate]: """ Process request parameters. @@ -438,142 +590,3 @@ def process_request_params(self, params, accepted_extensions): self.compress_settings, ), ) - - -class PerMessageDeflate: - """ - Per-Message Deflate extension. - - """ - - name = "permessage-deflate" - - def __init__( - self, - remote_no_context_takeover, - local_no_context_takeover, - remote_max_window_bits, - local_max_window_bits, - compress_settings=None, - ): - """ - Configure the Per-Message Deflate extension. 
- - """ - if compress_settings is None: - compress_settings = {} - - assert remote_no_context_takeover in [False, True] - assert local_no_context_takeover in [False, True] - assert 8 <= remote_max_window_bits <= 15 - assert 8 <= local_max_window_bits <= 15 - assert "wbits" not in compress_settings - - self.remote_no_context_takeover = remote_no_context_takeover - self.local_no_context_takeover = local_no_context_takeover - self.remote_max_window_bits = remote_max_window_bits - self.local_max_window_bits = local_max_window_bits - self.compress_settings = compress_settings - - if not self.remote_no_context_takeover: - self.decoder = zlib.decompressobj(wbits=-self.remote_max_window_bits) - - if not self.local_no_context_takeover: - self.encoder = zlib.compressobj( - wbits=-self.local_max_window_bits, **self.compress_settings - ) - - # To handle continuation frames properly, we must keep track of - # whether that initial frame was encoded. - self.decode_cont_data = False - # There's no need for self.encode_cont_data because we always encode - # outgoing frames, so it would always be True. - - def __repr__(self): - return ( - f"PerMessageDeflate(" - f"remote_no_context_takeover={self.remote_no_context_takeover}, " - f"local_no_context_takeover={self.local_no_context_takeover}, " - f"remote_max_window_bits={self.remote_max_window_bits}, " - f"local_max_window_bits={self.local_max_window_bits})" - ) - - def decode(self, frame, *, max_size=None): - """ - Decode an incoming frame. - - """ - # Skip control frames. - if frame.opcode in CTRL_OPCODES: - return frame - - # Handle continuation data frames: - # - skip if the initial data frame wasn't encoded - # - reset "decode continuation data" flag if it's a final frame - if frame.opcode == OP_CONT: - if not self.decode_cont_data: - return frame - if frame.fin: - self.decode_cont_data = False - - # Handle text and binary data frames: - # - skip if the frame isn't encoded - # - set "decode continuation data" flag if it's a non-final frame - else: - if not frame.rsv1: - return frame - if not frame.fin: # frame.rsv1 is True at this point - self.decode_cont_data = True - - # Re-initialize per-message decoder. - if self.remote_no_context_takeover: - self.decoder = zlib.decompressobj(wbits=-self.remote_max_window_bits) - - # Uncompress compressed frames. Protect against zip bombs by - # preventing zlib from decompressing more than max_length bytes - # (except when the limit is disabled with max_size = None). - data = frame.data - if frame.fin: - data += _EMPTY_UNCOMPRESSED_BLOCK - max_length = 0 if max_size is None else max_size - data = self.decoder.decompress(data, max_length) - if self.decoder.unconsumed_tail: - raise PayloadTooBig( - f"Uncompressed payload length exceeds size limit (? > {max_size} bytes)" - ) - - # Allow garbage collection of the decoder if it won't be reused. - if frame.fin and self.remote_no_context_takeover: - self.decoder = None - - return frame._replace(data=data, rsv1=False) - - def encode(self, frame): - """ - Encode an outgoing frame. - - """ - # Skip control frames. - if frame.opcode in CTRL_OPCODES: - return frame - - # Since we always encode and never fragment messages, there's no logic - # similar to decode() here at this time. - - if frame.opcode != OP_CONT: - # Re-initialize per-message decoder. - if self.local_no_context_takeover: - self.encoder = zlib.compressobj( - wbits=-self.local_max_window_bits, **self.compress_settings - ) - - # Compress data frames. 
- data = self.encoder.compress(frame.data) + self.encoder.flush(zlib.Z_SYNC_FLUSH) - if frame.fin and data.endswith(_EMPTY_UNCOMPRESSED_BLOCK): - data = data[:-4] - - # Allow garbage collection of the encoder if it won't be reused. - if frame.fin and self.local_no_context_takeover: - self.encoder = None - - return frame._replace(data=data, rsv1=True) diff --git a/src/websockets/framing.py b/src/websockets/framing.py index 0abe8f8db..8eb1a79bd 100644 --- a/src/websockets/framing.py +++ b/src/websockets/framing.py @@ -9,14 +9,29 @@ """ -import collections import io import random import struct +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + List, + NamedTuple, + Optional, + Tuple, + Union, +) from .exceptions import PayloadTooBig, WebSocketProtocolError +if TYPE_CHECKING: # pragma: no cover + from .extensions.base import Extension +else: + Extension = Any + try: from .speedups import apply_mask except ImportError: # pragma: no cover @@ -46,8 +61,24 @@ # Using a list optimizes `code in EXTERNAL_CLOSE_CODES`. EXTERNAL_CLOSE_CODES = [1000, 1001, 1002, 1003, 1007, 1008, 1009, 1010, 1011] -FrameData = collections.namedtuple( - "FrameData", ["fin", "opcode", "data", "rsv1", "rsv2", "rsv3"] + +Data = Union[str, bytes] + + +# Switch to class-based syntax when dropping support for Python < 3.6. + +# Convert to a dataclass when dropping support for Python < 3.7. + +FrameData = NamedTuple( + "FrameData", + [ + ("fin", bool), + ("opcode", int), + ("data", bytes), + ("rsv1", bool), + ("rsv2", bool), + ("rsv3", bool), + ], ) @@ -68,11 +99,26 @@ class Frame(FrameData): """ - def __new__(cls, fin, opcode, data, rsv1=False, rsv2=False, rsv3=False): + def __new__( + cls, + fin: bool, + opcode: int, + data: bytes, + rsv1: bool = False, + rsv2: bool = False, + rsv3: bool = False, + ) -> "Frame": return FrameData.__new__(cls, fin, opcode, data, rsv1, rsv2, rsv3) @classmethod - async def read(cls, reader, *, mask, max_size=None, extensions=None): + async def read( + cls, + reader: Callable[[int], Awaitable[bytes]], + *, + mask: bool, + max_size: Optional[int] = None, + extensions: Optional[List[Extension]] = None, + ) -> "Frame": """ Read a WebSocket frame and return a :class:`Frame` object. @@ -138,7 +184,13 @@ async def read(cls, reader, *, mask, max_size=None, extensions=None): return frame - def write(frame, writer, *, mask, extensions=None): + def write( + frame, + writer: Callable[[bytes], Any], + *, + mask: bool, + extensions: Optional[List[Extension]] = None, + ) -> None: """ Write a WebSocket frame. @@ -207,7 +259,7 @@ def write(frame, writer, *, mask, extensions=None): # send frames concurrently from multiple coroutines. writer(output.getvalue()) - def check(frame): + def check(frame) -> None: """ Check that this frame contains acceptable values. @@ -232,7 +284,7 @@ def check(frame): raise WebSocketProtocolError(f"Invalid opcode: {frame.opcode}") -def prepare_data(data): +def prepare_data(data: Data) -> Tuple[int, bytes]: """ Convert a string or byte-like object to an opcode and a bytes-like object. 
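For readers skimming the framing changes, here is a condensed, self-contained sketch (not part of the patch) of how the typed ``Frame`` tuple and ``prepare_data()`` fit together. The opcode values are the RFC 6455 ones; the real constants and the full dispatch (including ``memoryview``) live in ``websockets.framing``.

.. code:: python

    from typing import NamedTuple, Tuple, Union

    OP_TEXT, OP_BINARY = 0x01, 0x02  # RFC 6455 opcodes
    Data = Union[str, bytes]

    class Frame(NamedTuple):
        fin: bool
        opcode: int
        data: bytes
        rsv1: bool = False
        rsv2: bool = False
        rsv3: bool = False

    def prepare_data(data: Data) -> Tuple[int, bytes]:
        # str payloads become text frames encoded as UTF-8; bytes-like
        # payloads become binary frames.
        if isinstance(data, str):
            return OP_TEXT, data.encode("utf-8")
        elif isinstance(data, (bytes, bytearray)):
            return OP_BINARY, bytes(data)
        else:
            raise TypeError("data must be bytes-like or str")

    opcode, payload = prepare_data("café")
    frame = Frame(fin=True, opcode=opcode, data=payload)
    assert frame == (True, OP_TEXT, "café".encode("utf-8"), False, False, False)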
@@ -249,7 +301,7 @@ def prepare_data(data):
 
     """
     if isinstance(data, str):
         return OP_TEXT, data.encode("utf-8")
-    elif isinstance(data, collections.abc.ByteString):
+    elif isinstance(data, (bytes, bytearray)):
         return OP_BINARY, data
     elif isinstance(data, memoryview):
         if data.c_contiguous:
@@ -260,11 +312,11 @@ def prepare_data(data):
         raise TypeError("data must be bytes-like or str")
 
 
-def encode_data(data):
+def encode_data(data: Data) -> bytes:
     """
     Convert a string or byte-like object to bytes.
 
     This function is designed for ping and pong frames.
 
     If ``data`` is a :class:`str`, return a :class:`bytes` object encoding
     ``data`` in UTF-8.
@@ -276,7 +328,7 @@ def encode_data(data):
     """
     if isinstance(data, str):
         return data.encode("utf-8")
-    elif isinstance(data, collections.abc.ByteString):
+    elif isinstance(data, (bytes, bytearray)):
         return bytes(data)
     elif isinstance(data, memoryview):
         return data.tobytes()
@@ -284,7 +336,7 @@ def encode_data(data):
         raise TypeError("data must be bytes-like or str")
 
 
-def parse_close(data):
+def parse_close(data: bytes) -> Tuple[int, str]:
     """
     Parse the data in a close frame.
 
@@ -308,7 +360,7 @@ def parse_close(data):
         raise WebSocketProtocolError("Close frame too short")
 
 
-def serialize_close(code, reason):
+def serialize_close(code: int, reason: str) -> bytes:
     """
     Serialize the data for a close frame.
 
@@ -319,7 +371,7 @@ def serialize_close(code, reason):
     return struct.pack("!H", code) + reason.encode("utf-8")
 
 
-def check_close(code):
+def check_close(code: int) -> None:
     """
     Check the close code for a close frame.
 
diff --git a/src/websockets/handshake.py b/src/websockets/handshake.py
index e6bd61fab..f04d81d59 100644
--- a/src/websockets/handshake.py
+++ b/src/websockets/handshake.py
@@ -38,7 +38,7 @@
 
 from .exceptions import InvalidHeader, InvalidHeaderValue, InvalidUpgrade
 from .headers import parse_connection, parse_upgrade
-from .http import MultipleValuesError
+from .http import Headers, MultipleValuesError
 
 
 __all__ = ["build_request", "check_request", "build_response", "check_response"]
 
@@ -46,7 +46,7 @@
 
 GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
 
 
-def build_request(headers):
+def build_request(headers: Headers) -> str:
     """
     Build a handshake request to send to the server.
 
@@ -62,7 +62,7 @@ def build_request(headers):
     return key
 
 
-def check_request(headers):
+def check_request(headers: Headers) -> str:
     """
     Check a handshake request received from the client.
 
@@ -83,14 +83,14 @@ def check_request(headers):
     )
 
     if not any(value.lower() == "upgrade" for value in connection):
-        raise InvalidUpgrade("Connection", connection)
+        raise InvalidUpgrade("Connection", ", ".join(connection))
 
     upgrade = sum([parse_upgrade(value) for value in headers.get_all("Upgrade")], [])
 
     # For compatibility with non-strict implementations, ignore case when
     # checking the Upgrade header. It's supposed to be 'WebSocket'.
     if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"):
-        raise InvalidUpgrade("Upgrade", upgrade)
+        raise InvalidUpgrade("Upgrade", ", ".join(upgrade))
 
     try:
         s_w_key = headers["Sec-WebSocket-Key"]
@@ -123,7 +123,7 @@ def check_request(headers):
     return s_w_key
 
 
-def build_response(headers, key):
+def build_response(headers: Headers, key: str) -> None:
     """
     Build a handshake response to send to the client.
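``build_response()`` and ``check_response()``, along with ``accept()`` defined at the bottom of this file, revolve around the Sec-WebSocket-Accept computation. Here is a standalone sketch of that computation, using the GUID above and the key/accept pair from RFC 6455, section 1.3:

.. code:: python

    import base64
    import hashlib

    GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"

    def accept(key: str) -> str:
        # SHA-1 of the client's Sec-WebSocket-Key concatenated with the
        # fixed GUID, then base64-encoded.
        sha1 = hashlib.sha1((key + GUID).encode()).digest()
        return base64.b64encode(sha1).decode()

    assert accept("dGhlIHNhbXBsZSBub25jZQ==") == "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="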
@@ -135,7 +135,7 @@ def build_response(headers, key): headers["Sec-WebSocket-Accept"] = accept(key) -def check_response(headers, key): +def check_response(headers: Headers, key: str) -> None: """ Check a handshake response received from the server. @@ -156,14 +156,14 @@ def check_response(headers, key): ) if not any(value.lower() == "upgrade" for value in connection): - raise InvalidUpgrade("Connection", connection) + raise InvalidUpgrade("Connection", " ".join(connection)) upgrade = sum([parse_upgrade(value) for value in headers.get_all("Upgrade")], []) # For compatibility with non-strict implementations, ignore case when # checking the Upgrade header. It's supposed to be 'WebSocket'. if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"): - raise InvalidUpgrade("Upgrade", upgrade) + raise InvalidUpgrade("Upgrade", ", ".join(upgrade)) try: s_w_accept = headers["Sec-WebSocket-Accept"] @@ -178,6 +178,6 @@ def check_response(headers, key): raise InvalidHeaderValue("Sec-WebSocket-Accept", s_w_accept) -def accept(key): +def accept(key: str) -> str: sha1 = hashlib.sha1((key + GUID).encode()).digest() return base64.b64encode(sha1).decode() diff --git a/src/websockets/headers.py b/src/websockets/headers.py index 73f11edce..e2addf4c5 100644 --- a/src/websockets/headers.py +++ b/src/websockets/headers.py @@ -9,6 +9,7 @@ import base64 import re +from typing import Callable, List, Optional, Tuple, TypeVar from .exceptions import InvalidHeaderFormat @@ -23,12 +24,19 @@ ] +T = TypeVar("T") + +ExtensionParameter = Tuple[str, Optional[str]] +ExtensionParameters = List[ExtensionParameter] +ExtensionHeader = Tuple[str, ExtensionParameters] +SubprotocolHeader = str + # To avoid a dependency on a parsing library, we implement manually the ABNF # described in https://tools.ietf.org/html/rfc6455#section-9.1 with the # definitions from https://tools.ietf.org/html/rfc7230#appendix-B. -def peek_ahead(string, pos): +def peek_ahead(string: str, pos: int) -> Optional[str]: """ Return the next character from ``string`` at the given position. @@ -43,7 +51,7 @@ def peek_ahead(string, pos): _OWS_re = re.compile(r"[\t ]*") -def parse_OWS(string, pos): +def parse_OWS(string: str, pos: int) -> int: """ Parse optional whitespace from ``string`` at the given position. @@ -54,13 +62,14 @@ def parse_OWS(string, pos): """ # There's always a match, possibly empty, whose content doesn't matter. match = _OWS_re.match(string, pos) + assert match is not None return match.end() _token_re = re.compile(r"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+") -def parse_token(string, pos, header_name): +def parse_token(string: str, pos: int, header_name: str) -> Tuple[str, int]: """ Parse a token from ``string`` at the given position. @@ -83,7 +92,7 @@ def parse_token(string, pos, header_name): _unquote_re = re.compile(r"\\([\x09\x20-\x7e\x80-\xff])") -def parse_quoted_string(string, pos, header_name): +def parse_quoted_string(string: str, pos: int, header_name: str) -> Tuple[str, int]: """ Parse a quoted string from ``string`` at the given position. @@ -100,7 +109,12 @@ def parse_quoted_string(string, pos, header_name): return _unquote_re.sub(r"\1", match.group()[1:-1]), match.end() -def parse_list(parse_item, string, pos, header_name): +def parse_list( + parse_item: Callable[[str, int, str], Tuple[T, int]], + string: str, + pos: int, + header_name: str, +) -> List[T]: """ Parse a comma-separated list from ``string`` at the given position. 
@@ -162,7 +176,7 @@ def parse_list(parse_item, string, pos, header_name): return items -def parse_connection(string): +def parse_connection(string: str) -> List[str]: """ Parse a ``Connection`` header. @@ -179,7 +193,7 @@ def parse_connection(string): ) -def parse_protocol(string, pos, header_name): +def parse_protocol(string: str, pos: int, header_name: str) -> Tuple[str, int]: """ Parse a protocol from ``string`` at the given position. @@ -196,7 +210,7 @@ def parse_protocol(string, pos, header_name): return match.group(), match.end() -def parse_upgrade(string): +def parse_upgrade(string: str) -> List[str]: """ Parse an ``Upgrade`` header. @@ -208,7 +222,9 @@ def parse_upgrade(string): return parse_list(parse_protocol, string, 0, "Upgrade") -def parse_extension_param(string, pos, header_name): +def parse_extension_param( + string: str, pos: int, header_name: str +) -> Tuple[ExtensionParameter, int]: """ Parse a single extension parameter from ``string`` at the given position. @@ -220,7 +236,8 @@ def parse_extension_param(string, pos, header_name): # Extract parameter name. name, pos = parse_token(string, pos, header_name) pos = parse_OWS(string, pos) - # Extract parameter string, if there is one. + # Extract parameter value, if there is one. + value: Optional[str] = None if peek_ahead(string, pos) == "=": pos = parse_OWS(string, pos + 1) if peek_ahead(string, pos) == '"': @@ -238,13 +255,13 @@ def parse_extension_param(string, pos, header_name): else: value, pos = parse_token(string, pos, header_name) pos = parse_OWS(string, pos) - else: - value = None return (name, value), pos -def parse_extension(string, pos, header_name): +def parse_extension( + string: str, pos: int, header_name: str +) -> Tuple[ExtensionHeader, int]: """ Parse an extension definition from ``string`` at the given position. @@ -266,7 +283,7 @@ def parse_extension(string, pos, header_name): return (name, parameters), pos -def parse_extension_list(string): +def parse_extension_list(string: str) -> List[ExtensionHeader]: """ Parse a ``Sec-WebSocket-Extensions`` header. @@ -291,7 +308,7 @@ def parse_extension_list(string): return parse_list(parse_extension, string, 0, "Sec-WebSocket-Extensions") -def build_extension(name, parameters): +def build_extension(name: str, parameters: ExtensionParameters) -> str: """ Build an extension definition. @@ -308,7 +325,7 @@ def build_extension(name, parameters): ) -def build_extension_list(extensions): +def build_extension_list(extensions: List[ExtensionHeader]) -> str: """ Unparse a ``Sec-WebSocket-Extensions`` header. @@ -320,7 +337,7 @@ def build_extension_list(extensions): ) -def parse_subprotocol_list(string): +def parse_subprotocol_list(string: str) -> List[SubprotocolHeader]: """ Parse a ``Sec-WebSocket-Protocol`` header. @@ -330,7 +347,7 @@ def parse_subprotocol_list(string): return parse_list(parse_token, string, 0, "Sec-WebSocket-Protocol") -def build_subprotocol_list(protocols): +def build_subprotocol_list(protocols: List[SubprotocolHeader]) -> str: """ Unparse a ``Sec-WebSocket-Protocol`` header. @@ -340,7 +357,7 @@ def build_subprotocol_list(protocols): return ", ".join(protocols) -def build_basic_auth(username, password): +def build_basic_auth(username: str, password: str) -> str: """ Build an Authorization header for HTTP Basic Auth. 
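The typed aliases and helpers in this file round-trip header values. A quick sanity check, assuming ``websockets`` with this patch applied is importable; the expected strings follow from the helpers' docstrings and RFC 7617, not from anything new in the patch:

.. code:: python

    from websockets.headers import (
        build_basic_auth,
        build_extension_list,
        parse_extension_list,
    )

    # An ExtensionHeader is a (name, parameters) pair; parameters are
    # (key, optional value) tuples, so a bare flag parses to (key, None).
    extensions = parse_extension_list("permessage-deflate; client_max_window_bits")
    assert extensions == [("permessage-deflate", [("client_max_window_bits", None)])]
    assert build_extension_list(extensions) == "permessage-deflate; client_max_window_bits"

    # Credentials are serialized as base64("username:password").
    assert build_basic_auth("user", "pass") == "Basic dXNlcjpwYXNz"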
diff --git a/src/websockets/http.py b/src/websockets/http.py index ab74614af..f0c58061d 100644 --- a/src/websockets/http.py +++ b/src/websockets/http.py @@ -7,9 +7,20 @@ """ -import collections.abc +import asyncio import re import sys +from typing import ( + Any, + Dict, + Iterable, + Iterator, + List, + Mapping, + MutableMapping, + Tuple, + Union, +) from .version import version as websockets_version @@ -48,7 +59,7 @@ _value_re = re.compile(rb"[\x09\x20-\x7e\x80-\xff]*") -async def read_request(stream): +async def read_request(stream: asyncio.StreamReader) -> Tuple[str, "Headers"]: """ Read an HTTP/1.1 GET request from ``stream``. @@ -77,20 +88,20 @@ async def read_request(stream): request_line = await read_line(stream) # This may raise "ValueError: not enough values to unpack" - method, path, version = request_line.split(b" ", 2) + method, raw_path, version = request_line.split(b" ", 2) if method != b"GET": raise ValueError("Unsupported HTTP method: %r" % method) if version != b"HTTP/1.1": raise ValueError("Unsupported HTTP version: %r" % version) - path = path.decode("ascii", "surrogateescape") + path = raw_path.decode("ascii", "surrogateescape") headers = await read_headers(stream) return path, headers -async def read_response(stream): +async def read_response(stream: asyncio.StreamReader) -> Tuple[int, str, "Headers"]: """ Read an HTTP/1.1 response from ``stream``. @@ -117,24 +128,24 @@ async def read_response(stream): status_line = await read_line(stream) # This may raise "ValueError: not enough values to unpack" - version, status_code, reason = status_line.split(b" ", 2) + version, raw_status_code, raw_reason = status_line.split(b" ", 2) if version != b"HTTP/1.1": raise ValueError("Unsupported HTTP version: %r" % version) # This may raise "ValueError: invalid literal for int() with base 10" - status_code = int(status_code) + status_code = int(raw_status_code) if not 100 <= status_code < 1000: raise ValueError("Unsupported HTTP status code: %d" % status_code) - if not _value_re.fullmatch(reason): - raise ValueError("Invalid HTTP reason phrase: %r" % reason) - reason = reason.decode() + if not _value_re.fullmatch(raw_reason): + raise ValueError("Invalid HTTP reason phrase: %r" % raw_reason) + reason = raw_reason.decode() headers = await read_headers(stream) return status_code, reason, headers -async def read_headers(stream): +async def read_headers(stream: asyncio.StreamReader) -> "Headers": """ Read HTTP headers from ``stream``. 
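``read_request()``, ``read_response()`` and ``read_headers()`` now advertise that they return the ``Headers`` data structure annotated further down in this file. A short sketch of its multi-value behavior, assuming ``websockets`` with this patch applied is importable:

.. code:: python

    from websockets.http import Headers, MultipleValuesError

    headers = Headers()
    headers["Sec-WebSocket-Extensions"] = "permessage-deflate"
    headers["Sec-WebSocket-Extensions"] = "permessage-foo"

    # __getitem__ refuses to pick one of several values silently...
    try:
        headers["Sec-WebSocket-Extensions"]
    except MultipleValuesError:
        pass

    # ...while get_all() returns every value, which is what the handshake
    # helpers iterate over.
    assert headers.get_all("Sec-WebSocket-Extensions") == [
        "permessage-deflate",
        "permessage-foo",
    ]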
@@ -156,15 +167,15 @@ async def read_headers(stream): break # This may raise "ValueError: not enough values to unpack" - name, value = line.split(b":", 1) - if not _token_re.fullmatch(name): - raise ValueError("Invalid HTTP header name: %r" % name) - value = value.strip(b" \t") - if not _value_re.fullmatch(value): - raise ValueError("Invalid HTTP header value: %r" % value) - - name = name.decode("ascii") # guaranteed to be ASCII at this point - value = value.decode("ascii", "surrogateescape") + raw_name, raw_value = line.split(b":", 1) + if not _token_re.fullmatch(raw_name): + raise ValueError("Invalid HTTP header name: %r" % raw_name) + raw_value = raw_value.strip(b" \t") + if not _value_re.fullmatch(raw_value): + raise ValueError("Invalid HTTP header value: %r" % raw_value) + + name = raw_name.decode("ascii") # guaranteed to be ASCII at this point + value = raw_value.decode("ascii", "surrogateescape") headers[name] = value else: @@ -173,7 +184,7 @@ async def read_headers(stream): return headers -async def read_line(stream): +async def read_line(stream: asyncio.StreamReader) -> bytes: """ Read a single line from ``stream``. @@ -199,14 +210,14 @@ class MultipleValuesError(LookupError): """ - def __str__(self): + def __str__(self) -> str: # Implement the same logic as KeyError_str in Objects/exceptions.c. if len(self.args) == 1: return repr(self.args[0]) return super().__str__() -class Headers(collections.abc.MutableMapping): +class Headers(MutableMapping[str, str]): """ Data structure for working with HTTP headers efficiently. @@ -245,19 +256,19 @@ class Headers(collections.abc.MutableMapping): __slots__ = ["_dict", "_list"] - def __init__(self, *args, **kwargs): - self._dict = {} - self._list = [] + def __init__(self, *args: Any, **kwargs: str) -> None: + self._dict: Dict[str, List[str]] = {} + self._list: List[Tuple[str, str]] = [] # MutableMapping.update calls __setitem__ for each (name, value) pair. self.update(*args, **kwargs) - def __str__(self): + def __str__(self) -> str: return "".join(f"{key}: {value}\r\n" for key, value in self._list) + "\r\n" - def __repr__(self): + def __repr__(self) -> str: return f"{self.__class__.__name__}({self._list!r})" - def copy(self): + def copy(self) -> "Headers": copy = self.__class__() copy._dict = self._dict.copy() copy._list = self._list.copy() @@ -265,40 +276,40 @@ def copy(self): # Collection methods - def __contains__(self, key): - return key.lower() in self._dict + def __contains__(self, key: object) -> bool: + return isinstance(key, str) and key.lower() in self._dict - def __iter__(self): + def __iter__(self) -> Iterator[str]: return iter(self._dict) - def __len__(self): + def __len__(self) -> int: return len(self._dict) # MutableMapping methods - def __getitem__(self, key): + def __getitem__(self, key: str) -> str: value = self._dict[key.lower()] if len(value) == 1: return value[0] else: raise MultipleValuesError(key) - def __setitem__(self, key, value): + def __setitem__(self, key: str, value: str) -> None: self._dict.setdefault(key.lower(), []).append(value) self._list.append((key, value)) - def __delitem__(self, key): + def __delitem__(self, key: str) -> None: key_lower = key.lower() self._dict.__delitem__(key_lower) # This is inefficent. Fortunately deleting HTTP headers is uncommon. 
self._list = [(k, v) for k, v in self._list if k.lower() != key_lower] - def __eq__(self, other): + def __eq__(self, other: Any) -> bool: if not isinstance(other, Headers): return NotImplemented return self._list == other._list - def clear(self): + def clear(self) -> None: """ Remove all headers. @@ -308,16 +319,19 @@ def clear(self): # Methods for handling multiple values - def get_all(self, key): + def get_all(self, key: str) -> List[str]: """ Return the (possibly empty) list of all values for a header. """ return self._dict.get(key.lower(), []) - def raw_items(self): + def raw_items(self) -> Iterator[Tuple[str, str]]: """ Return an iterator of (header name, header value). """ return iter(self._list) + + +HeadersLike = Union[Headers, Mapping[str, str], Iterable[Tuple[str, str]]] diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 5cc1bcc90..b28dcef72 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -10,11 +10,22 @@ import binascii import codecs import collections -import collections.abc import enum import logging import random import struct +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Awaitable, + Deque, + Iterable, + List, + Optional, + Union, + cast, +) from .exceptions import ( ConnectionClosed, @@ -22,8 +33,11 @@ PayloadTooBig, WebSocketProtocolError, ) +from .extensions.base import Extension from .framing import * +from .framing import Data from .handshake import * +from .http import Headers __all__ = ["WebSocketCommonProtocol"] @@ -155,20 +169,20 @@ class WebSocketCommonProtocol(asyncio.StreamReaderProtocol): def __init__( self, *, - host=None, - port=None, - secure=None, - ping_interval=20, - ping_timeout=20, - close_timeout=None, - max_size=2 ** 20, - max_queue=2 ** 5, - read_limit=2 ** 16, - write_limit=2 ** 16, - loop=None, - legacy_recv=False, - timeout=10, - ): + host: Optional[str] = None, + port: Optional[int] = None, + secure: Optional[bool] = None, + ping_interval: float = 20, + ping_timeout: float = 20, + close_timeout: Optional[float] = None, + max_size: int = 2 ** 20, + max_queue: int = 2 ** 5, + read_limit: int = 2 ** 16, + write_limit: int = 2 ** 16, + loop: Optional[asyncio.AbstractEventLoop] = None, + legacy_recv: bool = False, + timeout: float = 10, + ) -> None: # Backwards-compatibility: close_timeout used to be called timeout. # If both are specified, timeout is ignored. if close_timeout is None: @@ -200,8 +214,8 @@ def __init__( stream_reader = asyncio.StreamReader(limit=read_limit // 2, loop=loop) super().__init__(stream_reader, self.client_connected, loop) - self.reader = None - self.writer = None + self.reader: asyncio.StreamReader + self.writer: asyncio.StreamWriter self._drain_lock = asyncio.Lock(loop=loop) # This class implements the data transfer and closing handshake, which @@ -212,46 +226,50 @@ def __init__( logger.debug("%s - state = CONNECTING", self.side) # HTTP protocol parameters. - self.path = None - self.request_headers = None - self.response_headers = None + self.path: str + self.request_headers: Headers + self.response_headers: Headers # WebSocket protocol parameters. - self.extensions = [] - self.subprotocol = None + self.extensions: List[Extension] = [] + self.subprotocol: Optional[str] = None # The close code and reason are set when receiving a close frame or # losing the TCP connection. - self.close_code = None - self.close_reason = "" + self.close_code: int + self.close_reason: str # Completed when the connection state becomes CLOSED. 
Translates the # :meth:`connection_lost()` callback to a :class:`~asyncio.Future` # that can be awaited. (Other :class:`~asyncio.Protocol` callbacks are # translated by ``self.stream_reader``). - self.connection_lost_waiter = loop.create_future() + self.connection_lost_waiter: asyncio.Future[None] = loop.create_future() # Queue of received messages. - self.messages = collections.deque() - self._pop_message_waiter = None - self._put_message_waiter = None + self.messages: Deque[Data] = collections.deque() + self._pop_message_waiter: Optional[asyncio.Future[None]] = None + self._put_message_waiter: Optional[asyncio.Future[None]] = None # Mapping of ping IDs to waiters, in chronological order. - self.pings = collections.OrderedDict() + self.pings: collections.OrderedDict[ + bytes, asyncio.Future[None] + ] = collections.OrderedDict() # Task running the data transfer. - self.transfer_data_task = None + self.transfer_data_task: asyncio.Task[None] # Exception that occurred during data transfer, if any. - self.transfer_data_exc = None + self.transfer_data_exc: Optional[BaseException] = None # Task sending keepalive pings. - self.keepalive_ping_task = None + self.keepalive_ping_task: asyncio.Task[None] # Task closing the TCP connection. - self.close_connection_task = None + self.close_connection_task: asyncio.Task[None] - def client_connected(self, reader, writer): + def client_connected( + self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter + ) -> None: """ Callback when the TCP connection is established. @@ -263,7 +281,7 @@ def client_connected(self, reader, writer): self.reader = reader self.writer = writer - def connection_open(self): + def connection_open(self) -> None: """ Callback when the WebSocket opening handshake completes. @@ -284,7 +302,7 @@ def connection_open(self): # Public API @property - def local_address(self): + def local_address(self) -> Any: """ Local address of the connection. @@ -297,7 +315,7 @@ def local_address(self): return self.writer.get_extra_info("sockname") @property - def remote_address(self): + def remote_address(self) -> Any: """ Remote address of the connection. @@ -310,7 +328,7 @@ def remote_address(self): return self.writer.get_extra_info("peername") @property - def open(self): + def open(self) -> bool: """ This property is ``True`` when the connection is usable. @@ -324,7 +342,7 @@ def open(self): return self.state is State.OPEN and not self.transfer_data_task.done() @property - def closed(self): + def closed(self) -> bool: """ This property is ``True`` once the connection is closed. @@ -334,7 +352,7 @@ def closed(self): """ return self.state is State.CLOSED - async def wait_closed(self): + async def wait_closed(self) -> None: """ Wait until the connection is closed. @@ -346,7 +364,7 @@ async def wait_closed(self): """ await asyncio.shield(self.connection_lost_waiter) - async def __aiter__(self): + async def __aiter__(self) -> AsyncIterator[Data]: """ Iterate on received messages. @@ -364,7 +382,7 @@ async def __aiter__(self): else: raise - async def recv(self): + async def recv(self) -> Data: """ This coroutine receives the next message. @@ -405,7 +423,7 @@ async def recv(self): # Wait until there's a message in the queue (if necessary) or the # connection is closed. 
while len(self.messages) <= 0: - pop_message_waiter = self.loop.create_future() + pop_message_waiter: asyncio.Future[None] = self.loop.create_future() self._pop_message_waiter = pop_message_waiter try: # If asyncio.wait() is canceled, it doesn't cancel @@ -423,7 +441,7 @@ async def recv(self): # exception (or return None if legacy_recv is enabled). if not pop_message_waiter.done(): if self.legacy_recv: - return + return None # type: ignore else: assert self.state in [State.CLOSING, State.CLOSED] # Wait until the connection is closed to raise @@ -440,7 +458,9 @@ async def recv(self): return message - async def send(self, data): + async def send( + self, message: Union[Data, Iterable[Data], AsyncIterable[Data]] + ) -> None: """ This coroutine sends a message. @@ -462,31 +482,30 @@ async def send(self, data): # Unfragmented message -- this case must be handled first because # strings and bytes-like objects are iterable. - try: - opcode, data = prepare_data(data) - except TypeError: - # Perhaps data is an iterator, see below. - pass - else: + if isinstance(message, (str, bytes, bytearray, memoryview)): + opcode, data = prepare_data(message) await self.write_frame(True, opcode, data) - return # Fragmented message -- regular iterator. - if isinstance(data, collections.abc.Iterable): - iter_data = iter(data) + elif isinstance(message, Iterable): + + # Work around https://github.com/python/mypy/issues/6227 + message = cast(Iterable[Data], message) + + iter_message = iter(message) # First fragment. try: - data = next(iter_data) + message_chunk = next(iter_message) except StopIteration: return - opcode, data = prepare_data(data) + opcode, data = prepare_data(message_chunk) await self.write_frame(False, opcode, data) # Other fragments. - for data in iter_data: - confirm_opcode, data = prepare_data(data) + for message_chunk in iter_message: + confirm_opcode, data = prepare_data(message_chunk) if confirm_opcode != opcode: # We're half-way through a fragmented message and we can't # complete it. This makes the connection unusable. @@ -499,22 +518,22 @@ async def send(self, data): # Fragmented message -- asynchronous iterator - elif isinstance(data, collections.abc.AsyncIterable): - # aiter_data = aiter(data) without aiter - aiter_data = type(data).__aiter__(data) + elif isinstance(message, AsyncIterable): + # aiter_message = aiter(message) without aiter + aiter_message = type(message).__aiter__(message) # First fragment. try: - # data = anext(aiter_data) without anext - data = await type(aiter_data).__anext__(aiter_data) + # message_chunk = anext(aiter_message) without anext + message_chunk = await type(aiter_message).__anext__(aiter_message) except StopAsyncIteration: return - opcode, data = prepare_data(data) + opcode, data = prepare_data(message_chunk) await self.write_frame(False, opcode, data) # Other fragments. - async for data in aiter_data: - confirm_opcode, data = prepare_data(data) + async for message_chunk in aiter_message: + confirm_opcode, data = prepare_data(message_chunk) if confirm_opcode != opcode: # We're half-way through a fragmented message and we can't # complete it. This makes the connection unusable. @@ -528,7 +547,7 @@ async def send(self, data): else: raise TypeError("data must be bytes, str, or iterable") - async def close(self, code=1000, reason=""): + async def close(self, code: int = 1000, reason: str = "") -> None: """ This coroutine performs the closing handshake. 
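Before moving on to ``close()``, here is a usage sketch for the retyped ``send()``: a single ``str`` or bytes-like object is sent as one message, while an iterable (or async iterable) is sent as a fragmented message, one frame per item. The URI and the presence of an echo server at that address are assumptions made for the example:

.. code:: python

    import asyncio
    import websockets

    async def hello(uri: str) -> None:
        async with websockets.connect(uri) as websocket:
            # One unfragmented text message.
            await websocket.send("Hello world!")
            # One fragmented text message; all items must map to the same
            # opcode (all str or all bytes-like), otherwise the connection
            # is failed.
            await websocket.send(["Hello ", "world", "!"])

    asyncio.get_event_loop().run_until_complete(hello("ws://localhost:8765"))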
@@ -577,7 +596,7 @@ async def close(self, code=1000, reason=""): # Wait for the close connection task to close the TCP connection. await asyncio.shield(self.close_connection_task) - async def ping(self, data=None): + async def ping(self, data: Optional[bytes] = None) -> Awaitable[None]: """ This coroutine sends a ping. @@ -615,7 +634,7 @@ async def ping(self, data=None): return asyncio.shield(self.pings[data]) - async def pong(self, data=b""): + async def pong(self, data: bytes = b"") -> None: """ This coroutine sends a pong. @@ -634,7 +653,7 @@ async def pong(self, data=b""): # Private methods - no guarantees. - async def ensure_open(self): + async def ensure_open(self) -> None: """ Check that the WebSocket connection is open. @@ -665,8 +684,7 @@ async def ensure_open(self): # will complete within 4 or 5 * close_timeout after close(). The # CLOSING state also occurs when failing the connection. In that # case self.close_connection_task will complete even faster. - if self.close_code is None: - await asyncio.shield(self.close_connection_task) + await asyncio.shield(self.close_connection_task) raise ConnectionClosed( self.close_code, self.close_reason ) from self.transfer_data_exc @@ -675,7 +693,7 @@ async def ensure_open(self): assert self.state is State.CONNECTING raise InvalidState("WebSocket connection isn't established yet") - async def transfer_data(self): + async def transfer_data(self) -> None: """ Read incoming messages and put them in a queue. @@ -742,7 +760,7 @@ async def transfer_data(self): self.transfer_data_exc = exc self.fail_connection(1011) - async def read_message(self): + async def read_message(self) -> Optional[Data]: """ Read a single message from the connection. @@ -755,7 +773,7 @@ async def read_message(self): # A close frame was received. if frame is None: - return + return None if frame.opcode == OP_TEXT: text = True @@ -769,19 +787,21 @@ async def read_message(self): return frame.data.decode("utf-8") if text else frame.data # 5.4. Fragmentation - chunks = [] + chunks: List[Data] = [] max_size = self.max_size if text: - decoder = codecs.getincrementaldecoder("utf-8")(errors="strict") + decoder_factory = codecs.getincrementaldecoder("utf-8") + # https://github.com/python/typeshed/pull/2752 + decoder = decoder_factory(errors="strict") # type: ignore if max_size is None: - def append(frame): + def append(frame: Frame) -> None: nonlocal chunks chunks.append(decoder.decode(frame.data, frame.fin)) else: - def append(frame): + def append(frame: Frame) -> None: nonlocal chunks, max_size chunks.append(decoder.decode(frame.data, frame.fin)) max_size -= len(frame.data) @@ -789,13 +809,13 @@ def append(frame): else: if max_size is None: - def append(frame): + def append(frame: Frame) -> None: nonlocal chunks chunks.append(frame.data) else: - def append(frame): + def append(frame: Frame) -> None: nonlocal chunks, max_size chunks.append(frame.data) max_size -= len(frame.data) @@ -810,9 +830,10 @@ def append(frame): raise WebSocketProtocolError("Unexpected opcode") append(frame) - return ("" if text else b"").join(chunks) + # mypy cannot figure out that chunks have the proper type. + return ("" if text else b"").join(chunks) # type: ignore - async def read_data_frame(self, max_size): + async def read_data_frame(self, max_size: int) -> Optional[Frame]: """ Read a single data frame from the connection. @@ -834,7 +855,7 @@ async def read_data_frame(self, max_size): # serialize_close() because that fails when the close frame is # empty and parse_close() synthetizes a 1005 close code. 
await self.write_close_frame(frame.data) - return + return None elif frame.opcode == OP_PING: # Answer pings. @@ -851,7 +872,7 @@ async def read_data_frame(self, max_size): ping_id = None ping_ids = [] while ping_id != frame.data: - ping_id, pong_waiter = self.pings.popitem(0) + ping_id, pong_waiter = self.pings.popitem(last=False) ping_ids.append(ping_id) pong_waiter.set_result(None) pong_hex = binascii.hexlify(frame.data).decode() or "[empty]" @@ -881,7 +902,7 @@ async def read_data_frame(self, max_size): else: return frame - async def read_frame(self, max_size): + async def read_frame(self, max_size: int) -> Frame: """ Read a single frame from the connection. @@ -895,7 +916,9 @@ async def read_frame(self, max_size): logger.debug("%s < %r", self.side, frame) return frame - async def write_frame(self, fin, opcode, data, *, _expected_state=State.OPEN): + async def write_frame( + self, fin: bool, opcode: int, data: bytes, *, _expected_state: int = State.OPEN + ) -> None: # Defensive assertion for protocol compliance. if self.state is not _expected_state: # pragma: no cover raise InvalidState( @@ -920,7 +943,7 @@ async def write_frame(self, fin, opcode, data, *, _expected_state=State.OPEN): # with the correct code and reason. await self.ensure_open() - async def write_close_frame(self, data=b""): + async def write_close_frame(self, data: bytes = b"") -> None: """ Write a close frame if and only if the connection state is OPEN. @@ -938,7 +961,7 @@ async def write_close_frame(self, data=b""): # 7.1.2. Start the WebSocket Closing Handshake await self.write_frame(True, OP_CLOSE, data, _expected_state=State.CLOSING) - async def keepalive_ping(self): + async def keepalive_ping(self) -> None: """ Send a Ping frame and wait for a Pong frame at regular intervals. @@ -978,7 +1001,7 @@ async def keepalive_ping(self): except Exception: logger.warning("Unexpected exception in keepalive ping task", exc_info=True) - async def close_connection(self): + async def close_connection(self) -> None: """ 7.1.1. Close the WebSocket Connection @@ -992,18 +1015,18 @@ async def close_connection(self): """ try: # Wait for the data transfer phase to complete. - if self.transfer_data_task is not None: + if hasattr(self, "transfer_data_task"): try: await self.transfer_data_task except asyncio.CancelledError: pass # Cancel the keepalive ping task. - if self.keepalive_ping_task is not None: + if hasattr(self, "keepalive_ping_task"): self.keepalive_ping_task.cancel() # A client should wait for a TCP close from the server. - if self.is_client and self.transfer_data_task is not None: + if self.is_client and hasattr(self, "transfer_data_task"): if await self.wait_for_connection_lost(): return logger.debug("%s ! timed out waiting for TCP close", self.side) @@ -1037,12 +1060,13 @@ async def close_connection(self): # Abort the TCP connection. Buffers are discarded. logger.debug("%s x aborting TCP connection", self.side) - self.writer.transport.abort() + # mypy thinks self.writer.transport is a BaseTransport, not a Transport. + self.writer.transport.abort() # type: ignore # connection_lost() is called quickly after aborting. await self.wait_for_connection_lost() - async def wait_for_connection_lost(self): + async def wait_for_connection_lost(self) -> bool: """ Wait until the TCP connection is closed or ``self.close_timeout`` elapses. @@ -1063,7 +1087,7 @@ async def wait_for_connection_lost(self): # and the moment this coroutine resumes running. 
return self.connection_lost_waiter.done() - def fail_connection(self, code=1006, reason=""): + def fail_connection(self, code: int = 1006, reason: str = "") -> None: """ 7.1.7. Fail the WebSocket Connection @@ -1091,7 +1115,7 @@ def fail_connection(self, code=1006, reason=""): # Cancel transfer_data_task if the opening handshake succeeded. # cancel() is idempotent and ignored if the task is done already. - if self.transfer_data_task is not None: + if hasattr(self, "transfer_data_task"): self.transfer_data_task.cancel() # Send a close frame when the state is OPEN (a close frame was already @@ -1121,10 +1145,10 @@ def fail_connection(self, code=1006, reason=""): ) # Start close_connection_task if the opening handshake didn't succeed. - if self.close_connection_task is None: + if not hasattr(self, "close_connection_task"): self.close_connection_task = self.loop.create_task(self.close_connection()) - def abort_keepalive_pings(self): + def abort_keepalive_pings(self) -> None: """ Raise ConnectionClosed in pending keepalive pings. @@ -1150,7 +1174,7 @@ def abort_keepalive_pings(self): # asyncio.StreamReaderProtocol methods - def connection_made(self, transport): + def connection_made(self, transport: asyncio.BaseTransport) -> None: """ Configure write buffer limits. @@ -1165,10 +1189,11 @@ def connection_made(self, transport): """ logger.debug("%s - event = connection_made(%s)", self.side, transport) - transport.set_write_buffer_limits(self.write_limit) + # mypy thinks transport is a BaseTransport, not a Transport. + transport.set_write_buffer_limits(self.write_limit) # type: ignore super().connection_made(transport) - def eof_received(self): + def eof_received(self) -> bool: """ Close the transport after receiving EOF. @@ -1193,9 +1218,9 @@ def eof_received(self): """ logger.debug("%s - event = eof_received()", self.side) super().eof_received() - return + return False - def connection_lost(self, exc): + def connection_lost(self, exc: Optional[Exception]) -> None: """ 7.1.4. The WebSocket Connection is Closed. 
@@ -1203,8 +1228,10 @@ def connection_lost(self, exc): logger.debug("%s - event = connection_lost(%s)", self.side, exc) self.state = State.CLOSED logger.debug("%s - state = CLOSED", self.side) - if self.close_code is None: + if not hasattr(self, "close_code"): self.close_code = 1006 + if not hasattr(self, "close_reason"): + self.close_reason = "" logger.debug( "%s x code = %d, reason = %s", self.side, diff --git a/src/websockets/server.py b/src/websockets/server.py index 7fd32ba1e..efb3ebee3 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -8,7 +8,23 @@ import email.utils import http import logging +import socket import warnings +from types import TracebackType +from typing import ( + Any, + Awaitable, + Callable, + Generator, + List, + Optional, + Sequence, + Set, + Tuple, + Type, + Union, + cast, +) from .exceptions import ( AbortHandshake, @@ -19,10 +35,16 @@ InvalidUpgrade, NegotiationError, ) +from .extensions.base import Extension, ServerExtensionFactory from .extensions.permessage_deflate import ServerPerMessageDeflateFactory from .handshake import build_response, check_request -from .headers import build_extension_list, parse_extension_list, parse_subprotocol_list -from .http import USER_AGENT, Headers, MultipleValuesError, read_request +from .headers import ( + ExtensionHeader, + build_extension_list, + parse_extension_list, + parse_subprotocol_list, +) +from .http import USER_AGENT, Headers, HeadersLike, MultipleValuesError, read_request from .protocol import State, WebSocketCommonProtocol @@ -31,6 +53,11 @@ logger = logging.getLogger(__name__) +HeadersLikeOrCallable = Union[HeadersLike, Callable[[str, Headers], HeadersLike]] + +HTTPResponse = Tuple[http.HTTPStatus, HeadersLike, bytes] + + class WebSocketServerProtocol(WebSocketCommonProtocol): """ Complete WebSocket server implementation as an :class:`asyncio.Protocol`. @@ -48,17 +75,22 @@ class WebSocketServerProtocol(WebSocketCommonProtocol): def __init__( self, - ws_handler, - ws_server, + ws_handler: Callable[["WebSocketServerProtocol", str], Awaitable[Any]], + ws_server: "WebSocketServer", *, - origins=None, - extensions=None, - subprotocols=None, - extra_headers=None, - process_request=None, - select_subprotocol=None, - **kwds, - ): + origins: Optional[List[Optional[str]]] = None, + extensions: Optional[List[ServerExtensionFactory]] = None, + subprotocols: Optional[List[str]] = None, + extra_headers: Optional[HeadersLikeOrCallable] = None, + process_request: Optional[ + Callable[ + [str, Headers], + Union[Optional[HTTPResponse], Awaitable[Optional[HTTPResponse]]], + ] + ] = None, + select_subprotocol: Optional[Callable[[List[str], List[str]], str]] = None, + **kwds: Any, + ) -> None: # For backwards-compatibility with 6.0 or earlier. if origins is not None and "" in origins: warnings.warn("use None instead of '' in origins", DeprecationWarning) @@ -73,7 +105,7 @@ def __init__( self._select_subprotocol = select_subprotocol super().__init__(**kwds) - def connection_made(self, transport): + def connection_made(self, transport: asyncio.BaseTransport) -> None: """ Register connection and initialize a task to handle it. @@ -86,7 +118,7 @@ def connection_made(self, transport): self.ws_server.register(self) self.handler_task = self.loop.create_task(self.handler()) - async def handler(self): + async def handler(self) -> None: """ Handle the lifecycle of a WebSocket connection. 
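The ``HTTPResponse`` alias introduced above, ``(http.HTTPStatus, HeadersLike, bytes)``, is what a ``process_request`` hook may return to short-circuit the opening handshake. A hypothetical health-check hook; the ``/healthz`` path and the wiring shown in the comment are illustrative, not part of the patch:

.. code:: python

    import http
    from typing import Optional, Tuple

    from websockets.http import Headers, HeadersLike

    HTTPResponse = Tuple[http.HTTPStatus, HeadersLike, bytes]

    def health_check(path: str, request_headers: Headers) -> Optional[HTTPResponse]:
        # Answer liveness probes with plain HTTP instead of a WebSocket handshake.
        if path == "/healthz":
            return http.HTTPStatus.OK, Headers([("Content-Type", "text/plain")]), b"OK\n"
        return None  # fall through to the normal opening handshake

    # Typical wiring, inside a coroutine:
    #     await websockets.serve(handler, "localhost", 8765, process_request=health_check)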
@@ -114,34 +146,31 @@ async def handler(self): logger.debug("Invalid origin", exc_info=True) status, headers, body = ( http.HTTPStatus.FORBIDDEN, - [], + Headers(), (str(exc) + "\n").encode(), ) elif isinstance(exc, InvalidUpgrade): logger.debug("Invalid upgrade", exc_info=True) status, headers, body = ( http.HTTPStatus.UPGRADE_REQUIRED, - [("Upgrade", "websocket")], + Headers([("Upgrade", "websocket")]), (str(exc) + "\n").encode(), ) elif isinstance(exc, InvalidHandshake): logger.debug("Invalid handshake", exc_info=True) status, headers, body = ( http.HTTPStatus.BAD_REQUEST, - [], + Headers(), (str(exc) + "\n").encode(), ) else: logger.warning("Error in opening handshake", exc_info=True) status, headers, body = ( http.HTTPStatus.INTERNAL_SERVER_ERROR, - [], + Headers(), b"See server log for more information.\n", ) - if not isinstance(headers, Headers): - headers = Headers(headers) - headers.setdefault("Date", email.utils.formatdate(usegmt=True)) headers.setdefault("Server", USER_AGENT) headers.setdefault("Content-Length", str(len(body))) @@ -184,7 +213,7 @@ async def handler(self): # connections before terminating. self.ws_server.unregister(self) - async def read_http_request(self): + async def read_http_request(self) -> Tuple[str, Headers]: """ Read request line and headers from the HTTP request. @@ -209,7 +238,9 @@ async def read_http_request(self): return path, headers - def write_http_response(self, status, headers, body=None): + def write_http_response( + self, status: http.HTTPStatus, headers: Headers, body: Optional[bytes] = None + ) -> None: """ Write status line and headers to the HTTP response. @@ -232,7 +263,9 @@ def write_http_response(self, status, headers, body=None): logger.debug("%s > Body (%d bytes)", self.side, len(body)) self.writer.write(body) - def process_request(self, path, request_headers): + def process_request( + self, path: str, request_headers: Headers + ) -> Union[Optional[HTTPResponse], Awaitable[Optional[HTTPResponse]]]: """ Intercept the HTTP request and return an HTTP response if needed. @@ -264,9 +297,12 @@ def process_request(self, path, request_headers): """ if self._process_request is not None: return self._process_request(path, request_headers) + return None @staticmethod - def process_origin(headers, origins=None): + def process_origin( + headers: Headers, origins: Optional[List[Optional[str]]] = None + ) -> Optional[str]: """ Handle the Origin HTTP request header. @@ -286,7 +322,9 @@ def process_origin(headers, origins=None): return origin @staticmethod - def process_extensions(headers, available_extensions): + def process_extensions( + headers: Headers, available_extensions: Optional[List[ServerExtensionFactory]] + ) -> Tuple[Optional[str], List[Extension]]: """ Handle the Sec-WebSocket-Extensions HTTP request header. @@ -319,14 +357,16 @@ def process_extensions(headers, available_extensions): order of extensions, may be implemented by overriding this method. """ - response_header = [] - accepted_extensions = [] + response_header_value: Optional[str] = None + + extension_headers: List[ExtensionHeader] = [] + accepted_extensions: List[Extension] = [] header_values = headers.get_all("Sec-WebSocket-Extensions") if header_values and available_extensions: - parsed_header_values = sum( + parsed_header_values: List[ExtensionHeader] = sum( [parse_extension_list(header_value) for header_value in header_values], [], ) @@ -348,7 +388,7 @@ def process_extensions(headers, available_extensions): continue # Add matching extension to the final list. 
- response_header.append((name, response_params)) + extension_headers.append((name, response_params)) accepted_extensions.append(extension) # Break out of the loop once we have a match. @@ -358,15 +398,15 @@ def process_extensions(headers, available_extensions): # matched what the client sent. The extension is declined. # Serialize extension header. - if response_header: - response_header = build_extension_list(response_header) - else: - response_header = None + if extension_headers: + response_header_value = build_extension_list(extension_headers) - return response_header, accepted_extensions + return response_header_value, accepted_extensions # Not @staticmethod because it calls self.select_subprotocol() - def process_subprotocol(self, headers, available_subprotocols): + def process_subprotocol( + self, headers: Headers, available_subprotocols: Optional[List[str]] + ) -> Optional[str]: """ Handle the Sec-WebSocket-Protocol HTTP request header. @@ -374,13 +414,13 @@ def process_subprotocol(self, headers, available_subprotocols): as the selected subprotocol. """ - subprotocol = None + subprotocol: Optional[str] = None header_values = headers.get_all("Sec-WebSocket-Protocol") if header_values and available_subprotocols: - parsed_header_values = sum( + parsed_header_values: List[str] = sum( [ parse_subprotocol_list(header_value) for header_value in header_values @@ -394,7 +434,9 @@ def process_subprotocol(self, headers, available_subprotocols): return subprotocol - def select_subprotocol(self, client_subprotocols, server_subprotocols): + def select_subprotocol( + self, client_subprotocols: List[str], server_subprotocols: List[str] + ) -> Optional[str]: """ Pick a subprotocol among those offered by the client. @@ -427,11 +469,11 @@ def select_subprotocol(self, client_subprotocols, server_subprotocols): async def handshake( self, - origins=None, - available_extensions=None, - available_subprotocols=None, - extra_headers=None, - ): + origins: Optional[List[Optional[str]]] = None, + available_extensions: Optional[List[ServerExtensionFactory]] = None, + available_subprotocols: Optional[List[str]] = None, + extra_headers: Optional[HeadersLikeOrCallable] = None, + ) -> str: """ Perform the server side of the opening handshake. @@ -460,10 +502,9 @@ async def handshake( # Hook for customizing request handling, for example checking # authentication or treating some paths as plain HTTP endpoints. - if asyncio.iscoroutinefunction(self.process_request): - early_response = await self.process_request(path, request_headers) - else: - early_response = self.process_request(path, request_headers) + early_response = self.process_request(path, request_headers) + if isinstance(early_response, Awaitable): + early_response = await early_response # Change the response to a 503 error if the server is shutting down. if not self.ws_server.is_serving(): @@ -538,20 +579,20 @@ class WebSocketServer: """ - def __init__(self, loop): + def __init__(self, loop: asyncio.AbstractEventLoop): # Store a reference to loop to avoid relying on self.server._loop. self.loop = loop # Keep track of active connections. - self.websockets = set() + self.websockets: Set[WebSocketServerProtocol] = set() # Task responsible for closing the server and terminating connections. - self.close_task = None + self.close_task: Optional[asyncio.Task[None]] = None # Completed when the server is closed and connections are terminated. 
- self.closed_waiter = loop.create_future() + self.closed_waiter: asyncio.Future[None] = loop.create_future() - def wrap(self, server): + def wrap(self, server: asyncio.AbstractServer) -> None: """ Attach to a given :class:`~asyncio.Server`. @@ -568,31 +609,33 @@ def wrap(self, server): """ self.server = server - def register(self, protocol): + def register(self, protocol: WebSocketServerProtocol) -> None: """ Register a connection with this server. """ self.websockets.add(protocol) - def unregister(self, protocol): + def unregister(self, protocol: WebSocketServerProtocol) -> None: """ Unregister a connection with this server. """ self.websockets.remove(protocol) - def is_serving(self): + def is_serving(self) -> bool: """ Tell whether the server is accepting new connections or shutting down. """ try: - return self.server.is_serving() # Python ≥ 3.7 + # Python ≥ 3.7 + return self.server.is_serving() # type: ignore except AttributeError: # pragma: no cover - return self.server.sockets is not None # Python < 3.7 + # Python < 3.7 + return self.server.sockets is not None - def close(self): + def close(self) -> None: """ Close the server and terminate connections with close code 1001. @@ -602,7 +645,7 @@ def close(self): if self.close_task is None: self.close_task = self.loop.create_task(self._close()) - async def _close(self): + async def _close(self) -> None: """ Implementation of :meth:`close`. @@ -647,7 +690,7 @@ async def _close(self): # Tell wait_closed() to return. self.closed_waiter.set_result(None) - async def wait_closed(self): + async def wait_closed(self) -> None: """ Wait until the server is closed and all connections are terminated. @@ -658,7 +701,7 @@ async def wait_closed(self): await asyncio.shield(self.closed_waiter) @property - def sockets(self): + def sockets(self) -> Optional[List[socket.socket]]: """ List of :class:`~socket.socket` objects the server is listening to. 
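With the annotations in place, a graceful shutdown reads like the asyncio server API it wraps: ``close()`` stops accepting connections and closes existing ones with code 1001, and ``wait_closed()`` blocks until everything is terminated. A minimal sketch; the handler, host and port are placeholders:

.. code:: python

    import asyncio
    import websockets

    async def echo(websocket, path):
        async for message in websocket:
            await websocket.send(message)

    async def run_for(seconds: float) -> None:
        server = await websockets.serve(echo, "localhost", 8765)
        try:
            await asyncio.sleep(seconds)
        finally:
            # Stop accepting connections, close existing ones, then wait.
            server.close()
            await server.wait_closed()

    asyncio.get_event_loop().run_until_complete(run_for(10))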
@@ -754,31 +797,33 @@ class Serve: def __init__( self, - ws_handler, - host=None, - port=None, + ws_handler: Callable[[WebSocketServerProtocol, str], Awaitable[Any]], + host: Optional[Union[str, Sequence[str]]] = None, + port: Optional[int] = None, *, - path=None, - create_protocol=None, - ping_interval=20, - ping_timeout=20, - close_timeout=None, - max_size=2 ** 20, - max_queue=2 ** 5, - read_limit=2 ** 16, - write_limit=2 ** 16, - loop=None, - legacy_recv=False, - klass=WebSocketServerProtocol, - timeout=10, - compression="deflate", - origins=None, - extensions=None, - subprotocols=None, - extra_headers=None, - process_request=None, - select_subprotocol=None, - **kwds, + path: Optional[str] = None, + create_protocol: Optional[Type[WebSocketServerProtocol]] = None, + ping_interval: float = 20, + ping_timeout: float = 20, + close_timeout: Optional[float] = None, + max_size: int = 2 ** 20, + max_queue: int = 2 ** 5, + read_limit: int = 2 ** 16, + write_limit: int = 2 ** 16, + loop: Optional[asyncio.AbstractEventLoop] = None, + legacy_recv: bool = False, + klass: Type[WebSocketServerProtocol] = WebSocketServerProtocol, + timeout: float = 10, + compression: Optional[str] = "deflate", + origins: Optional[List[Optional[str]]] = None, + extensions: Optional[List[ServerExtensionFactory]] = None, + subprotocols: Optional[List[str]] = None, + extra_headers: Optional[HeadersLikeOrCallable] = None, + process_request: Optional[ + Callable[[str, Headers], Optional[HTTPResponse]] + ] = None, + select_subprotocol: Optional[Callable[[List[str], List[str]], str]] = None, + **kwds: Any, ): # Backwards-compatibility: close_timeout used to be called timeout. # If both are specified, timeout is ignored. @@ -832,6 +877,9 @@ def __init__( ) if path is None: + # https://github.com/python/typeshed/pull/2763 + host = cast(str, host) + port = cast(int, port) creating_server = loop.create_server(factory, host, port, **kwds) else: creating_server = loop.create_unix_server(factory, path, **kwds) @@ -841,22 +889,27 @@ def __init__( self.ws_server = ws_server @asyncio.coroutine - def __iter__(self): - return self.__await_impl__() + def __iter__(self) -> Generator[Any, None, WebSocketServer]: + return (yield from self.__await__()) - async def __aenter__(self): + async def __aenter__(self) -> WebSocketServer: return await self - async def __aexit__(self, exc_type, exc_value, traceback): + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: self.ws_server.close() await self.ws_server.wait_closed() - async def __await_impl__(self): + async def __await_impl__(self) -> WebSocketServer: server = await self._creating_server self.ws_server.wrap(server) return self.ws_server - def __await__(self): + def __await__(self) -> Generator[Any, None, WebSocketServer]: # __await__() must return a type that I don't know how to obtain except # by calling __await__() on the return value of an async function. # I'm not finding a better way to take advantage of PEP 492. @@ -866,7 +919,11 @@ def __await__(self): serve = Serve -def unix_serve(ws_handler, path, **kwargs): +def unix_serve( + ws_handler: Callable[[WebSocketServerProtocol, str], Awaitable[Any]], + path: str, + **kwargs: Any, +) -> Serve: """ Similar to :func:`serve()`, but for listening on Unix sockets. 
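The ``__aenter__`` / ``__aexit__`` pair above is what makes ``serve()`` and ``unix_serve()`` usable as async context managers. A sketch, assuming this patch is applied; the Unix socket path is only an example value:

.. code:: python

    import asyncio

    import websockets
    from websockets.server import unix_serve

    async def echo(websocket, path):
        async for message in websocket:
            await websocket.send(message)

    async def main() -> None:
        # TCP listener, closed automatically when the block exits.
        async with websockets.serve(echo, "localhost", 8765):
            await asyncio.sleep(10)
        # Same pattern on a Unix socket.
        async with unix_serve(echo, "/tmp/websockets.sock"):
            await asyncio.sleep(10)

    asyncio.get_event_loop().run_until_complete(main())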
diff --git a/src/websockets/uri.py b/src/websockets/uri.py index 730adf54e..cf6b798ee 100644 --- a/src/websockets/uri.py +++ b/src/websockets/uri.py @@ -6,17 +6,29 @@ """ -import collections import urllib.parse +from typing import NamedTuple, Optional, Tuple from .exceptions import InvalidURI __all__ = ["parse_uri", "WebSocketURI"] -WebSocketURI = collections.namedtuple( - "WebSocketURI", ["secure", "host", "port", "resource_name", "user_info"] +# Switch to class-based syntax when dropping support for Python < 3.6. + +# Convert to a dataclass when dropping support for Python < 3.7. + +WebSocketURI = NamedTuple( + "WebSocketURI", + [ + ("secure", bool), + ("host", str), + ("port", int), + ("resource_name", str), + ("user_info", Optional[Tuple[str, str]]), + ], ) + WebSocketURI.__doc__ = """WebSocket URI. * ``secure`` is the secure flag @@ -31,7 +43,7 @@ """ -def parse_uri(uri): +def parse_uri(uri: str) -> WebSocketURI: """ This function parses and validates a WebSocket URI. @@ -40,22 +52,22 @@ def parse_uri(uri): Otherwise it raises an :exc:`~websockets.exceptions.InvalidURI` exception. """ - uri = urllib.parse.urlparse(uri) + parsed = urllib.parse.urlparse(uri) try: - assert uri.scheme in ["ws", "wss"] - assert uri.params == "" - assert uri.fragment == "" - assert uri.hostname is not None + assert parsed.scheme in ["ws", "wss"] + assert parsed.params == "" + assert parsed.fragment == "" + assert parsed.hostname is not None except AssertionError as exc: - raise InvalidURI(f"{uri} isn't a valid URI") from exc - - secure = uri.scheme == "wss" - host = uri.hostname - port = uri.port or (443 if secure else 80) - resource_name = uri.path or "/" - if uri.query: - resource_name += "?" + uri.query + raise InvalidURI(uri) from exc + + secure = parsed.scheme == "wss" + host = parsed.hostname + port = parsed.port or (443 if secure else 80) + resource_name = parsed.path or "/" + if parsed.query: + resource_name += "?" + parsed.query user_info = None - if uri.username or uri.password: - user_info = (uri.username, uri.password) + if parsed.username or parsed.password: + user_info = (parsed.username, parsed.password) return WebSocketURI(secure, host, port, resource_name, user_info) diff --git a/src/websockets/utils.py b/src/websockets/utils.py index 193f8fc32..e289e6980 100644 --- a/src/websockets/utils.py +++ b/src/websockets/utils.py @@ -4,7 +4,7 @@ __all__ = ["apply_mask"] -def apply_mask(data, mask): +def apply_mask(data: bytes, mask: bytes) -> bytes: """ Apply masking to the data of a WebSocket message. diff --git a/tests/extensions/test_permessage_deflate.py b/tests/extensions/test_permessage_deflate.py index 80003ca2d..0ec49c6c0 100644 --- a/tests/extensions/test_permessage_deflate.py +++ b/tests/extensions/test_permessage_deflate.py @@ -37,6 +37,225 @@ def assertExtensionEqual(self, extension1, extension2): ) +class PerMessageDeflateTests(unittest.TestCase, ExtensionTestsMixin): + def setUp(self): + # Set up an instance of the permessage-deflate extension with the most + # common settings. Since the extension is symmetrical, this instance + # may be used for testing both encoding and decoding. + self.extension = PerMessageDeflate(False, False, 15, 15) + + def test_name(self): + assert self.extension.name == "permessage-deflate" + + def test_repr(self): + self.assertExtensionEqual(eval(repr(self.extension)), self.extension) + + # Control frames aren't encoded or decoded. 
+ + def test_no_encode_decode_ping_frame(self): + frame = Frame(True, OP_PING, b"") + + self.assertEqual(self.extension.encode(frame), frame) + + self.assertEqual(self.extension.decode(frame), frame) + + def test_no_encode_decode_pong_frame(self): + frame = Frame(True, OP_PONG, b"") + + self.assertEqual(self.extension.encode(frame), frame) + + self.assertEqual(self.extension.decode(frame), frame) + + def test_no_encode_decode_close_frame(self): + frame = Frame(True, OP_CLOSE, serialize_close(1000, "")) + + self.assertEqual(self.extension.encode(frame), frame) + + self.assertEqual(self.extension.decode(frame), frame) + + # Data frames are encoded and decoded. + + def test_encode_decode_text_frame(self): + frame = Frame(True, OP_TEXT, "café".encode("utf-8")) + + enc_frame = self.extension.encode(frame) + + self.assertEqual(enc_frame, frame._replace(rsv1=True, data=b"JNL;\xbc\x12\x00")) + + dec_frame = self.extension.decode(enc_frame) + + self.assertEqual(dec_frame, frame) + + def test_encode_decode_binary_frame(self): + frame = Frame(True, OP_BINARY, b"tea") + + enc_frame = self.extension.encode(frame) + + self.assertEqual(enc_frame, frame._replace(rsv1=True, data=b"*IM\x04\x00")) + + dec_frame = self.extension.decode(enc_frame) + + self.assertEqual(dec_frame, frame) + + def test_encode_decode_fragmented_text_frame(self): + frame1 = Frame(False, OP_TEXT, "café".encode("utf-8")) + frame2 = Frame(False, OP_CONT, " & ".encode("utf-8")) + frame3 = Frame(True, OP_CONT, "croissants".encode("utf-8")) + + enc_frame1 = self.extension.encode(frame1) + enc_frame2 = self.extension.encode(frame2) + enc_frame3 = self.extension.encode(frame3) + + self.assertEqual( + enc_frame1, + frame1._replace(rsv1=True, data=b"JNL;\xbc\x12\x00\x00\x00\xff\xff"), + ) + self.assertEqual( + enc_frame2, frame2._replace(rsv1=True, data=b"RPS\x00\x00\x00\x00\xff\xff") + ) + self.assertEqual( + enc_frame3, frame3._replace(rsv1=True, data=b"J.\xca\xcf,.N\xcc+)\x06\x00") + ) + + dec_frame1 = self.extension.decode(enc_frame1) + dec_frame2 = self.extension.decode(enc_frame2) + dec_frame3 = self.extension.decode(enc_frame3) + + self.assertEqual(dec_frame1, frame1) + self.assertEqual(dec_frame2, frame2) + self.assertEqual(dec_frame3, frame3) + + def test_encode_decode_fragmented_binary_frame(self): + frame1 = Frame(False, OP_TEXT, b"tea ") + frame2 = Frame(True, OP_CONT, b"time") + + enc_frame1 = self.extension.encode(frame1) + enc_frame2 = self.extension.encode(frame2) + + self.assertEqual( + enc_frame1, frame1._replace(rsv1=True, data=b"*IMT\x00\x00\x00\x00\xff\xff") + ) + self.assertEqual( + enc_frame2, frame2._replace(rsv1=True, data=b"*\xc9\xccM\x05\x00") + ) + + dec_frame1 = self.extension.decode(enc_frame1) + dec_frame2 = self.extension.decode(enc_frame2) + + self.assertEqual(dec_frame1, frame1) + self.assertEqual(dec_frame2, frame2) + + def test_no_decode_text_frame(self): + frame = Frame(True, OP_TEXT, "café".encode("utf-8")) + + # Try decoding a frame that wasn't encoded. + self.assertEqual(self.extension.decode(frame), frame) + + def test_no_decode_binary_frame(self): + frame = Frame(True, OP_TEXT, b"tea") + + # Try decoding a frame that wasn't encoded. 
+ self.assertEqual(self.extension.decode(frame), frame) + + def test_no_decode_fragmented_text_frame(self): + frame1 = Frame(False, OP_TEXT, "café".encode("utf-8")) + frame2 = Frame(False, OP_CONT, " & ".encode("utf-8")) + frame3 = Frame(True, OP_CONT, "croissants".encode("utf-8")) + + dec_frame1 = self.extension.decode(frame1) + dec_frame2 = self.extension.decode(frame2) + dec_frame3 = self.extension.decode(frame3) + + self.assertEqual(dec_frame1, frame1) + self.assertEqual(dec_frame2, frame2) + self.assertEqual(dec_frame3, frame3) + + def test_no_decode_fragmented_binary_frame(self): + frame1 = Frame(False, OP_TEXT, b"tea ") + frame2 = Frame(True, OP_CONT, b"time") + + dec_frame1 = self.extension.decode(frame1) + dec_frame2 = self.extension.decode(frame2) + + self.assertEqual(dec_frame1, frame1) + self.assertEqual(dec_frame2, frame2) + + def test_context_takeover(self): + frame = Frame(True, OP_TEXT, "café".encode("utf-8")) + + enc_frame1 = self.extension.encode(frame) + enc_frame2 = self.extension.encode(frame) + + self.assertEqual(enc_frame1.data, b"JNL;\xbc\x12\x00") + self.assertEqual(enc_frame2.data, b"J\x06\x11\x00\x00") + + def test_remote_no_context_takeover(self): + # No context takeover when decoding messages. + self.extension = PerMessageDeflate(True, False, 15, 15) + + frame = Frame(True, OP_TEXT, "café".encode("utf-8")) + + enc_frame1 = self.extension.encode(frame) + enc_frame2 = self.extension.encode(frame) + + self.assertEqual(enc_frame1.data, b"JNL;\xbc\x12\x00") + self.assertEqual(enc_frame2.data, b"J\x06\x11\x00\x00") + + dec_frame1 = self.extension.decode(enc_frame1) + self.assertEqual(dec_frame1, frame) + + with self.assertRaises(zlib.error) as exc: + self.extension.decode(enc_frame2) + self.assertIn("invalid distance too far back", str(exc.exception)) + + def test_local_no_context_takeover(self): + # No context takeover when encoding and decoding messages. + self.extension = PerMessageDeflate(True, True, 15, 15) + + frame = Frame(True, OP_TEXT, "café".encode("utf-8")) + + enc_frame1 = self.extension.encode(frame) + enc_frame2 = self.extension.encode(frame) + + self.assertEqual(enc_frame1.data, b"JNL;\xbc\x12\x00") + self.assertEqual(enc_frame2.data, b"JNL;\xbc\x12\x00") + + dec_frame1 = self.extension.decode(enc_frame1) + dec_frame2 = self.extension.decode(enc_frame2) + + self.assertEqual(dec_frame1, frame) + self.assertEqual(dec_frame2, frame) + + # Compression settings can be customized. + + def test_compress_settings(self): + # Configure an extension so that no compression actually occurs. + extension = PerMessageDeflate(False, False, 15, 15, {"level": 0}) + + frame = Frame(True, OP_TEXT, "café".encode("utf-8")) + + enc_frame = extension.encode(frame) + + self.assertEqual( + enc_frame, + frame._replace( + rsv1=True, data=b"\x00\x05\x00\xfa\xffcaf\xc3\xa9\x00" # not compressed + ), + ) + + # Frames aren't decoded beyond max_length. 
+ + def test_decompress_max_size(self): + frame = Frame(True, OP_TEXT, ("a" * 20).encode("utf-8")) + + enc_frame = self.extension.encode(frame) + + self.assertEqual(enc_frame.data, b"JL\xc4\x04\x00\x00") + + with self.assertRaises(PayloadTooBig): + self.extension.decode(enc_frame, max_size=10) + + class ClientPerMessageDeflateFactoryTests(unittest.TestCase, ExtensionTestsMixin): def test_name(self): assert ClientPerMessageDeflateFactory.name == "permessage-deflate" @@ -571,222 +790,3 @@ def test_process_response_params_deduplication(self): factory.process_request_params( [], [PerMessageDeflate(False, False, 15, 15)] ) - - -class PerMessageDeflateTests(unittest.TestCase, ExtensionTestsMixin): - def setUp(self): - # Set up an instance of the permessage-deflate extension with the most - # common settings. Since the extension is symmetrical, this instance - # may be used for testing both encoding and decoding. - self.extension = PerMessageDeflate(False, False, 15, 15) - - def test_name(self): - assert self.extension.name == "permessage-deflate" - - def test_repr(self): - self.assertExtensionEqual(eval(repr(self.extension)), self.extension) - - # Control frames aren't encoded or decoded. - - def test_no_encode_decode_ping_frame(self): - frame = Frame(True, OP_PING, b"") - - self.assertEqual(self.extension.encode(frame), frame) - - self.assertEqual(self.extension.decode(frame), frame) - - def test_no_encode_decode_pong_frame(self): - frame = Frame(True, OP_PONG, b"") - - self.assertEqual(self.extension.encode(frame), frame) - - self.assertEqual(self.extension.decode(frame), frame) - - def test_no_encode_decode_close_frame(self): - frame = Frame(True, OP_CLOSE, serialize_close(1000, "")) - - self.assertEqual(self.extension.encode(frame), frame) - - self.assertEqual(self.extension.decode(frame), frame) - - # Data frames are encoded and decoded. 
- - def test_encode_decode_text_frame(self): - frame = Frame(True, OP_TEXT, "café".encode("utf-8")) - - enc_frame = self.extension.encode(frame) - - self.assertEqual(enc_frame, frame._replace(rsv1=True, data=b"JNL;\xbc\x12\x00")) - - dec_frame = self.extension.decode(enc_frame) - - self.assertEqual(dec_frame, frame) - - def test_encode_decode_binary_frame(self): - frame = Frame(True, OP_BINARY, b"tea") - - enc_frame = self.extension.encode(frame) - - self.assertEqual(enc_frame, frame._replace(rsv1=True, data=b"*IM\x04\x00")) - - dec_frame = self.extension.decode(enc_frame) - - self.assertEqual(dec_frame, frame) - - def test_encode_decode_fragmented_text_frame(self): - frame1 = Frame(False, OP_TEXT, "café".encode("utf-8")) - frame2 = Frame(False, OP_CONT, " & ".encode("utf-8")) - frame3 = Frame(True, OP_CONT, "croissants".encode("utf-8")) - - enc_frame1 = self.extension.encode(frame1) - enc_frame2 = self.extension.encode(frame2) - enc_frame3 = self.extension.encode(frame3) - - self.assertEqual( - enc_frame1, - frame1._replace(rsv1=True, data=b"JNL;\xbc\x12\x00\x00\x00\xff\xff"), - ) - self.assertEqual( - enc_frame2, frame2._replace(rsv1=True, data=b"RPS\x00\x00\x00\x00\xff\xff") - ) - self.assertEqual( - enc_frame3, frame3._replace(rsv1=True, data=b"J.\xca\xcf,.N\xcc+)\x06\x00") - ) - - dec_frame1 = self.extension.decode(enc_frame1) - dec_frame2 = self.extension.decode(enc_frame2) - dec_frame3 = self.extension.decode(enc_frame3) - - self.assertEqual(dec_frame1, frame1) - self.assertEqual(dec_frame2, frame2) - self.assertEqual(dec_frame3, frame3) - - def test_encode_decode_fragmented_binary_frame(self): - frame1 = Frame(False, OP_TEXT, b"tea ") - frame2 = Frame(True, OP_CONT, b"time") - - enc_frame1 = self.extension.encode(frame1) - enc_frame2 = self.extension.encode(frame2) - - self.assertEqual( - enc_frame1, frame1._replace(rsv1=True, data=b"*IMT\x00\x00\x00\x00\xff\xff") - ) - self.assertEqual( - enc_frame2, frame2._replace(rsv1=True, data=b"*\xc9\xccM\x05\x00") - ) - - dec_frame1 = self.extension.decode(enc_frame1) - dec_frame2 = self.extension.decode(enc_frame2) - - self.assertEqual(dec_frame1, frame1) - self.assertEqual(dec_frame2, frame2) - - def test_no_decode_text_frame(self): - frame = Frame(True, OP_TEXT, "café".encode("utf-8")) - - # Try decoding a frame that wasn't encoded. - self.assertEqual(self.extension.decode(frame), frame) - - def test_no_decode_binary_frame(self): - frame = Frame(True, OP_TEXT, b"tea") - - # Try decoding a frame that wasn't encoded. 
- self.assertEqual(self.extension.decode(frame), frame) - - def test_no_decode_fragmented_text_frame(self): - frame1 = Frame(False, OP_TEXT, "café".encode("utf-8")) - frame2 = Frame(False, OP_CONT, " & ".encode("utf-8")) - frame3 = Frame(True, OP_CONT, "croissants".encode("utf-8")) - - dec_frame1 = self.extension.decode(frame1) - dec_frame2 = self.extension.decode(frame2) - dec_frame3 = self.extension.decode(frame3) - - self.assertEqual(dec_frame1, frame1) - self.assertEqual(dec_frame2, frame2) - self.assertEqual(dec_frame3, frame3) - - def test_no_decode_fragmented_binary_frame(self): - frame1 = Frame(False, OP_TEXT, b"tea ") - frame2 = Frame(True, OP_CONT, b"time") - - dec_frame1 = self.extension.decode(frame1) - dec_frame2 = self.extension.decode(frame2) - - self.assertEqual(dec_frame1, frame1) - self.assertEqual(dec_frame2, frame2) - - def test_context_takeover(self): - frame = Frame(True, OP_TEXT, "café".encode("utf-8")) - - enc_frame1 = self.extension.encode(frame) - enc_frame2 = self.extension.encode(frame) - - self.assertEqual(enc_frame1.data, b"JNL;\xbc\x12\x00") - self.assertEqual(enc_frame2.data, b"J\x06\x11\x00\x00") - - def test_remote_no_context_takeover(self): - # No context takeover when decoding messages. - self.extension = PerMessageDeflate(True, False, 15, 15) - - frame = Frame(True, OP_TEXT, "café".encode("utf-8")) - - enc_frame1 = self.extension.encode(frame) - enc_frame2 = self.extension.encode(frame) - - self.assertEqual(enc_frame1.data, b"JNL;\xbc\x12\x00") - self.assertEqual(enc_frame2.data, b"J\x06\x11\x00\x00") - - dec_frame1 = self.extension.decode(enc_frame1) - self.assertEqual(dec_frame1, frame) - - with self.assertRaises(zlib.error) as exc: - self.extension.decode(enc_frame2) - self.assertIn("invalid distance too far back", str(exc.exception)) - - def test_local_no_context_takeover(self): - # No context takeover when encoding and decoding messages. - self.extension = PerMessageDeflate(True, True, 15, 15) - - frame = Frame(True, OP_TEXT, "café".encode("utf-8")) - - enc_frame1 = self.extension.encode(frame) - enc_frame2 = self.extension.encode(frame) - - self.assertEqual(enc_frame1.data, b"JNL;\xbc\x12\x00") - self.assertEqual(enc_frame2.data, b"JNL;\xbc\x12\x00") - - dec_frame1 = self.extension.decode(enc_frame1) - dec_frame2 = self.extension.decode(enc_frame2) - - self.assertEqual(dec_frame1, frame) - self.assertEqual(dec_frame2, frame) - - # Compression settings can be customized. - - def test_compress_settings(self): - # Configure an extension so that no compression actually occurs. - extension = PerMessageDeflate(False, False, 15, 15, {"level": 0}) - - frame = Frame(True, OP_TEXT, "café".encode("utf-8")) - - enc_frame = extension.encode(frame) - - self.assertEqual( - enc_frame, - frame._replace( - rsv1=True, data=b"\x00\x05\x00\xfa\xffcaf\xc3\xa9\x00" # not compressed - ), - ) - - # Frames aren't decoded beyond max_length. 
- - def test_decompress_max_size(self): - frame = Frame(True, OP_TEXT, ("a" * 20).encode("utf-8")) - - enc_frame = self.extension.encode(frame) - - self.assertEqual(enc_frame.data, b"JL\xc4\x04\x00\x00") - - with self.assertRaises(PayloadTooBig): - self.extension.decode(enc_frame, max_size=10) diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index 7b935491b..3ccdadb82 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -115,7 +115,7 @@ def test_str(self): "(private use), no reason" ), ( - InvalidURI("| isn't a valid URI"), + InvalidURI("|"), "| isn't a valid URI", ), ( diff --git a/tests/test_http.py b/tests/test_http.py index a3a8cd403..39961d641 100644 --- a/tests/test_http.py +++ b/tests/test_http.py @@ -130,6 +130,9 @@ def test_contains_case_insensitive(self): def test_contains_not_found(self): self.assertNotIn("Date", self.headers) + def test_contains_non_string_key(self): + self.assertNotIn(42, self.headers) + def test_iter(self): self.assertEqual(set(iter(self.headers)), {"connection", "server"}) diff --git a/tox.ini b/tox.ini index 4d085f56c..7397c90ae 100644 --- a/tox.ini +++ b/tox.ini @@ -24,5 +24,5 @@ commands = isort --check-only --recursive src tests deps = isort [testenv:mypy] -commands = mypy src +commands = mypy --strict src deps = mypy From 03c1fb657e406c3f707b5605b44fa63af188f1f8 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 3 Feb 2019 10:27:52 +0100 Subject: [PATCH 052/281] Simplify NamedTuple declarations with class syntax. --- src/websockets/framing.py | 23 ++++++++++------------- src/websockets/uri.py | 32 ++++++++++++++++---------------- 2 files changed, 26 insertions(+), 29 deletions(-) diff --git a/src/websockets/framing.py b/src/websockets/framing.py index 8eb1a79bd..15b76eb93 100644 --- a/src/websockets/framing.py +++ b/src/websockets/framing.py @@ -64,22 +64,19 @@ Data = Union[str, bytes] +# Remove FrameData when dropping support for Python < 3.6.1 — the first +# version where NamedTuple supports default values, methods, and docstrings. -# Switch to class-based syntax when dropping support for Python < 3.6. +# Consider converting to a dataclass when dropping support for Python < 3.7. -# Convert to a dataclass when dropping support for Python < 3.7. -FrameData = NamedTuple( - "FrameData", - [ - ("fin", bool), - ("opcode", int), - ("data", bytes), - ("rsv1", bool), - ("rsv2", bool), - ("rsv3", bool), - ], -) +class FrameData(NamedTuple): + fin: bool + opcode: int + data: bytes + rsv1: bool + rsv2: bool + rsv3: bool class Frame(FrameData): diff --git a/src/websockets/uri.py b/src/websockets/uri.py index cf6b798ee..16d3d6761 100644 --- a/src/websockets/uri.py +++ b/src/websockets/uri.py @@ -14,22 +14,22 @@ __all__ = ["parse_uri", "WebSocketURI"] -# Switch to class-based syntax when dropping support for Python < 3.6. - -# Convert to a dataclass when dropping support for Python < 3.7. - -WebSocketURI = NamedTuple( - "WebSocketURI", - [ - ("secure", bool), - ("host", str), - ("port", int), - ("resource_name", str), - ("user_info", Optional[Tuple[str, str]]), - ], -) - -WebSocketURI.__doc__ = """WebSocket URI. + +# Consider converting to a dataclass when dropping support for Python < 3.7. + + +class WebSocketURI(NamedTuple): + secure: bool + host: str + port: int + resource_name: str + user_info: Optional[Tuple[str, str]] + + +# Declare the docstring normally when dropping support for Python < 3.6.1. + +WebSocketURI.__doc__ = """ +WebSocket URI. 
* ``secure`` is the secure flag * ``host`` is the lower-case host From 98b5e854d89686bbb33079cefbdf2cd83f3ca1c4 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 3 Feb 2019 10:53:07 +0100 Subject: [PATCH 053/281] Move shared types to a typing module. --- docs/api.rst | 8 ++++++++ src/websockets/__init__.py | 2 ++ src/websockets/framing.py | 4 +--- src/websockets/protocol.py | 2 +- src/websockets/typing.py | 19 +++++++++++++++++++ 5 files changed, 31 insertions(+), 4 deletions(-) create mode 100644 src/websockets/typing.py diff --git a/docs/api.rst b/docs/api.rst index ce6529d1d..acdc69dab 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -86,6 +86,14 @@ Shared .. autoattribute:: open .. autoattribute:: closed +Types +..... + +.. automodule:: websockets.typing + + .. autodata:: Data + + Per-Message Deflate Extension ............................. diff --git a/src/websockets/__init__.py b/src/websockets/__init__.py index 5fbff0d41..9bfbdabfe 100644 --- a/src/websockets/__init__.py +++ b/src/websockets/__init__.py @@ -4,6 +4,7 @@ from .exceptions import * from .protocol import * from .server import * +from .typing import * from .uri import * from .version import version as __version__ # noqa @@ -13,5 +14,6 @@ + exceptions.__all__ + protocol.__all__ + server.__all__ + + typing.__all__ + uri.__all__ ) diff --git a/src/websockets/framing.py b/src/websockets/framing.py index 15b76eb93..0a778ed53 100644 --- a/src/websockets/framing.py +++ b/src/websockets/framing.py @@ -21,10 +21,10 @@ NamedTuple, Optional, Tuple, - Union, ) from .exceptions import PayloadTooBig, WebSocketProtocolError +from .typing import Data if TYPE_CHECKING: # pragma: no cover @@ -62,8 +62,6 @@ EXTERNAL_CLOSE_CODES = [1000, 1001, 1002, 1003, 1007, 1008, 1009, 1010, 1011] -Data = Union[str, bytes] - # Remove FrameData when dropping support for Python < 3.6.1 — the first # version where NamedTuple supports default values, methods, and docstrings. diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index b28dcef72..f4dbbb279 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -35,9 +35,9 @@ ) from .extensions.base import Extension from .framing import * -from .framing import Data from .handshake import * from .http import Headers +from .typing import Data __all__ = ["WebSocketCommonProtocol"] diff --git a/src/websockets/typing.py b/src/websockets/typing.py new file mode 100644 index 000000000..2f0c50c59 --- /dev/null +++ b/src/websockets/typing.py @@ -0,0 +1,19 @@ +from typing import Union + + +__all__ = ["Data"] + +Data = Union[str, bytes] + +Data__doc__ = """ +Types supported in a WebSocket message: + +- :class:`str` for text messages +- :class:`bytes` for binary messages + +""" + +try: + Data.__doc__ = Data__doc__ # type: ignore +except AttributeError: # pragma: no cover + pass From 82b71b782ef776e6643c7e0208cba87538baf389 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 3 Feb 2019 19:01:45 +0100 Subject: [PATCH 054/281] Improve typing declarations. * Create new types for the values of various HTTP headers. * Prefer Sequence (generic type) to List in parameter types -- this has the nice side effect of preventing modification of mutable parameters. And refactor a bit the headers module for readability. 
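A minimal sketch of what the new header value types look like in practice — ``Subprotocol`` is a ``typing.NewType`` over ``str``, so it costs nothing at runtime, and ``Sequence`` in a signature documents that the argument won't be mutated (the subprotocol names below are arbitrary):

.. code:: python

    from typing import NewType, Sequence

    Subprotocol = NewType("Subprotocol", str)

    def build_subprotocol(protocols: Sequence[Subprotocol]) -> str:
        # Sequence rather than List: accepts tuples too, and signals that
        # the function doesn't modify its argument.
        return ", ".join(protocols)

    # NewType values are plain strings at runtime; they only matter to mypy.
    header = build_subprotocol([Subprotocol("chat"), Subprotocol("superchat")])
    assert header == "chat, superchat"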
--- src/websockets/client.py | 57 +++-- src/websockets/exceptions.py | 4 +- src/websockets/extensions/base.py | 16 +- .../extensions/permessage_deflate.py | 22 +- src/websockets/framing.py | 6 +- src/websockets/headers.py | 221 ++++++++++-------- src/websockets/server.py | 69 +++--- src/websockets/typing.py | 13 +- tests/test_headers.py | 28 +-- 9 files changed, 242 insertions(+), 194 deletions(-) diff --git a/src/websockets/client.py b/src/websockets/client.py index 40c5b0073..9cefaedb8 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -7,7 +7,7 @@ import collections.abc import logging from types import TracebackType -from typing import Any, Generator, List, Optional, Tuple, Type, cast +from typing import Any, Generator, List, Optional, Sequence, Tuple, Type, cast from .exceptions import ( InvalidHandshake, @@ -22,13 +22,14 @@ from .headers import ( ExtensionHeader, build_basic_auth, - build_extension_list, - build_subprotocol_list, - parse_extension_list, - parse_subprotocol_list, + build_extension, + build_subprotocol, + parse_extension, + parse_subprotocol, ) from .http import USER_AGENT, Headers, HeadersLike, read_response from .protocol import WebSocketCommonProtocol +from .typing import Origin, Subprotocol from .uri import WebSocketURI, parse_uri @@ -52,9 +53,9 @@ class WebSocketClientProtocol(WebSocketCommonProtocol): def __init__( self, *, - origin: Optional[str] = None, - extensions: Optional[List[ClientExtensionFactory]] = None, - subprotocols: Optional[List[str]] = None, + origin: Optional[Origin] = None, + extensions: Optional[Sequence[ClientExtensionFactory]] = None, + subprotocols: Optional[Sequence[Subprotocol]] = None, extra_headers: Optional[HeadersLike] = None, **kwds: Any, ) -> None: @@ -108,7 +109,8 @@ async def read_http_response(self) -> Tuple[int, Headers]: @staticmethod def process_extensions( - headers: Headers, available_extensions: Optional[List[ClientExtensionFactory]] + headers: Headers, + available_extensions: Optional[Sequence[ClientExtensionFactory]], ) -> List[Extension]: """ Handle the Sec-WebSocket-Extensions HTTP response header. @@ -146,8 +148,7 @@ def process_extensions( raise InvalidHandshake("No extensions supported") parsed_header_values: List[ExtensionHeader] = sum( - [parse_extension_list(header_value) for header_value in header_values], - [], + [parse_extension(header_value) for header_value in header_values], [] ) for name, response_params in parsed_header_values: @@ -184,8 +185,8 @@ def process_extensions( @staticmethod def process_subprotocol( - headers: Headers, available_subprotocols: Optional[List[str]] - ) -> Optional[str]: + headers: Headers, available_subprotocols: Optional[Sequence[Subprotocol]] + ) -> Optional[Subprotocol]: """ Handle the Sec-WebSocket-Protocol HTTP response header. @@ -194,7 +195,7 @@ def process_subprotocol( Return the selected subprotocol. 
""" - subprotocol: Optional[str] = None + subprotocol: Optional[Subprotocol] = None header_values = headers.get_all("Sec-WebSocket-Protocol") @@ -203,12 +204,8 @@ def process_subprotocol( if available_subprotocols is None: raise InvalidHandshake("No subprotocols supported") - parsed_header_values: List[str] = sum( - [ - parse_subprotocol_list(header_value) - for header_value in header_values - ], - [], + parsed_header_values: Sequence[Subprotocol] = sum( + [parse_subprotocol(header_value) for header_value in header_values], [] ) if len(parsed_header_values) > 1: @@ -225,9 +222,9 @@ def process_subprotocol( async def handshake( self, wsuri: WebSocketURI, - origin: Optional[str] = None, - available_extensions: Optional[List[ClientExtensionFactory]] = None, - available_subprotocols: Optional[List[str]] = None, + origin: Optional[Origin] = None, + available_extensions: Optional[Sequence[ClientExtensionFactory]] = None, + available_subprotocols: Optional[Sequence[Subprotocol]] = None, extra_headers: Optional[HeadersLike] = None, ) -> None: """ @@ -266,7 +263,7 @@ async def handshake( key = build_request(request_headers) if available_extensions is not None: - extensions_header = build_extension_list( + extensions_header = build_extension( [ (extension_factory.name, extension_factory.get_request_params()) for extension_factory in available_extensions @@ -275,7 +272,7 @@ async def handshake( request_headers["Sec-WebSocket-Extensions"] = extensions_header if available_subprotocols is not None: - protocol_header = build_subprotocol_list(available_subprotocols) + protocol_header = build_subprotocol(available_subprotocols) request_headers["Sec-WebSocket-Protocol"] = protocol_header if extra_headers is not None: @@ -382,9 +379,9 @@ def __init__( klass: Type[WebSocketClientProtocol] = WebSocketClientProtocol, timeout: float = 10, compression: Optional[str] = "deflate", - origin: Optional[str] = None, - extensions: Optional[List[ClientExtensionFactory]] = None, - subprotocols: Optional[List[str]] = None, + origin: Optional[Origin] = None, + extensions: Optional[Sequence[ClientExtensionFactory]] = None, + subprotocols: Optional[Sequence[Subprotocol]] = None, extra_headers: Optional[HeadersLike] = None, **kwds: Any, ): @@ -417,9 +414,9 @@ def __init__( extension_factory.name == ClientPerMessageDeflateFactory.name for extension_factory in extensions ): - extensions.append( + extensions = list(extensions) + [ ClientPerMessageDeflateFactory(client_max_window_bits=True) - ) + ] elif compression is not None: raise ValueError(f"Unsupported compression: {compression}") diff --git a/src/websockets/exceptions.py b/src/websockets/exceptions.py index 9999527ef..436c594a9 100644 --- a/src/websockets/exceptions.py +++ b/src/websockets/exceptions.py @@ -94,8 +94,8 @@ class InvalidHeaderFormat(InvalidHeader): """ - def __init__(self, name: str, error: str, string: str, pos: int) -> None: - error = f"{error} at {pos} in {string}" + def __init__(self, name: str, error: str, header: str, pos: int) -> None: + error = f"{error} at {pos} in {header}" super().__init__(name, error) diff --git a/src/websockets/extensions/base.py b/src/websockets/extensions/base.py index 707e9317a..ed847c6bc 100644 --- a/src/websockets/extensions/base.py +++ b/src/websockets/extensions/base.py @@ -6,10 +6,10 @@ """ -from typing import List, Optional, Tuple +from typing import List, Optional, Sequence, Tuple from ..framing import Frame -from ..headers import ExtensionParameters +from ..typing import ExtensionParameter __all__ = ["Extension", 
"ClientExtensionFactory", "ServerExtensionFactory"] @@ -60,7 +60,7 @@ def name(self) -> str: """ - def get_request_params(self) -> ExtensionParameters: + def get_request_params(self) -> List[ExtensionParameter]: """ Build request parameters. @@ -69,7 +69,9 @@ def get_request_params(self) -> ExtensionParameters: """ def process_response_params( - self, params: ExtensionParameters, accepted_extensions: List[Extension] + self, + params: Sequence[ExtensionParameter], + accepted_extensions: Sequence[Extension], ) -> Extension: """ Process response parameters received from the server. @@ -100,8 +102,10 @@ def name(self) -> str: """ def process_request_params( - self, params: ExtensionParameters, accepted_extensions: List[Extension] - ) -> Tuple[ExtensionParameters, Extension]: + self, + params: Sequence[ExtensionParameter], + accepted_extensions: Sequence[Extension], + ) -> Tuple[List[ExtensionParameter], Extension]: """ Process request parameters received from the client. diff --git a/src/websockets/extensions/permessage_deflate.py b/src/websockets/extensions/permessage_deflate.py index 93698a363..145cb2bbe 100644 --- a/src/websockets/extensions/permessage_deflate.py +++ b/src/websockets/extensions/permessage_deflate.py @@ -5,7 +5,7 @@ """ import zlib -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any, Dict, List, Optional, Sequence, Tuple, Union from ..exceptions import ( DuplicateParameter, @@ -15,7 +15,7 @@ PayloadTooBig, ) from ..framing import CTRL_OPCODES, OP_CONT, Frame -from ..headers import ExtensionParameters +from ..typing import ExtensionParameter from .base import ClientExtensionFactory, Extension, ServerExtensionFactory @@ -174,12 +174,12 @@ def _build_parameters( client_no_context_takeover: bool, server_max_window_bits: Optional[int], client_max_window_bits: Optional[Union[int, bool]], -) -> ExtensionParameters: +) -> List[ExtensionParameter]: """ Build a list of ``(name, value)`` pairs for some compression parameters. """ - params: ExtensionParameters = [] + params: List[ExtensionParameter] = [] if server_no_context_takeover: params.append(("server_no_context_takeover", None)) if client_no_context_takeover: @@ -194,7 +194,7 @@ def _build_parameters( def _extract_parameters( - params: ExtensionParameters, *, is_server: bool + params: Sequence[ExtensionParameter], *, is_server: bool ) -> Tuple[bool, bool, Optional[int], Optional[Union[int, bool]]]: """ Extract compression parameters from a list of ``(name, value)`` pairs. @@ -310,7 +310,7 @@ def __init__( self.client_max_window_bits = client_max_window_bits self.compress_settings = compress_settings - def get_request_params(self) -> ExtensionParameters: + def get_request_params(self) -> List[ExtensionParameter]: """ Build request parameters. @@ -324,8 +324,8 @@ def get_request_params(self) -> ExtensionParameters: def process_response_params( self, - params: List[Tuple[str, Optional[str]]], - accepted_extensions: List["Extension"], + params: Sequence[ExtensionParameter], + accepted_extensions: Sequence["Extension"], ) -> PerMessageDeflate: """ Process response parameters. @@ -481,9 +481,9 @@ def __init__( def process_request_params( self, - params: List[Tuple[str, Optional[str]]], - accepted_extensions: List["Extension"], - ) -> Tuple[ExtensionParameters, PerMessageDeflate]: + params: Sequence[ExtensionParameter], + accepted_extensions: Sequence["Extension"], + ) -> Tuple[List[ExtensionParameter], PerMessageDeflate]: """ Process request parameters. 
diff --git a/src/websockets/framing.py b/src/websockets/framing.py index 0a778ed53..1409c7d69 100644 --- a/src/websockets/framing.py +++ b/src/websockets/framing.py @@ -17,9 +17,9 @@ Any, Awaitable, Callable, - List, NamedTuple, Optional, + Sequence, Tuple, ) @@ -112,7 +112,7 @@ async def read( *, mask: bool, max_size: Optional[int] = None, - extensions: Optional[List[Extension]] = None, + extensions: Optional[Sequence[Extension]] = None, ) -> "Frame": """ Read a WebSocket frame and return a :class:`Frame` object. @@ -184,7 +184,7 @@ def write( writer: Callable[[bytes], Any], *, mask: bool, - extensions: Optional[List[Extension]] = None, + extensions: Optional[Sequence[Extension]] = None, ) -> None: """ Write a WebSocket frame. diff --git a/src/websockets/headers.py b/src/websockets/headers.py index e2addf4c5..663e71d60 100644 --- a/src/websockets/headers.py +++ b/src/websockets/headers.py @@ -9,51 +9,51 @@ import base64 import re -from typing import Callable, List, Optional, Tuple, TypeVar +from typing import Callable, List, NewType, Optional, Sequence, Tuple, TypeVar, cast from .exceptions import InvalidHeaderFormat +from .typing import ExtensionHeader, ExtensionParameter, Subprotocol __all__ = [ "parse_connection", "parse_upgrade", - "parse_extension_list", - "build_extension_list", - "parse_subprotocol_list", - "build_subprotocol_list", + "parse_extension", + "build_extension", + "parse_subprotocol", + "build_subprotocol", ] T = TypeVar("T") -ExtensionParameter = Tuple[str, Optional[str]] -ExtensionParameters = List[ExtensionParameter] -ExtensionHeader = Tuple[str, ExtensionParameters] -SubprotocolHeader = str +ConnectionOption = NewType("ConnectionOption", str) +UpgradeProtocol = NewType("UpgradeProtocol", str) + # To avoid a dependency on a parsing library, we implement manually the ABNF # described in https://tools.ietf.org/html/rfc6455#section-9.1 with the # definitions from https://tools.ietf.org/html/rfc7230#appendix-B. -def peek_ahead(string: str, pos: int) -> Optional[str]: +def peek_ahead(header: str, pos: int) -> Optional[str]: """ - Return the next character from ``string`` at the given position. + Return the next character from ``header`` at the given position. - Return ``None`` at the end of ``string``. + Return ``None`` at the end of ``header``. We never need to peek more than one character ahead. """ - return None if pos == len(string) else string[pos] + return None if pos == len(header) else header[pos] _OWS_re = re.compile(r"[\t ]*") -def parse_OWS(string: str, pos: int) -> int: +def parse_OWS(header: str, pos: int) -> int: """ - Parse optional whitespace from ``string`` at the given position. + Parse optional whitespace from ``header`` at the given position. Return the new position. @@ -61,7 +61,7 @@ def parse_OWS(string: str, pos: int) -> int: """ # There's always a match, possibly empty, whose content doesn't matter. - match = _OWS_re.match(string, pos) + match = _OWS_re.match(header, pos) assert match is not None return match.end() @@ -69,18 +69,18 @@ def parse_OWS(string: str, pos: int) -> int: _token_re = re.compile(r"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+") -def parse_token(string: str, pos: int, header_name: str) -> Tuple[str, int]: +def parse_token(header: str, pos: int, header_name: str) -> Tuple[str, int]: """ - Parse a token from ``string`` at the given position. + Parse a token from ``header`` at the given position. Return the token value and the new position. Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. 
""" - match = _token_re.match(string, pos) + match = _token_re.match(header, pos) if match is None: - raise InvalidHeaderFormat(header_name, "expected token", string=string, pos=pos) + raise InvalidHeaderFormat(header_name, "expected token", header, pos) return match.group(), match.end() @@ -92,31 +92,29 @@ def parse_token(string: str, pos: int, header_name: str) -> Tuple[str, int]: _unquote_re = re.compile(r"\\([\x09\x20-\x7e\x80-\xff])") -def parse_quoted_string(string: str, pos: int, header_name: str) -> Tuple[str, int]: +def parse_quoted_string(header: str, pos: int, header_name: str) -> Tuple[str, int]: """ - Parse a quoted string from ``string`` at the given position. + Parse a quoted string from ``header`` at the given position. Return the unquoted value and the new position. Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. """ - match = _quoted_string_re.match(string, pos) + match = _quoted_string_re.match(header, pos) if match is None: - raise InvalidHeaderFormat( - header_name, "expected quoted string", string=string, pos=pos - ) + raise InvalidHeaderFormat(header_name, "expected quoted string", header, pos) return _unquote_re.sub(r"\1", match.group()[1:-1]), match.end() def parse_list( parse_item: Callable[[str, int, str], Tuple[T, int]], - string: str, + header: str, pos: int, header_name: str, ) -> List[T]: """ - Parse a comma-separated list from ``string`` at the given position. + Parse a comma-separated list from ``header`` at the given position. This is appropriate for parsing values with the following grammar: @@ -124,7 +122,7 @@ def parse_list( ``parse_item`` parses one item. - ``string`` is assumed not to start or end with whitespace. + ``header`` is assumed not to start or end with whitespace. (This function is designed for parsing an entire header value and :func:`~websockets.http.read_headers` strips whitespace from values.) @@ -139,44 +137,57 @@ def parse_list( # while loops that remove extra delimiters. # Remove extra delimiters before the first item. - while peek_ahead(string, pos) == ",": - pos = parse_OWS(string, pos + 1) + while peek_ahead(header, pos) == ",": + pos = parse_OWS(header, pos + 1) items = [] while True: - # Loop invariant: a item starts at pos in string. - item, pos = parse_item(string, pos, header_name) + # Loop invariant: a item starts at pos in header. + item, pos = parse_item(header, pos, header_name) items.append(item) - pos = parse_OWS(string, pos) + pos = parse_OWS(header, pos) - # We may have reached the end of the string. - if pos == len(string): + # We may have reached the end of the header. + if pos == len(header): break # There must be a delimiter after each element except the last one. - if peek_ahead(string, pos) == ",": - pos = parse_OWS(string, pos + 1) + if peek_ahead(header, pos) == ",": + pos = parse_OWS(header, pos + 1) else: - raise InvalidHeaderFormat( - header_name, "expected comma", string=string, pos=pos - ) + raise InvalidHeaderFormat(header_name, "expected comma", header, pos) # Remove extra delimiters before the next item. - while peek_ahead(string, pos) == ",": - pos = parse_OWS(string, pos + 1) + while peek_ahead(header, pos) == ",": + pos = parse_OWS(header, pos + 1) - # We may have reached the end of the string. - if pos == len(string): + # We may have reached the end of the header. 
+ if pos == len(header): break - # Since we only advance in the string by one character with peek_ahead() + # Since we only advance in the header by one character with peek_ahead() # or with the end position of a regex match, we can't overshoot the end. - assert pos == len(string) + assert pos == len(header) return items -def parse_connection(string: str) -> List[str]: +def parse_connection_option( + header: str, pos: int, header_name: str +) -> Tuple[ConnectionOption, int]: + """ + Parse a Connection option from ``header`` at the given position. + + Return the protocol value and the new position. + + Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. + + """ + item, pos = parse_token(header, pos, header_name) + return cast(ConnectionOption, item), pos + + +def parse_connection(header: str) -> List[ConnectionOption]: """ Parse a ``Connection`` header. @@ -185,7 +196,7 @@ def parse_connection(string: str) -> List[str]: Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. """ - return parse_list(parse_token, string, 0, "Connection") + return parse_list(parse_connection_option, header, 0, "Connection") _protocol_re = re.compile( @@ -193,40 +204,40 @@ def parse_connection(string: str) -> List[str]: ) -def parse_protocol(string: str, pos: int, header_name: str) -> Tuple[str, int]: +def parse_upgrade_protocol( + header: str, pos: int, header_name: str +) -> Tuple[UpgradeProtocol, int]: """ - Parse a protocol from ``string`` at the given position. + Parse an Upgrade protocol from ``header`` at the given position. Return the protocol value and the new position. Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. """ - match = _protocol_re.match(string, pos) + match = _protocol_re.match(header, pos) if match is None: - raise InvalidHeaderFormat( - header_name, "expected protocol", string=string, pos=pos - ) - return match.group(), match.end() + raise InvalidHeaderFormat(header_name, "expected protocol", header, pos) + return cast(UpgradeProtocol, match.group()), match.end() -def parse_upgrade(string: str) -> List[str]: +def parse_upgrade(header: str) -> List[UpgradeProtocol]: """ Parse an ``Upgrade`` header. - Return a list of connection options. + Return a list of protocols. Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. """ - return parse_list(parse_protocol, string, 0, "Upgrade") + return parse_list(parse_upgrade_protocol, header, 0, "Upgrade") -def parse_extension_param( - string: str, pos: int, header_name: str +def parse_extension_item_param( + header: str, pos: int, header_name: str ) -> Tuple[ExtensionParameter, int]: """ - Parse a single extension parameter from ``string`` at the given position. + Parse a single extension parameter from ``header`` at the given position. Return a ``(name, value)`` pair and the new position. @@ -234,36 +245,33 @@ def parse_extension_param( """ # Extract parameter name. - name, pos = parse_token(string, pos, header_name) - pos = parse_OWS(string, pos) + name, pos = parse_token(header, pos, header_name) + pos = parse_OWS(header, pos) # Extract parameter value, if there is one. 
value: Optional[str] = None - if peek_ahead(string, pos) == "=": - pos = parse_OWS(string, pos + 1) - if peek_ahead(string, pos) == '"': + if peek_ahead(header, pos) == "=": + pos = parse_OWS(header, pos + 1) + if peek_ahead(header, pos) == '"': pos_before = pos # for proper error reporting below - value, pos = parse_quoted_string(string, pos, header_name) + value, pos = parse_quoted_string(header, pos, header_name) # https://tools.ietf.org/html/rfc6455#section-9.1 says: the value # after quoted-string unescaping MUST conform to the 'token' ABNF. if _token_re.fullmatch(value) is None: raise InvalidHeaderFormat( - header_name, - "invalid quoted string content", - string=string, - pos=pos_before, + header_name, "invalid quoted header content", header, pos_before ) else: - value, pos = parse_token(string, pos, header_name) - pos = parse_OWS(string, pos) + value, pos = parse_token(header, pos, header_name) + pos = parse_OWS(header, pos) return (name, value), pos -def parse_extension( - string: str, pos: int, header_name: str +def parse_extension_item( + header: str, pos: int, header_name: str ) -> Tuple[ExtensionHeader, int]: """ - Parse an extension definition from ``string`` at the given position. + Parse an extension definition from ``header`` at the given position. Return an ``(extension name, parameters)`` pair, where ``parameters`` is a list of ``(name, value)`` pairs, and the new position. @@ -272,18 +280,18 @@ def parse_extension( """ # Extract extension name. - name, pos = parse_token(string, pos, header_name) - pos = parse_OWS(string, pos) + name, pos = parse_token(header, pos, header_name) + pos = parse_OWS(header, pos) # Extract all parameters. parameters = [] - while peek_ahead(string, pos) == ";": - pos = parse_OWS(string, pos + 1) - parameter, pos = parse_extension_param(string, pos, header_name) + while peek_ahead(header, pos) == ";": + pos = parse_OWS(header, pos + 1) + parameter, pos = parse_extension_item_param(header, pos, header_name) parameters.append(parameter) return (name, parameters), pos -def parse_extension_list(string: str) -> List[ExtensionHeader]: +def parse_extension(header: str) -> List[ExtensionHeader]: """ Parse a ``Sec-WebSocket-Extensions`` header. @@ -305,14 +313,17 @@ def parse_extension_list(string: str) -> List[ExtensionHeader]: Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. """ - return parse_list(parse_extension, string, 0, "Sec-WebSocket-Extensions") + return parse_list(parse_extension_item, header, 0, "Sec-WebSocket-Extensions") -def build_extension(name: str, parameters: ExtensionParameters) -> str: +parse_extension_list = parse_extension # alias for backwards-compatibility + + +def build_extension_item(name: str, parameters: List[ExtensionParameter]) -> str: """ Build an extension definition. - This is the reverse of :func:`parse_extension`. + This is the reverse of :func:`parse_extension_item`. """ return "; ".join( @@ -325,38 +336,62 @@ def build_extension(name: str, parameters: ExtensionParameters) -> str: ) -def build_extension_list(extensions: List[ExtensionHeader]) -> str: +def build_extension(extensions: Sequence[ExtensionHeader]) -> str: """ Unparse a ``Sec-WebSocket-Extensions`` header. - This is the reverse of :func:`parse_extension_list`. + This is the reverse of :func:`parse_extension`. 
""" return ", ".join( - build_extension(name, parameters) for name, parameters in extensions + build_extension_item(name, parameters) for name, parameters in extensions ) -def parse_subprotocol_list(string: str) -> List[SubprotocolHeader]: +build_extension_list = build_extension # alias for backwards-compatibility + + +def parse_subprotocol_item( + header: str, pos: int, header_name: str +) -> Tuple[Subprotocol, int]: + """ + Parse a subprotocol from ``header`` at the given position. + + Return the subprotocol value and the new position. + + Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. + + """ + item, pos = parse_token(header, pos, header_name) + return cast(Subprotocol, item), pos + + +def parse_subprotocol(header: str) -> List[Subprotocol]: """ Parse a ``Sec-WebSocket-Protocol`` header. Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. """ - return parse_list(parse_token, string, 0, "Sec-WebSocket-Protocol") + return parse_list(parse_subprotocol_item, header, 0, "Sec-WebSocket-Protocol") + +parse_subprotocol_list = parse_subprotocol # alias for backwards-compatibility -def build_subprotocol_list(protocols: List[SubprotocolHeader]) -> str: + +def build_subprotocol(protocols: Sequence[Subprotocol]) -> str: """ Unparse a ``Sec-WebSocket-Protocol`` header. - This is the reverse of :func:`parse_subprotocol_list`. + This is the reverse of :func:`parse_subprotocol`. """ return ", ".join(protocols) +build_subprotocol_list = build_subprotocol # alias for backwards-compatibility + + def build_basic_auth(username: str, password: str) -> str: """ Build an Authorization header for HTTP Basic Auth. diff --git a/src/websockets/server.py b/src/websockets/server.py index efb3ebee3..b20f4b80d 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -40,12 +40,13 @@ from .handshake import build_response, check_request from .headers import ( ExtensionHeader, - build_extension_list, - parse_extension_list, - parse_subprotocol_list, + build_extension, + parse_extension, + parse_subprotocol, ) from .http import USER_AGENT, Headers, HeadersLike, MultipleValuesError, read_request from .protocol import State, WebSocketCommonProtocol +from .typing import Origin, Subprotocol __all__ = ["serve", "unix_serve", "WebSocketServerProtocol"] @@ -78,9 +79,9 @@ def __init__( ws_handler: Callable[["WebSocketServerProtocol", str], Awaitable[Any]], ws_server: "WebSocketServer", *, - origins: Optional[List[Optional[str]]] = None, - extensions: Optional[List[ServerExtensionFactory]] = None, - subprotocols: Optional[List[str]] = None, + origins: Optional[Sequence[Optional[Origin]]] = None, + extensions: Optional[Sequence[ServerExtensionFactory]] = None, + subprotocols: Optional[Sequence[Subprotocol]] = None, extra_headers: Optional[HeadersLikeOrCallable] = None, process_request: Optional[ Callable[ @@ -88,7 +89,9 @@ def __init__( Union[Optional[HTTPResponse], Awaitable[Optional[HTTPResponse]]], ] ] = None, - select_subprotocol: Optional[Callable[[List[str], List[str]], str]] = None, + select_subprotocol: Optional[ + Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol] + ] = None, **kwds: Any, ) -> None: # For backwards-compatibility with 6.0 or earlier. 
@@ -301,8 +304,8 @@ def process_request( @staticmethod def process_origin( - headers: Headers, origins: Optional[List[Optional[str]]] = None - ) -> Optional[str]: + headers: Headers, origins: Optional[Sequence[Optional[Origin]]] = None + ) -> Optional[Origin]: """ Handle the Origin HTTP request header. @@ -313,7 +316,7 @@ def process_origin( # "The user agent MUST NOT include more than one Origin header field" # per https://tools.ietf.org/html/rfc6454#section-7.3. try: - origin = headers.get("Origin") + origin = cast(Origin, headers.get("Origin")) except MultipleValuesError: raise InvalidHeader("Origin", "more than one Origin header found") if origins is not None: @@ -323,7 +326,8 @@ def process_origin( @staticmethod def process_extensions( - headers: Headers, available_extensions: Optional[List[ServerExtensionFactory]] + headers: Headers, + available_extensions: Optional[Sequence[ServerExtensionFactory]], ) -> Tuple[Optional[str], List[Extension]]: """ Handle the Sec-WebSocket-Extensions HTTP request header. @@ -367,8 +371,7 @@ def process_extensions( if header_values and available_extensions: parsed_header_values: List[ExtensionHeader] = sum( - [parse_extension_list(header_value) for header_value in header_values], - [], + [parse_extension(header_value) for header_value in header_values], [] ) for name, request_params in parsed_header_values: @@ -399,14 +402,14 @@ def process_extensions( # Serialize extension header. if extension_headers: - response_header_value = build_extension_list(extension_headers) + response_header_value = build_extension(extension_headers) return response_header_value, accepted_extensions # Not @staticmethod because it calls self.select_subprotocol() def process_subprotocol( - self, headers: Headers, available_subprotocols: Optional[List[str]] - ) -> Optional[str]: + self, headers: Headers, available_subprotocols: Optional[Sequence[Subprotocol]] + ) -> Optional[Subprotocol]: """ Handle the Sec-WebSocket-Protocol HTTP request header. @@ -414,18 +417,14 @@ def process_subprotocol( as the selected subprotocol. """ - subprotocol: Optional[str] = None + subprotocol: Optional[Subprotocol] = None header_values = headers.get_all("Sec-WebSocket-Protocol") if header_values and available_subprotocols: - parsed_header_values: List[str] = sum( - [ - parse_subprotocol_list(header_value) - for header_value in header_values - ], - [], + parsed_header_values: List[Subprotocol] = sum( + [parse_subprotocol(header_value) for header_value in header_values], [] ) subprotocol = self.select_subprotocol( @@ -435,8 +434,10 @@ def process_subprotocol( return subprotocol def select_subprotocol( - self, client_subprotocols: List[str], server_subprotocols: List[str] - ) -> Optional[str]: + self, + client_subprotocols: Sequence[Subprotocol], + server_subprotocols: Sequence[Subprotocol], + ) -> Optional[Subprotocol]: """ Pick a subprotocol among those offered by the client. 
@@ -469,9 +470,9 @@ def select_subprotocol( async def handshake( self, - origins: Optional[List[Optional[str]]] = None, - available_extensions: Optional[List[ServerExtensionFactory]] = None, - available_subprotocols: Optional[List[str]] = None, + origins: Optional[Sequence[Optional[Origin]]] = None, + available_extensions: Optional[Sequence[ServerExtensionFactory]] = None, + available_subprotocols: Optional[Sequence[Subprotocol]] = None, extra_headers: Optional[HeadersLikeOrCallable] = None, ) -> str: """ @@ -815,14 +816,16 @@ def __init__( klass: Type[WebSocketServerProtocol] = WebSocketServerProtocol, timeout: float = 10, compression: Optional[str] = "deflate", - origins: Optional[List[Optional[str]]] = None, - extensions: Optional[List[ServerExtensionFactory]] = None, - subprotocols: Optional[List[str]] = None, + origins: Optional[Sequence[Optional[Origin]]] = None, + extensions: Optional[Sequence[ServerExtensionFactory]] = None, + subprotocols: Optional[Sequence[Subprotocol]] = None, extra_headers: Optional[HeadersLikeOrCallable] = None, process_request: Optional[ Callable[[str, Headers], Optional[HTTPResponse]] ] = None, - select_subprotocol: Optional[Callable[[List[str], List[str]], str]] = None, + select_subprotocol: Optional[ + Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol] + ] = None, **kwds: Any, ): # Backwards-compatibility: close_timeout used to be called timeout. @@ -849,7 +852,7 @@ def __init__( ext_factory.name == ServerPerMessageDeflateFactory.name for ext_factory in extensions ): - extensions.append(ServerPerMessageDeflateFactory()) + extensions = list(extensions) + [ServerPerMessageDeflateFactory()] elif compression is not None: raise ValueError(f"Unsupported compression: {compression}") diff --git a/src/websockets/typing.py b/src/websockets/typing.py index 2f0c50c59..651b40bbe 100644 --- a/src/websockets/typing.py +++ b/src/websockets/typing.py @@ -1,7 +1,7 @@ -from typing import Union +from typing import List, NewType, Optional, Tuple, Union -__all__ = ["Data"] +__all__ = ["Data", "Origin", "ExtensionHeader", "ExtensionParameter", "Subprotocol"] Data = Union[str, bytes] @@ -17,3 +17,12 @@ Data.__doc__ = Data__doc__ # type: ignore except AttributeError: # pragma: no cover pass + + +Origin = NewType("Origin", str) + +ExtensionParameter = Tuple[str, Optional[str]] + +ExtensionHeader = Tuple[str, List[ExtensionParameter]] + +Subprotocol = NewType("Subprotocol", str) diff --git a/tests/test_headers.py b/tests/test_headers.py index f03dc83cf..51a0f33af 100644 --- a/tests/test_headers.py +++ b/tests/test_headers.py @@ -41,7 +41,7 @@ def test_parse_upgrade_invalid_header(self): with self.assertRaises(InvalidHeaderFormat): parse_upgrade(header) - def test_parse_extension_list(self): + def test_parse_extension(self): for header, parsed in [ # Synthetic examples ("foo", [("foo", [])]), @@ -78,12 +78,12 @@ def test_parse_extension_list(self): ), ]: with self.subTest(header=header): - self.assertEqual(parse_extension_list(header), parsed) - # Also ensure that build_extension_list round-trips cleanly. - unparsed = build_extension_list(parsed) - self.assertEqual(parse_extension_list(unparsed), parsed) + self.assertEqual(parse_extension(header), parsed) + # Also ensure that build_extension round-trips cleanly. 
+ unparsed = build_extension(parsed) + self.assertEqual(parse_extension(unparsed), parsed) - def test_parse_extension_list_invalid_header(self): + def test_parse_extension_invalid_header(self): for header in [ # Truncated examples "", @@ -99,9 +99,9 @@ def test_parse_extension_list_invalid_header(self): ]: with self.subTest(header=header): with self.assertRaises(InvalidHeaderFormat): - parse_extension_list(header) + parse_extension(header) - def test_parse_subprotocol_list(self): + def test_parse_subprotocol(self): for header, parsed in [ # Synthetic examples ("foo", ["foo"]), @@ -110,12 +110,12 @@ def test_parse_subprotocol_list(self): (",\t, , ,foo ,, bar,baz,,", ["foo", "bar", "baz"]), ]: with self.subTest(header=header): - self.assertEqual(parse_subprotocol_list(header), parsed) - # Also ensure that build_subprotocol_list round-trips cleanly. - unparsed = build_subprotocol_list(parsed) - self.assertEqual(parse_subprotocol_list(unparsed), parsed) + self.assertEqual(parse_subprotocol(header), parsed) + # Also ensure that build_subprotocol round-trips cleanly. + unparsed = build_subprotocol(parsed) + self.assertEqual(parse_subprotocol(unparsed), parsed) - def test_parse_subprotocol_list_invalid_header(self): + def test_parse_subprotocol_invalid_header(self): for header in [ # Truncated examples "", @@ -125,7 +125,7 @@ def test_parse_subprotocol_list_invalid_header(self): ]: with self.subTest(header=header): with self.assertRaises(InvalidHeaderFormat): - parse_subprotocol_list(header) + parse_subprotocol(header) def test_build_basic_auth(self): # Test vector from RFC 7617. From 15018fdc1413aa7b720009a1b2ecddb56509fdf8 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 3 Feb 2019 19:21:57 +0100 Subject: [PATCH 055/281] Normalize return value declaration on __init__. -> None is optional, but I included in on most constructors, so I'm adding the missing ones. --- src/websockets/client.py | 2 +- src/websockets/extensions/permessage_deflate.py | 6 +++--- src/websockets/server.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/websockets/client.py b/src/websockets/client.py index 9cefaedb8..d9ad668ed 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -384,7 +384,7 @@ def __init__( subprotocols: Optional[Sequence[Subprotocol]] = None, extra_headers: Optional[HeadersLike] = None, **kwds: Any, - ): + ) -> None: if loop is None: loop = asyncio.get_event_loop() diff --git a/src/websockets/extensions/permessage_deflate.py b/src/websockets/extensions/permessage_deflate.py index 145cb2bbe..2de27260f 100644 --- a/src/websockets/extensions/permessage_deflate.py +++ b/src/websockets/extensions/permessage_deflate.py @@ -45,7 +45,7 @@ def __init__( remote_max_window_bits: int, local_max_window_bits: int, compress_settings: Optional[Dict[Any, Any]] = None, - ): + ) -> None: """ Configure the Per-Message Deflate extension. @@ -285,7 +285,7 @@ def __init__( server_max_window_bits: Optional[int] = None, client_max_window_bits: Optional[Union[int, bool]] = None, compress_settings: Optional[Dict[Any, Any]] = None, - ): + ) -> None: """ Configure the Per-Message Deflate extension factory. @@ -458,7 +458,7 @@ def __init__( server_max_window_bits: Optional[int] = None, client_max_window_bits: Optional[int] = None, compress_settings: Optional[Dict[Any, Any]] = None, - ): + ) -> None: """ Configure the Per-Message Deflate extension factory. 
diff --git a/src/websockets/server.py b/src/websockets/server.py index b20f4b80d..d99308156 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -580,7 +580,7 @@ class WebSocketServer: """ - def __init__(self, loop: asyncio.AbstractEventLoop): + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: # Store a reference to loop to avoid relying on self.server._loop. self.loop = loop @@ -827,7 +827,7 @@ def __init__( Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol] ] = None, **kwds: Any, - ): + ) -> None: # Backwards-compatibility: close_timeout used to be called timeout. # If both are specified, timeout is ignored. if close_timeout is None: From 37ef1172ff09073a083f18e6373e7ec9f5e03063 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 3 Feb 2019 20:52:21 +0100 Subject: [PATCH 056/281] Improve display of type hints in docs. This requires further work to add :param: declarations in docstrings. --- docs/conf.py | 1 + docs/requirements.txt | 1 + 2 files changed, 2 insertions(+) diff --git a/docs/conf.py b/docs/conf.py index 504656afc..f4e81db35 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,6 +29,7 @@ 'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.viewcode', + 'sphinx_autodoc_typehints', 'sphinxcontrib_trio', ] diff --git a/docs/requirements.txt b/docs/requirements.txt index 954e8c755..0eaf94fbe 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,3 +1,4 @@ sphinx +sphinx-autodoc-typehints sphinxcontrib-spelling sphinxcontrib-trio From b1b3917a5e5bce88bc9f87d211928a2eabd437ad Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 3 Feb 2019 21:01:23 +0100 Subject: [PATCH 057/281] Removed circular dependency in exceptions and uri. --- src/websockets/client.py | 7 ++++--- src/websockets/exceptions.py | 12 +++--------- 2 files changed, 7 insertions(+), 12 deletions(-) diff --git a/src/websockets/client.py b/src/websockets/client.py index d9ad668ed..8e2bcf36e 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -291,7 +291,7 @@ async def handshake( if status_code in (301, 302, 303, 307, 308): if "Location" not in response_headers: raise InvalidMessage("Redirect response missing Location") - raise RedirectHandshake(parse_uri(response_headers["Location"])) + raise RedirectHandshake(response_headers["Location"]) elif status_code != 101: raise InvalidStatusCode(status_code) @@ -518,9 +518,10 @@ async def __await_impl__(self) -> WebSocketClientProtocol: await protocol.wait_closed() raise except RedirectHandshake as e: - if self._wsuri.secure and not e.wsuri.secure: + wsuri = parse_uri(e.uri) + if self._wsuri.secure and not wsuri.secure: raise InvalidHandshake("Redirect dropped TLS") - self._wsuri = e.wsuri + self._wsuri = wsuri continue # redirection chain continues else: raise InvalidHandshake("Maximum redirects exceeded") diff --git a/src/websockets/exceptions.py b/src/websockets/exceptions.py index 436c594a9..73eb8bb79 100644 --- a/src/websockets/exceptions.py +++ b/src/websockets/exceptions.py @@ -1,15 +1,9 @@ import http -from typing import TYPE_CHECKING, Any, Optional +from typing import Optional from .http import Headers, HeadersLike -if TYPE_CHECKING: # pragma: no cover - from .uri import WebSocketURI -else: - WebSocketURI = Any - - __all__ = [ "AbortHandshake", "ConnectionClosed", @@ -61,8 +55,8 @@ class RedirectHandshake(InvalidHandshake): """ - def __init__(self, wsuri: WebSocketURI) -> None: - self.wsuri = wsuri + def __init__(self, uri: str) -> None: + self.uri = uri class 
InvalidMessage(InvalidHandshake): From 6e1766d61983f0069365de3922243b399876b794 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 3 Feb 2019 21:14:43 +0100 Subject: [PATCH 058/281] Handle a circular import less inelegantly. This preserves the correct type annotation even when TYPE_CHECKING is False, which seems better (e.g. for doc generation). --- src/websockets/framing.py | 24 +++++++----------------- 1 file changed, 7 insertions(+), 17 deletions(-) diff --git a/src/websockets/framing.py b/src/websockets/framing.py index 1409c7d69..5b694fd40 100644 --- a/src/websockets/framing.py +++ b/src/websockets/framing.py @@ -12,26 +12,12 @@ import io import random import struct -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - NamedTuple, - Optional, - Sequence, - Tuple, -) +from typing import Any, Awaitable, Callable, NamedTuple, Optional, Sequence, Tuple from .exceptions import PayloadTooBig, WebSocketProtocolError from .typing import Data -if TYPE_CHECKING: # pragma: no cover - from .extensions.base import Extension -else: - Extension = Any - try: from .speedups import apply_mask except ImportError: # pragma: no cover @@ -112,7 +98,7 @@ async def read( *, mask: bool, max_size: Optional[int] = None, - extensions: Optional[Sequence[Extension]] = None, + extensions: Optional[Sequence["websockets.extensions.base.Extension"]] = None, ) -> "Frame": """ Read a WebSocket frame and return a :class:`Frame` object. @@ -184,7 +170,7 @@ def write( writer: Callable[[bytes], Any], *, mask: bool, - extensions: Optional[Sequence[Extension]] = None, + extensions: Optional[Sequence["websockets.extensions.base.Extension"]] = None, ) -> None: """ Write a WebSocket frame. @@ -373,3 +359,7 @@ def check_close(code: int) -> None: """ if not (code in EXTERNAL_CLOSE_CODES or 3000 <= code < 5000): raise WebSocketProtocolError("Invalid status code") + + +# at the bottom to allow circular import, because Extension depends on Frame +import websockets.extensions.base # isort:skip # noqa From dcca6efd750bd42062fe9cb3ecb822a7cae75d19 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 3 Feb 2019 21:40:05 +0100 Subject: [PATCH 059/281] Simplify mock. The code that was using this no longer exists. --- tests/test_protocol.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 154948e43..1f35e65a2 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -70,9 +70,6 @@ def write_eof(self): self.loop.call_soon(self.close) self._eof = True - def is_closing(self): - return self._closing - def close(self): # Simulate how actual transports drop the connection. if not self._closing: From e4cec94ceacbb039ced993dd0d4fd00c4761cd90 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 10 Feb 2019 14:14:33 +0100 Subject: [PATCH 060/281] Document bugfix releases in the changelog. Mostly so users don't wonder why these releases exist. Fix #572. --- docs/changelog.rst | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 1c4b1bc96..c59e569d1 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -149,6 +149,12 @@ Also: * Added compatibility with Python 3.7. +5.0.1 +..... + +* Fixed a regression in the 5.0 release that broke some invocations of + :func:`~server.serve()` and :func:`~client.connect()`. + 5.0 ... @@ -208,6 +214,11 @@ Also: * Prevented processing of incoming frames after failing the connection. +4.0.1 +..... + +* Fixed issues with the packaging of the 4.0 release. 
+ 4.0 ... From ca86a14837d4adbcd4256688f0b0385b65fc304e Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 10 Feb 2019 14:30:03 +0100 Subject: [PATCH 061/281] Improve changelog. Reduce the "wall of red" by downgrading less important backwards-incompatible changes from warning to note. --- docs/changelog.rst | 32 +++++++++++++++++++++----------- 1 file changed, 21 insertions(+), 11 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index c59e569d1..169cb829f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -12,7 +12,7 @@ Changelog **Version 8.0 drops compatibility with Python 3.4 and 3.5.** -.. warning:: +.. note:: **Version 8.0 adds the reason phrase to the return type of the low-level API** :func:`~http.read_response` **.** @@ -65,7 +65,7 @@ Also: closed = asyncio.ensure_future(websocket.wait_closed()) closed.add_done_callback(lambda task: task.cancel()) -.. warning:: +.. note:: **Version 7.0 changes how a** :meth:`~protocol.WebSocketCommonProtocol.ping` **that hasn't received a pong yet behaves when the connection is closed.** @@ -75,7 +75,7 @@ Also: :exc:`~asyncio.CancelledError`. Now ``await ping`` raises :exc:`~exceptions.ConnectionClosed` like other public APIs. -.. warning:: +.. note:: **Version 7.0 raises a** :exc:`RuntimeError` **exception if two coroutines call** :meth:`~protocol.WebSocketCommonProtocol.recv` **concurrently.** @@ -93,7 +93,7 @@ Also: :func:`~server.serve()` and :class:`~server.WebSocketServerProtocol` to customize :meth:`~server.WebSocketServerProtocol.process_request` and :meth:`~server.WebSocketServerProtocol.select_subprotocol` without - subclassing :class:`~server.WebSocketServerProtocol` + subclassing :class:`~server.WebSocketServerProtocol`. * Added support for sending fragmented messages. @@ -142,8 +142,10 @@ Also: * Functions defined in the :mod:`~http` module now return HTTP headers as :class:`~http.Headers` instead of lists of ``(name, value)`` pairs. - Note that :class:`~http.Headers` and :class:`~http.client.HTTPMessage` - provide similar APIs. + Since :class:`~http.Headers` and :class:`~http.client.HTTPMessage` provide + similar APIs, this change won't affect most of the code dealing with HTTP + headers. + Also: @@ -164,9 +166,11 @@ Also: websockets 4.0 was vulnerable to denial of service by memory exhaustion because it didn't enforce ``max_size`` when decompressing compressed - messages (CVE-2018-1000518). + messages (`CVE-2018-1000518`_). -.. warning:: + .. _CVE-2018-1000518: https://nvd.nist.gov/vuln/detail/CVE-2018-1000518 + +.. note:: **Version 5.0 adds a** ``user_info`` **field to the return value of** :func:`~uri.parse_uri` **and** :class:`~uri.WebSocketURI` **.** @@ -188,7 +192,8 @@ Also: * :func:`~server.unix_serve` can be used as an asynchronous context manager on Python ≥ 3.5.1. -* Added :meth:`~protocol.WebSocketCommonProtocol.closed` property. +* Added the :attr:`~protocol.WebSocketCommonProtocol.closed` property to + protocols. * If a :meth:`~protocol.WebSocketCommonProtocol.ping` doesn't receive a pong, it's canceled when the connection is closed. @@ -235,6 +240,10 @@ Also: .. warning:: + **Version 4.0 drops compatibility with Python 3.3.** + +.. note:: + **Version 4.0 removes the** ``state_name`` **attribute of protocols.** Use ``protocol.state.name`` instead of ``protocol.state_name``. @@ -246,7 +255,8 @@ Also: * Added :func:`~server.unix_serve` for listening on Unix sockets. -* Added the :attr:`~server.WebSocketServer.sockets` attribute. 
+* Added the :attr:`~server.WebSocketServer.sockets` attribute to the return + value of :func:`~server.serve`. * Reorganized and extended documentation. @@ -278,7 +288,7 @@ Also: * Rewrote HTTP handling for simplicity and performance. -* Added an optional C extension to speed up low level operations. +* Added an optional C extension to speed up low-level operations. * An invalid response status code during :func:`~client.connect()` now raises :class:`~exceptions.InvalidStatusCode` with a ``code`` attribute. From 569042a682eaa0b5e5a953bea9f3d22832b8dea1 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 10 Feb 2019 18:52:14 +0100 Subject: [PATCH 062/281] Simplify implementation of __iter__. --- src/websockets/client.py | 18 ++++++++++-------- src/websockets/server.py | 18 ++++++++++-------- 2 files changed, 20 insertions(+), 16 deletions(-) diff --git a/src/websockets/client.py b/src/websockets/client.py index 8e2bcf36e..6adb5ca23 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -484,9 +484,7 @@ async def _creating_connection( protocol = cast(WebSocketClientProtocol, protocol) return transport, protocol - @asyncio.coroutine - def __iter__(self) -> Generator[Any, None, WebSocketClientProtocol]: - return (yield from self.__await__()) + # async with connect(...) async def __aenter__(self) -> WebSocketClientProtocol: return await self @@ -499,6 +497,12 @@ async def __aexit__( ) -> None: await self.ws_client.close() + # await connect(...) + + def __await__(self) -> Generator[Any, None, WebSocketClientProtocol]: + # Create a suitable iterator by calling __await__ on a coroutine. + return self.__await_impl__().__await__() + async def __await_impl__(self) -> WebSocketClientProtocol: for redirects in range(self.MAX_REDIRECTS_ALLOWED): transport, protocol = await self._creating_connection() @@ -529,11 +533,9 @@ async def __await_impl__(self) -> WebSocketClientProtocol: self.ws_client = protocol return protocol - def __await__(self) -> Generator[Any, None, WebSocketClientProtocol]: - # __await__() must return a type that I don't know how to obtain except - # by calling __await__() on the return value of an async function. - # I'm not finding a better way to take advantage of PEP 492. - return self.__await_impl__().__await__() + # yield from connect(...) + + __iter__ = __await__ connect = Connect diff --git a/src/websockets/server.py b/src/websockets/server.py index d99308156..7137148a0 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -891,9 +891,7 @@ def __init__( self._creating_server = creating_server self.ws_server = ws_server - @asyncio.coroutine - def __iter__(self) -> Generator[Any, None, WebSocketServer]: - return (yield from self.__await__()) + # async with serve(...) async def __aenter__(self) -> WebSocketServer: return await self @@ -907,16 +905,20 @@ async def __aexit__( self.ws_server.close() await self.ws_server.wait_closed() + # await serve(...) + + def __await__(self) -> Generator[Any, None, WebSocketServer]: + # Create a suitable iterator by calling __await__ on a coroutine. + return self.__await_impl__().__await__() + async def __await_impl__(self) -> WebSocketServer: server = await self._creating_server self.ws_server.wrap(server) return self.ws_server - def __await__(self) -> Generator[Any, None, WebSocketServer]: - # __await__() must return a type that I don't know how to obtain except - # by calling __await__() on the return value of an async function. - # I'm not finding a better way to take advantage of PEP 492. 
- return self.__await_impl__().__await__() + # yield from serve(...) + + __iter__ = __await__ serve = Serve From 217477fa1119c26cccf30bbfb07573444839bb27 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 17 Feb 2019 08:58:02 +0100 Subject: [PATCH 063/281] Restore support for unbounded incoming message queues. The API change is backwards incompatible. However, None is a better value than 0 to mean "no limit" and users already hit the backwards incompatibility when they upgraded to 7.0, which broke the feature. For this reason I didn't include a backwards compatibility shim. Thanks @petr-fedorov for the report. Fix #576. --- docs/changelog.rst | 8 +++++++- src/websockets/protocol.py | 32 +++++++++++++++++--------------- tests/test_protocol.py | 15 +++++++++++++++ 3 files changed, 39 insertions(+), 16 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 169cb829f..30f542b54 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -12,12 +12,18 @@ Changelog **Version 8.0 drops compatibility with Python 3.4 and 3.5.** +.. note:: + + **Version 8.0 changes the behavior of the ``max_queue`` parameter.** + + If you were setting ``max_queue=0`` to make the queue of incoming messages + unbounded, change it to ``max_queue=None``. + .. note:: **Version 8.0 adds the reason phrase to the return type of the low-level API** :func:`~http.read_response` **.** - Also: * :meth:`~protocol.WebSocketCommonProtocol.send`, diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index f4dbbb279..a663d2ab2 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -116,15 +116,16 @@ class WebSocketCommonProtocol(asyncio.StreamReaderProtocol): raise :exc:`~websockets.exceptions.ConnectionClosed` and the connection will be closed with status code 1009. - The ``max_queue`` parameter sets the maximum length of the queue that holds - incoming messages. The default value is 32. 0 disables the limit. Messages - are added to an in-memory queue when they're received; then :meth:`recv()` - pops from that queue. In order to prevent excessive memory consumption when - messages are received faster than they can be processed, the queue must be - bounded. If the queue fills up, the protocol stops processing incoming data - until :meth:`recv()` is called. In this situation, various receive buffers - (at least in ``asyncio`` and in the OS) will fill up, then the TCP receive - window will shrink, slowing down transmission to avoid packet loss. + The ``max_queue`` parameter sets the maximum length of the queue that + holds incoming messages. The default value is ``32``. ``None`` disables + the limit. Messages are added to an in-memory queue when they're received; + then :meth:`recv()` pops from that queue. In order to prevent excessive + memory consumption when messages are received faster than they can be + processed, the queue must be bounded. If the queue fills up, the protocol + stops processing incoming data until :meth:`recv()` is called. In this + situation, various receive buffers (at least in ``asyncio`` and in the OS) + will fill up, then the TCP receive window will shrink, slowing down + transmission to avoid packet loss. Since Python can use up to 4 bytes of memory to represent a single character, each websocket connection may use up to ``4 * max_size * @@ -709,12 +710,13 @@ async def transfer_data(self) -> None: break # Wait until there's room in the queue (if necessary). 
- while len(self.messages) >= self.max_queue: - self._put_message_waiter = self.loop.create_future() - try: - await self._put_message_waiter - finally: - self._put_message_waiter = None + if self.max_queue is not None: + while len(self.messages) >= self.max_queue: + self._put_message_waiter = self.loop.create_future() + try: + await self._put_message_waiter + finally: + self._put_message_waiter = None # Put the message in the queue. self.messages.append(message) diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 1f35e65a2..0113e4a71 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -474,6 +474,21 @@ def test_recv_queue_full(self): self.loop.run_until_complete(self.protocol.recv()) self.assertEqual(list(self.protocol.messages), []) + def test_recv_queue_no_limit(self): + self.protocol.max_queue = None + + for _ in range(100): + self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8"))) + self.run_loop_once() + + # Incoming message queue can contain at least 100 messages. + self.assertEqual(list(self.protocol.messages), ["café"] * 100) + + for _ in range(100): + self.loop.run_until_complete(self.protocol.recv()) + + self.assertEqual(list(self.protocol.messages), []) + def test_recv_other_error(self): async def read_message(): raise Exception("BOOM") From 68e7c04a068827d61f18adfbbb979d80e19e0221 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 17 Feb 2019 10:41:44 +0100 Subject: [PATCH 064/281] Add warnings for backwards compatibility shims. This will make it easier to remove them eventually. --- src/websockets/client.py | 23 ++++++++---- src/websockets/protocol.py | 7 +++- src/websockets/server.py | 12 +++++- tests/test_client_server.py | 73 ++++++++++++++++++++++++++++++------- tests/test_protocol.py | 14 +++++++ 5 files changed, 105 insertions(+), 24 deletions(-) diff --git a/src/websockets/client.py b/src/websockets/client.py index 6adb5ca23..3d057a2e3 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -6,6 +6,7 @@ import asyncio import collections.abc import logging +import warnings from types import TracebackType from typing import Any, Generator, List, Optional, Sequence, Tuple, Type, cast @@ -376,8 +377,8 @@ def __init__( write_limit: int = 2 ** 16, loop: Optional[asyncio.AbstractEventLoop] = None, legacy_recv: bool = False, - klass: Type[WebSocketClientProtocol] = WebSocketClientProtocol, - timeout: float = 10, + klass: Optional[Type[WebSocketClientProtocol]] = None, + timeout: Optional[float] = None, compression: Optional[str] = "deflate", origin: Optional[Origin] = None, extensions: Optional[Sequence[ClientExtensionFactory]] = None, @@ -385,19 +386,27 @@ def __init__( extra_headers: Optional[HeadersLike] = None, **kwds: Any, ) -> None: - if loop is None: - loop = asyncio.get_event_loop() - - # Backwards-compatibility: close_timeout used to be called timeout. + # Backwards compatibility: close_timeout used to be called timeout. + if timeout is None: + timeout = 10 + else: + warnings.warn("rename timeout to close_timeout", DeprecationWarning) # If both are specified, timeout is ignored. if close_timeout is None: close_timeout = timeout - # Backwards-compatibility: create_protocol used to be called klass. + # Backwards compatibility: create_protocol used to be called klass. + if klass is None: + klass = WebSocketClientProtocol + else: + warnings.warn("rename klass to create_protocol", DeprecationWarning) # If both are specified, klass is ignored. 
if create_protocol is None: create_protocol = klass + if loop is None: + loop = asyncio.get_event_loop() + self._wsuri = parse_uri(uri) if self._wsuri.secure: kwds.setdefault("ssl", True) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index a663d2ab2..b0fff8fad 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -14,6 +14,7 @@ import logging import random import struct +import warnings from typing import ( Any, AsyncIterable, @@ -182,9 +183,13 @@ def __init__( write_limit: int = 2 ** 16, loop: Optional[asyncio.AbstractEventLoop] = None, legacy_recv: bool = False, - timeout: float = 10, + timeout: Optional[float] = None, ) -> None: # Backwards-compatibility: close_timeout used to be called timeout. + if timeout is None: + timeout = 10 + else: + warnings.warn("rename timeout to close_timeout", DeprecationWarning) # If both are specified, timeout is ignored. if close_timeout is None: close_timeout = timeout diff --git a/src/websockets/server.py b/src/websockets/server.py index 7137148a0..fca6c2caf 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -813,8 +813,8 @@ def __init__( write_limit: int = 2 ** 16, loop: Optional[asyncio.AbstractEventLoop] = None, legacy_recv: bool = False, - klass: Type[WebSocketServerProtocol] = WebSocketServerProtocol, - timeout: float = 10, + klass: Optional[Type[WebSocketServerProtocol]] = None, + timeout: Optional[float] = None, compression: Optional[str] = "deflate", origins: Optional[Sequence[Optional[Origin]]] = None, extensions: Optional[Sequence[ServerExtensionFactory]] = None, @@ -829,11 +829,19 @@ def __init__( **kwds: Any, ) -> None: # Backwards-compatibility: close_timeout used to be called timeout. + if timeout is None: + timeout = 10 + else: + warnings.warn("rename timeout to close_timeout", DeprecationWarning) # If both are specified, timeout is ignored. if close_timeout is None: close_timeout = timeout # Backwards-compatibility: create_protocol used to be called klass. + if klass is None: + klass = WebSocketServerProtocol + else: + warnings.warn("rename klass to create_protocol", DeprecationWarning) # If both are specified, klass is ignored. if create_protocol is None: create_protocol = klass diff --git a/tests/test_client_server.py b/tests/test_client_server.py index fc88b3139..83b1e0fd9 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -249,13 +249,23 @@ def run_loop_once(self): def server_context(self): return None - def start_server(self, **kwds): + def start_server(self, expected_warning=None, **kwds): # Disable compression by default in tests. kwds.setdefault("compression", None) # Disable pings by default in tests. 
kwds.setdefault("ping_interval", None) - start_server = serve(handler, "localhost", 0, **kwds) - self.server = self.loop.run_until_complete(start_server) + + with warnings.catch_warnings(record=True) as recorded_warnings: + start_server = serve(handler, "localhost", 0, **kwds) + self.server = self.loop.run_until_complete(start_server) + + if expected_warning is None: + self.assertEqual(len(recorded_warnings), 0) + else: + self.assertEqual(len(recorded_warnings), 1) + actual_warning = recorded_warnings[0].message + self.assertEqual(str(actual_warning), expected_warning) + self.assertEqual(type(actual_warning), DeprecationWarning) def start_redirecting_server( self, status, include_location=True, force_insecure=False @@ -278,7 +288,9 @@ def _process_request(path, headers): ) self.redirecting_server = self.loop.run_until_complete(start_server) - def start_client(self, resource_name="/", user_info=None, **kwds): + def start_client( + self, resource_name="/", user_info=None, expected_warning=None, **kwds + ): # Disable compression by default in tests. kwds.setdefault("compression", None) # Disable pings by default in tests. @@ -286,8 +298,18 @@ def start_client(self, resource_name="/", user_info=None, **kwds): secure = kwds.get("ssl") is not None server = self.redirecting_server if self.redirecting_server else self.server server_uri = get_server_uri(server, secure, resource_name, user_info) - start_client = connect(server_uri, **kwds) - self.client = self.loop.run_until_complete(start_client) + + with warnings.catch_warnings(record=True) as recorded_warnings: + start_client = connect(server_uri, **kwds) + self.client = self.loop.run_until_complete(start_client) + + if expected_warning is None: + self.assertEqual(len(recorded_warnings), 0) + else: + self.assertEqual(len(recorded_warnings), 1) + actual_warning = recorded_warnings[0].message + self.assertEqual(str(actual_warning), expected_warning) + self.assertEqual(type(actual_warning), DeprecationWarning) def stop_client(self): try: @@ -638,12 +660,17 @@ def test_server_create_protocol(self): def test_server_create_protocol_function(self): self.assert_client_raises_code(401) - @with_server(klass=UnauthorizedServerProtocol) + @with_server( + klass=UnauthorizedServerProtocol, + expected_warning="rename klass to create_protocol", + ) def test_server_klass_backwards_compatibility(self): self.assert_client_raises_code(401) @with_server( - create_protocol=ForbiddenServerProtocol, klass=UnauthorizedServerProtocol + create_protocol=ForbiddenServerProtocol, + klass=UnauthorizedServerProtocol, + expected_warning="rename klass to create_protocol", ) def test_server_create_protocol_over_klass(self): self.assert_client_raises_code(403) @@ -662,12 +689,21 @@ def test_client_create_protocol_function(self): self.assertIsInstance(self.client, FooClientProtocol) @with_server() - @with_client("/path", klass=FooClientProtocol) + @with_client( + "/path", + klass=FooClientProtocol, + expected_warning="rename klass to create_protocol", + ) def test_client_klass(self): self.assertIsInstance(self.client, FooClientProtocol) @with_server() - @with_client("/path", create_protocol=BarClientProtocol, klass=FooClientProtocol) + @with_client( + "/path", + create_protocol=BarClientProtocol, + klass=FooClientProtocol, + expected_warning="rename klass to create_protocol", + ) def test_client_create_protocol_over_klass(self): self.assertIsInstance(self.client, BarClientProtocol) @@ -677,13 +713,15 @@ def test_server_close_timeout(self): close_timeout = 
self.loop.run_until_complete(self.client.recv()) self.assertEqual(eval(close_timeout), 7) - @with_server(timeout=6) + @with_server(timeout=6, expected_warning="rename timeout to close_timeout") @with_client("/close_timeout") def test_server_timeout_backwards_compatibility(self): close_timeout = self.loop.run_until_complete(self.client.recv()) self.assertEqual(eval(close_timeout), 6) - @with_server(close_timeout=7, timeout=6) + @with_server( + close_timeout=7, timeout=6, expected_warning="rename timeout to close_timeout" + ) @with_client("/close_timeout") def test_server_close_timeout_over_timeout(self): close_timeout = self.loop.run_until_complete(self.client.recv()) @@ -695,12 +733,19 @@ def test_client_close_timeout(self): self.assertEqual(self.client.close_timeout, 7) @with_server() - @with_client("/close_timeout", timeout=6) + @with_client( + "/close_timeout", timeout=6, expected_warning="rename timeout to close_timeout" + ) def test_client_timeout_backwards_compatibility(self): self.assertEqual(self.client.close_timeout, 6) @with_server() - @with_client("/close_timeout", close_timeout=7, timeout=6) + @with_client( + "/close_timeout", + close_timeout=7, + timeout=6, + expected_warning="rename timeout to close_timeout", + ) def test_client_close_timeout_over_timeout(self): self.assertEqual(self.client.close_timeout, 7) diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 0113e4a71..976cc7e9b 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -5,6 +5,7 @@ import time import unittest import unittest.mock +import warnings from websockets.exceptions import ConnectionClosed, InvalidState from websockets.framing import * @@ -321,6 +322,19 @@ def assertCompletesWithin(self, min_time, max_time): self.assertGreaterEqual(dt, min_time, f"Too fast: {dt} < {min_time}") self.assertLess(dt, max_time, f"Too slow: {dt} >= {max_time}") + # Test constructor. + + def test_timeout_backwards_compatibility(self): + with warnings.catch_warnings(record=True) as recorded_warnings: + protocol = WebSocketCommonProtocol(timeout=5) + + self.assertEqual(protocol.close_timeout, 5) + + self.assertEqual(len(recorded_warnings), 1) + warning = recorded_warnings[0].message + self.assertEqual(str(warning), "rename timeout to close_timeout") + self.assertEqual(type(warning), DeprecationWarning) + # Test public attributes. def test_local_address(self): From 17b3f47549b6f752a1be07fa1ba3037cb59c7d56 Mon Sep 17 00:00:00 2001 From: Pablo Marti Date: Tue, 9 Apr 2019 13:12:18 +0200 Subject: [PATCH 065/281] remove extra backtick --- docs/intro.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/intro.rst b/docs/intro.rst index 389896ef4..118167b73 100644 --- a/docs/intro.rst +++ b/docs/intro.rst @@ -71,7 +71,7 @@ This client needs a context because the server uses a self-signed certificate. A client connecting to a secure WebSocket server with a valid certificate (i.e. signed by a CA that your Python installation trusts) can simply pass -``ssl=True`` to :func:`connect`` instead of building a context. +``ssl=True`` to :func:`connect` instead of building a context. Browser-based example --------------------- From acb7a939ae2db4fb977cb92d8e9101c3ee82c0d3 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Wed, 8 May 2019 13:59:58 +0200 Subject: [PATCH 066/281] Small cleanup. --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 30dbfd9c1..d389623a7 100644 --- a/Makefile +++ b/Makefile @@ -19,4 +19,4 @@ coverage: clean: find . 
-name '*.pyc' -o -name '*.so' -delete find . -name __pycache__ -delete - rm -rf .coverage build compliance/reports dist docs/_build htmlcov MANIFEST README src/websockets.egg-info + rm -rf .coverage build compliance/reports dist docs/_build htmlcov MANIFEST src/websockets.egg-info From 52872a5485651d606900cddd720b4a4aa658d690 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Wed, 8 May 2019 14:03:50 +0200 Subject: [PATCH 067/281] Add changelog entry for 7d72dabd. --- docs/changelog.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 30f542b54..9618c1d4b 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -34,6 +34,8 @@ Also: * :func:`~client.connect()` handles redirects from the server during the handshake. +* Enabled readline in the interactive client. + * Added type hints (:pep:`484`). * Added documentation for extensions. From b5690affb4698d18574221ae68024b2fe995a583 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 3 Feb 2019 22:09:34 +0100 Subject: [PATCH 068/281] Add ConnectionClosed subclass for normal closure. Thanks @cjerdonek for the suggestion. Fix #285. --- docs/changelog.rst | 5 ++++ src/websockets/exceptions.py | 29 ++++++++++++++++++++- src/websockets/protocol.py | 49 ++++++++++++++++++++---------------- src/websockets/server.py | 4 +-- tests/test_exceptions.py | 4 +-- 5 files changed, 64 insertions(+), 27 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 9618c1d4b..ee407d13e 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -31,6 +31,11 @@ Also: :meth:`~protocol.WebSocketCommonProtocol.pong` support bytes-like types :class:`bytearray` and :class:`memoryview` in addition to :class:`bytes`. +* Added :exc:`~exceptions.ConnectionClosedOK` and + :exc:`~exceptions.ConnectionClosedError` subclasses of + :exc:`~exceptions.ConnectionClosed` to tell apart normal connection + termination from errors. + * :func:`~client.connect()` handles redirects from the server during the handshake. diff --git a/src/websockets/exceptions.py b/src/websockets/exceptions.py index 73eb8bb79..7fdc97185 100644 --- a/src/websockets/exceptions.py +++ b/src/websockets/exceptions.py @@ -7,6 +7,8 @@ __all__ = [ "AbortHandshake", "ConnectionClosed", + "ConnectionClosedError", + "ConnectionClosedOK", "DuplicateParameter", "InvalidHandshake", "InvalidHeader", @@ -203,7 +205,6 @@ def format_close(code: int, reason: str) -> str: """ Display a human-readable version of the close code and reason. - """ if 3000 <= code < 4000: explanation = "registered" @@ -238,6 +239,32 @@ def __init__(self, code: int, reason: str) -> None: super().__init__(message) +class ConnectionClosedError(ConnectionClosed): + """ + Like :exc:`ConnectionClosed`, when the connection terminated with an error. + + This means the close code is different from 1000 (OK) and 1001 (going away). + + """ + + def __init__(self, code: int, reason: str) -> None: + assert code != 1000 and code != 1001 + super().__init__(code, reason) + + +class ConnectionClosedOK(ConnectionClosed): + """ + Like :exc:`ConnectionClosed`, when the connection terminated properly. + + This means the close code is 1000 (OK) or 1001 (going away). + + """ + + def __init__(self, code: int, reason: str) -> None: + assert code == 1000 or code == 1001 + super().__init__(code, reason) + + class InvalidURI(Exception): """ Exception raised when an URI isn't a valid websocket URI. 
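As a hedged illustration of what the new subclasses enable — not part of the patch; the coroutine name and body are made up — calling code can now tell a normal closure apart from a failure without inspecting close codes:

.. code:: python

    from websockets.exceptions import ConnectionClosedError, ConnectionClosedOK

    async def consumer(websocket):
        while True:
            try:
                message = await websocket.recv()
            except ConnectionClosedOK:
                # Close code 1000 or 1001: the peer closed the connection normally.
                break
            except ConnectionClosedError:
                # Any other close code: a protocol error or a network failure.
                raise
            print(message)

With the ``async for message in websocket`` loop updated later in this patch, the OK case is absorbed automatically, so only ``ConnectionClosedError`` propagates out of the iteration.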
diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index b0fff8fad..c07aef99f 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -30,6 +30,8 @@ from .exceptions import ( ConnectionClosed, + ConnectionClosedError, + ConnectionClosedOK, InvalidState, PayloadTooBig, WebSocketProtocolError, @@ -78,8 +80,8 @@ class WebSocketCommonProtocol(asyncio.StreamReaderProtocol): The iterator yields incoming messages. It exits normally when the connection is closed with the close code 1000 (OK) or 1001 (going away). - It raises a :exc:`~websockets.exceptions.ConnectionClosed` exception when - the connection is closed with any other status code. + It raises a :exc:`~websockets.exceptions.ConnectionClosedError` exception + when the connection is closed with any other status code. The ``host``, ``port`` and ``secure`` parameters are simply stored as attributes for handlers that need them. @@ -114,8 +116,8 @@ class WebSocketCommonProtocol(asyncio.StreamReaderProtocol): The ``max_size`` parameter enforces the maximum size for incoming messages in bytes. The default value is 1 MiB. ``None`` disables the limit. If a message larger than the maximum size is received, :meth:`recv()` will - raise :exc:`~websockets.exceptions.ConnectionClosed` and the connection - will be closed with status code 1009. + raise :exc:`~websockets.exceptions.ConnectionClosedError` and the + connection will be closed with status code 1009. The ``max_queue`` parameter sets the maximum length of the queue that holds incoming messages. The default value is ``32``. ``None`` disables @@ -382,11 +384,8 @@ async def __aiter__(self) -> AsyncIterator[Data]: try: while True: yield await self.recv() - except ConnectionClosed as exc: - if exc.code == 1000 or exc.code == 1001: - return - else: - raise + except ConnectionClosedOK: + return async def recv(self) -> Data: """ @@ -396,8 +395,11 @@ async def recv(self) -> Data: binary frame. When the end of the message stream is reached, :meth:`recv` raises - :exc:`~websockets.exceptions.ConnectionClosed`. This can happen after - a normal connection closure, a protocol error or a network failure. + :exc:`~websockets.exceptions.ConnectionClosed`. Specifically, it + raises :exc:`~websockets.exceptions.ConnectionClosedOK` after a normal + connection closure and + :exc:`~websockets.exceptions.ConnectionClosedError`after a protocol + error or a network failure. .. versionchanged:: 3.0 @@ -659,6 +661,16 @@ async def pong(self, data: bytes = b"") -> None: # Private methods - no guarantees. + def connection_closed_exc(self) -> ConnectionClosed: + exception: ConnectionClosed + if self.close_code == 1000 or self.close_code == 1001: + exception = ConnectionClosedOK(self.close_code, self.close_reason) + else: + exception = ConnectionClosedError(self.close_code, self.close_reason) + # Chain to the exception that terminated data transfer, if any. + exception.__cause__ = self.transfer_data_exc + return exception + async def ensure_open(self) -> None: """ Check that the WebSocket connection is open. @@ -673,16 +685,12 @@ async def ensure_open(self) -> None: # from OPEN to CLOSED. 
if self.transfer_data_task.done(): await asyncio.shield(self.close_connection_task) - raise ConnectionClosed( - self.close_code, self.close_reason - ) from self.transfer_data_exc + raise self.connection_closed_exc() else: return if self.state is State.CLOSED: - raise ConnectionClosed( - self.close_code, self.close_reason - ) from self.transfer_data_exc + raise self.connection_closed_exc() if self.state is State.CLOSING: # If we started the closing handshake, wait for its completion to @@ -691,9 +699,7 @@ async def ensure_open(self) -> None: # CLOSING state also occurs when failing the connection. In that # case self.close_connection_task will complete even faster. await asyncio.shield(self.close_connection_task) - raise ConnectionClosed( - self.close_code, self.close_reason - ) from self.transfer_data_exc + raise self.connection_closed_exc() # Control may only reach this point in buggy third-party subclasses. assert self.state is State.CONNECTING @@ -1163,8 +1169,7 @@ def abort_keepalive_pings(self) -> None: """ assert self.state is State.CLOSED - exc = ConnectionClosed(self.close_code, self.close_reason) - exc.__cause__ = self.transfer_data_exc # emulate raise ... from ... + exc = self.connection_closed_exc() for ping in self.pings.values(): ping.set_exception(exc) diff --git a/src/websockets/server.py b/src/websockets/server.py index fca6c2caf..e202ea25b 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -782,8 +782,8 @@ class Serve: When a server is closed with :meth:`~WebSocketServer.close`, it closes all connections with close code 1001 (going away). WebSocket handlers — which are running the coroutine passed in the ``ws_handler`` — will receive a - :exc:`~websockets.exceptions.ConnectionClosed` exception on their current - or next interaction with the WebSocket connection. + :exc:`~websockets.exceptions.ConnectionClosedOK` exception on their + current or next interaction with the WebSocket connection. Since there's no useful way to propagate exceptions triggered in handlers, they're sent to the ``'websockets.server'`` logger instead. Debugging is diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index 3ccdadb82..6dfbeb7e6 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -90,7 +90,7 @@ def test_str(self): "(OK), no reason", ), ( - ConnectionClosed(1001, 'bye'), + ConnectionClosedOK(1001, 'bye'), "WebSocket connection is closed: code = 1001 " "(going away), reason = bye", ), @@ -100,7 +100,7 @@ def test_str(self): "(connection closed abnormally [internal]), no reason" ), ( - ConnectionClosed(1016, None), + ConnectionClosedError(1016, None), "WebSocket connection is closed: code = 1016 " "(unknown), no reason" ), From f3e40cbfc56d8770e57b65b3cd6b35377e6028c8 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Wed, 8 May 2019 14:26:35 +0200 Subject: [PATCH 069/281] Normalize quotes in a # fmt: off section. 
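Context for this commit, not part of the patch: the ``# fmt: off`` / ``# fmt: on`` markers tell an auto-formatter such as Black to leave the enclosed region untouched, so string quotes there are not rewritten automatically and have to be normalized by hand. A tiny illustration with made-up data:

.. code:: python

    # fmt: off
    CLOSE_CODES = {
        1000: "OK",
        1001: "going away",
    }
    # fmt: on
    # The formatter skips everything between the two markers, so quote style
    # and alignment inside the block must be kept consistent manually.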
--- tests/test_exceptions.py | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index 6dfbeb7e6..4b9830345 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -13,7 +13,7 @@ def test_str(self): "Invalid request", ), ( - AbortHandshake(200, Headers(), b'OK\n'), + AbortHandshake(200, Headers(), b"OK\n"), "HTTP 200, 0 headers, 3 bytes", ), ( @@ -21,44 +21,44 @@ def test_str(self): "Malformed HTTP message", ), ( - InvalidHeader('Name'), + InvalidHeader("Name"), "Missing Name header", ), ( - InvalidHeader('Name', None), + InvalidHeader("Name", None), "Missing Name header", ), ( - InvalidHeader('Name', ''), + InvalidHeader("Name", ""), "Empty Name header", ), ( - InvalidHeader('Name', 'Value'), + InvalidHeader("Name", "Value"), "Invalid Name header: Value", ), ( InvalidHeaderFormat( - 'Sec-WebSocket-Protocol', "expected token", 'a=|', 3 + "Sec-WebSocket-Protocol", "expected token", "a=|", 3 ), "Invalid Sec-WebSocket-Protocol header: " "expected token at 3 in a=|", ), ( - InvalidHeaderValue('Sec-WebSocket-Version', '42'), + InvalidHeaderValue("Sec-WebSocket-Version", "42"), "Invalid Sec-WebSocket-Version header: 42", ), ( - InvalidUpgrade('Upgrade'), + InvalidUpgrade("Upgrade"), "Missing Upgrade header", ), ( - InvalidUpgrade('Connection', 'websocket'), + InvalidUpgrade("Connection", "websocket"), "Invalid Connection header: websocket", ), ( - InvalidOrigin('http://bad.origin'), - 'Invalid Origin header: http://bad.origin', + InvalidOrigin("http://bad.origin"), + "Invalid Origin header: http://bad.origin", ), ( InvalidStatusCode(403), @@ -69,15 +69,15 @@ def test_str(self): "Unsupported subprotocol: spam", ), ( - InvalidParameterName('|'), + InvalidParameterName("|"), "Invalid parameter name: |", ), ( - InvalidParameterValue('a', '|'), + InvalidParameterValue("a", "|"), "Invalid value for parameter a: |", ), ( - DuplicateParameter('a'), + DuplicateParameter("a"), "Duplicate parameter: a", ), ( @@ -85,12 +85,12 @@ def test_str(self): "WebSocket connection isn't established yet", ), ( - ConnectionClosed(1000, ''), + ConnectionClosed(1000, ""), "WebSocket connection is closed: code = 1000 " "(OK), no reason", ), ( - ConnectionClosedOK(1001, 'bye'), + ConnectionClosedOK(1001, "bye"), "WebSocket connection is closed: code = 1001 " "(going away), reason = bye", ), From 423e175cce5dc05f6fab8457fe00492e1f11a34a Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 12 May 2019 09:29:36 +0200 Subject: [PATCH 070/281] Lock mypy version. Work around https://github.com/python/mypy/issues/6802. 
--- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 7397c90ae..801d4d5d1 100644 --- a/tox.ini +++ b/tox.ini @@ -25,4 +25,4 @@ deps = isort [testenv:mypy] commands = mypy --strict src -deps = mypy +deps = mypy==0.670 From c2649b14037c02b12c6e5756ff8d983b86659b68 Mon Sep 17 00:00:00 2001 From: reallinfo <36298335+reallinfo@users.noreply.github.com> Date: Sun, 12 May 2019 04:26:28 +0300 Subject: [PATCH 071/281] Add files via upload --- logo/horizontal.svg | 156 +++++++++++++++++++++++++++++++++++++++++++ logo/icon.svg | 43 ++++++++++++ logo/vertical.svg | 157 ++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 356 insertions(+) create mode 100644 logo/horizontal.svg create mode 100644 logo/icon.svg create mode 100644 logo/vertical.svg diff --git a/logo/horizontal.svg b/logo/horizontal.svg new file mode 100644 index 000000000..766c706f5 --- /dev/null +++ b/logo/horizontal.svg @@ -0,0 +1,156 @@ + + + + + + + + + + +]> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/logo/icon.svg b/logo/icon.svg new file mode 100644 index 000000000..69592fea4 --- /dev/null +++ b/logo/icon.svg @@ -0,0 +1,43 @@ + + + + + + + + + + +]> + + + + + + + + + + + + + diff --git a/logo/vertical.svg b/logo/vertical.svg new file mode 100644 index 000000000..e83e1fefe --- /dev/null +++ b/logo/vertical.svg @@ -0,0 +1,157 @@ + + + + + + + + + + +]> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + From d2292d04ef21ede8a5eb838f73dbb99245eadc4d Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 12 May 2019 11:16:13 +0200 Subject: [PATCH 072/281] Fine-tune logo. * Add margins * Use round pixel dimensions * Restore gradient on symbol (I like it!) * Insert in README and docs. --- README.rst | 5 +- docs/_static/websockets.svg | 17 +--- logo/horizontal.svg | 187 ++++++----------------------------- logo/icon.svg | 58 +++-------- logo/old.svg | 14 +++ logo/vertical.svg | 188 ++++++------------------------------ 6 files changed, 95 insertions(+), 374 deletions(-) mode change 100644 => 120000 docs/_static/websockets.svg create mode 100644 logo/old.svg diff --git a/README.rst b/README.rst index ae47c7a48..ecfc2e534 100644 --- a/README.rst +++ b/README.rst @@ -1,5 +1,6 @@ -WebSockets -========== +.. 
image:: logo/horizontal.svg + :width: 480px + :alt: websockets |rtd| |pypi-v| |pypi-pyversions| |pypi-l| |pypi-wheel| |circleci| |codecov| diff --git a/docs/_static/websockets.svg b/docs/_static/websockets.svg deleted file mode 100644 index 409afb71d..000000000 --- a/docs/_static/websockets.svg +++ /dev/null @@ -1,16 +0,0 @@ - - - - - - - - - - - - diff --git a/docs/_static/websockets.svg b/docs/_static/websockets.svg new file mode 120000 index 000000000..84c316758 --- /dev/null +++ b/docs/_static/websockets.svg @@ -0,0 +1 @@ +../../logo/vertical.svg \ No newline at end of file diff --git a/logo/horizontal.svg b/logo/horizontal.svg index 766c706f5..ee872dc47 100644 --- a/logo/horizontal.svg +++ b/logo/horizontal.svg @@ -1,156 +1,31 @@ - - - - - - - - - - -]> - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/logo/icon.svg b/logo/icon.svg index 69592fea4..cb760940a 100644 --- a/logo/icon.svg +++ b/logo/icon.svg @@ -1,43 +1,15 @@ - - - - - - - - - - -]> - - - - - - - - - - - - - + + + + + + + + + + + + + + + diff --git a/logo/old.svg b/logo/old.svg new file mode 100644 index 000000000..a073139e3 --- /dev/null +++ b/logo/old.svg @@ -0,0 +1,14 @@ + + + + + + + + + + + + diff --git a/logo/vertical.svg b/logo/vertical.svg index e83e1fefe..b07fb2238 100644 --- a/logo/vertical.svg +++ b/logo/vertical.svg @@ -1,157 +1,31 @@ - - - - - - - - - - -]> - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + From a2d0cfd0e418ad75d0de04337047fa88c7101a57 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 12 May 2019 19:30:23 +0200 Subject: [PATCH 073/281] The official name is lowercase. --- docs/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index 040d41598..7679f2e38 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,4 +1,4 @@ -WebSockets +websockets ========== |pypi-v| |pypi-pyversions| |pypi-l| |pypi-wheel| |circleci| |codecov| From 8d51ce2da0cbfa971bc2d74c54283671e0e544b8 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Thu, 23 May 2019 22:06:56 +0200 Subject: [PATCH 074/281] Add Tidelift marketing & security. --- README.rst | 28 ++++++++++++++++++++-------- 1 file changed, 20 insertions(+), 8 deletions(-) diff --git a/README.rst b/README.rst index ecfc2e534..8cbe55260 100644 --- a/README.rst +++ b/README.rst @@ -103,16 +103,9 @@ The development of ``websockets`` is shaped by four principles: Documentation is a first class concern in the project. Head over to `Read the Docs`_ and see for yourself. -Professional support is available if you — or your company — are so inclined. -`Get in touch`_. - -(If you contribute to ``websockets`` and would like to become an official -support provider, let me know.) - .. _Read the Docs: https://websockets.readthedocs.io/ .. _handle backpressure correctly: https://vorpus.org/blog/some-thoughts-on-asynchronous-api-design-in-a-post-asyncawait-world/#websocket-servers .. _Autobahn Testsuite: https://github.com/aaugustin/websockets/blob/master/compliance/README.rst -.. _Get in touch: https://fractalideas.com/ Why shouldn't I use ``websockets``? ----------------------------------- @@ -127,12 +120,31 @@ Why shouldn't I use ``websockets``? * If you want to use Python 2: ``websockets`` builds upon ``asyncio`` which only works on Python 3. 
``websockets`` requires Python ≥ 3.6. + +*Professionally supported websockets is now available* +------------------------------------------------------ + +*Tidelift gives software development teams a single source for purchasing and +maintaining their software, with professional grade assurances from the +experts who know it best, while seamlessly integrating with existing tools.* + +`Get supported websockets with the Tidelift subscription +`_ + +(If you contribute to ``websockets`` and would like to become an official +support provider, `let me know `_.) + What else? ---------- Bug reports, patches and suggestions are welcome! -Please open an issue_ or send a `pull request`_. +To report a security vulnerability, please use the `Tidelift security +contact`_. Tidelift will coordinate the fix and disclosure. + +.. _Tidelift security contact: https://tidelift.com/security + +For anything else, please open an issue_ or send a `pull request`_. .. _issue: https://github.com/aaugustin/websockets/issues/new .. _pull request: https://github.com/aaugustin/websockets/compare/ From 8cd4449977fc821725edb91df8247785dbb8a4f3 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Fri, 31 May 2019 08:04:53 +0200 Subject: [PATCH 075/281] Add Tidelift as sponsoring method --- .github/FUNDING.yml | 1 + 1 file changed, 1 insertion(+) create mode 100644 .github/FUNDING.yml diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 000000000..7ae223b3d --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1 @@ +tidelift: "pypi/websockets" From e262874b3787ea968dc52bdf1f4869bdc272cb17 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 16 Jun 2019 16:37:06 +0200 Subject: [PATCH 076/281] Improve Tidelift marketing. * Add Tidelift logo in README. * Improve UX by separating Tidelift advertising clearly in README. * Move message earlier in README. * Add link to the sidebar in docs. --- README.rst | 30 ++++++++++++++---------------- docs/conf.py | 4 +++- logo/tidelift.png | Bin 0 -> 4069 bytes 3 files changed, 17 insertions(+), 17 deletions(-) create mode 100644 logo/tidelift.png diff --git a/README.rst b/README.rst index 8cbe55260..6bdafb2ed 100644 --- a/README.rst +++ b/README.rst @@ -75,9 +75,21 @@ And here's an echo server: Does that look good? -`Start here!`_ +`Get started with the tutorial!`_ -.. _Start here!: https://websockets.readthedocs.io/en/stable/intro.html +.. _Get started with the tutorial!: https://websockets.readthedocs.io/en/stable/intro.html + +.. raw:: html + +
+ +

Professionally supported websockets is now available

+

Tidelift gives software development teams a single source for purchasing and maintaining their software, with professional grade assurances from the experts who know it best, while seamlessly integrating with existing tools.

+

Get supported websockets with the Tidelift Subscription

+
+ +(If you contribute to ``websockets`` and would like to become an official +support provider, `let me know `_.) Why should I use ``websockets``? -------------------------------- @@ -120,20 +132,6 @@ Why shouldn't I use ``websockets``? * If you want to use Python 2: ``websockets`` builds upon ``asyncio`` which only works on Python 3. ``websockets`` requires Python ≥ 3.6. - -*Professionally supported websockets is now available* ------------------------------------------------------- - -*Tidelift gives software development teams a single source for purchasing and -maintaining their software, with professional grade assurances from the -experts who know it best, while seamlessly integrating with existing tools.* - -`Get supported websockets with the Tidelift subscription -`_ - -(If you contribute to ``websockets`` and would like to become an official -support provider, `let me know `_.) - What else? ---------- diff --git a/docs/conf.py b/docs/conf.py index f4e81db35..e5e6ab15f 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -113,6 +113,7 @@ 'github_button': True, 'github_user': 'aaugustin', 'github_repo': 'websockets', + 'tidelift_url': 'https://tidelift.com/subscription/pkg/pypi-websockets?utm_source=pypi-websockets&utm_medium=referral&utm_campaign=docs', } # Add any paths that contain custom themes here, relative to this directory. @@ -151,9 +152,10 @@ html_sidebars = { '**': [ 'about.html', + 'searchbox.html', 'navigation.html', 'relations.html', - 'searchbox.html', + 'donate.html', ] } diff --git a/logo/tidelift.png b/logo/tidelift.png new file mode 100644 index 0000000000000000000000000000000000000000..317dc4d9852df72ba34e10a6f61d1838cbbd969e GIT binary patch literal 4069 zcmeHKX;4$y626285oA+{C5WQL2QzE}vKV%kz!e2?31JbUA|g025fUUiY>rrh;Fz#z z06|dH3QI(hEkM{rKoG$YWc4YEhKLZ18;b7)bWFXc<5)HRd#Oq#_ttm5?$cj)pFYPo zyErPysmnnSqyRgu+X6udg7}M)0dK1PPxphLxxYGjghG&^w)iV`@PZu$f|SzWbq?DO z_C6ah4%Qe^kGo^BFn9}Wweq=KL%w~OQAe>k@xrzRt$Hh~5Uu+OjC)WnQn0o_>#pJ@ z&CP1g1H0nvX+fOgO{XT6ZcF{HwLbQa-z~z()!qhEDHB0ypF#d!iJC(uW`< z8j9v1AZ6g3_%UVR3D76&|7-5cpieG-^y!}gecJ3t$0dA{Fed?$ z3>$F#+aQUE0LQ<=^t~VnmB3O-pCp(hKw@Qoq?BLJoG+^+ekwPKb6_F&q)Jo=qm z0S5!yi4(~rvjy()rT6^M+0%JBUrT zucJ9Rt1jXo)%!c%FI%ikP(8O7X}?Sc2VSd2Y9lxNTQeMqZXD0o%T|@u*}KqYH`pb| zf37}|a6f^EQYNfol=j9TBA)01>CA7HyM5M{<(si|{m0_{%Ph3ry%P!YpeOsOABOcY z;*8ODG5J6p3bHFzCa@gZjiEUV4Jlv+iAE&Y3|mWYrkzpIV(He_p`zFHTlPkC>Ozh{ z>1O5#B>F;`IU*Y>?P_)+E@SD|hnX;R%8ln{*N)LMG^7y;o!;tF9rw%l+?si@oSelX zrNV_GC6nm9*JiMzH(0Q`*dNBigH5>_(F5h1gyzjrK(jYtCZ6hZU7wv%8E-gMveCrJ z#Bz1z6U@_YU$}FsE^2Vo0b;!UGvmEhdqs8&VYJAIs>KQ*?>FEtrP}U!od{%FvTvJ^@`+cF%cb~Oj|pHE=8;3hGd7z>}} zK=~EraA(2pJQB#Z1K33(+Tj;lfFioJQ&0Wwrn}%;^L(F#nN(~P{v5mrfshZlG=(Vw z(CqUGfSm_Z;oz#@LBTMF&AA4(qw|fbt5fbd3oFy+D--M&ykqcJPUQ@T8N&r> zs>%e8VO6FJZe&G9>D%~@iVec$?R$W^p5iHt9oM)MD+;ZwAQ{R7Tx$Qno~{z9%+olL z-xy}X&#Ratr^SlanIvHsk^91&$5qae&km22?LndRiS^xjA%?Fe(u=ZiL(ZZwD(H5H zOZ!&+`lwBDdC*zDcdD8d@Ugn}IoZ1{@xf9(%3!(-APbp@#4gGl>xy6&p$Z)Gd!&%) zakVizTlW|7!cpv!N8nhxuL8*ZaTAwrdY;F=8*@_xOtwrly%37BN9$WRMZ5rXxYx(B z`mPK8WzUgQhA&#C%|Yq+916dTA3Tw`OZ$|WYv=J$k7!P86`9Uwr#PpVa77x?iXs~I>qL|k%~*K2Ez2aVRT+uy*fN@8!NXnQ-emAj zi7NL2FM1nW(g$$eK()*p@tPpsLF^J?p7A*|Cvj9`IEeTXpNP;ss)_OW9$<4Zb5x(2 z!%Z#eH*np{@dA#|L4Rb2dQCUeD}dr>K+nj^*l~+?*hD2j=Q+{g&Dho=wsCyUnxKrp z#`a9HImRPH?nPu1&dtxSZEI$0g|PxKX-bIbO1kOkqLb@0V?xk>NH4^5p=TQG)0n#$ zq?cEzPlV~pcs#C~JOkLjgV&PY?buCiO!BJP%v!o+$&!s=G&sUOGJE)c>^0)XJA00d ztKsXXiDrUV>R~oK=FSftdeR`78(3Ixg{iBU=jxPwM)X_w`bZuVG@R`7W&AuU;tgr7 z(Zr>s{Lmsaka_+CzIC&H^JJVa-+|@|C|Z~@0NPA4a}i!nO54KrP?!oGDF#Wi${=6S 
zy?)xdd2~(9(OiuD?8_B}R2WS@pDfU7_64gunQ|cI(n#bp!?%;k&g0Iu`El>Bc*TJb zUDw1P0|;#MvDCy{npedrX&0qAnr01L1z8*kbfH@2TkK4|kd|=5nSbOpSXg1?J zNZc1G7JV+wSQx2ITf0B<}GPi%pbdzBm51E+E^}z}=V6 z{;8JQ*v=6@wE%NnGQ03(&!Kcj@+mHRWzVO^Sy5 z$YsT=b{2_vODB=8?2MOI@k~-admqz+xif0a8x1m{l4N$$apkoEH`>t=an)U)N;7&E z{>M5Z`2Xjm`~O)}@^l1E?$gL_{`fj;G}GdV=hz1I{*yKDj3=8kLB(jc)-68=^D4)i zWM`*`p4bQn9yN$ka7I&$mCZ#LiqZ|*6hU5%B)>4cbxR~@ECI(;3qGn?;Db30Fwt!l zKi^jmiG26yWcj_4Knm!;-LiOk*3+=8;ZA|4g_S+g;Sl$BDt42p-9AdzF#Gaz&S_1S zft|Ir^=X7^AjtMSYSY6A=?BCNzYf;11csP2B Date: Sun, 16 Jun 2019 16:49:58 +0200 Subject: [PATCH 077/281] Link more prominently to RTD. --- README.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.rst b/README.rst index 6bdafb2ed..7395d803a 100644 --- a/README.rst +++ b/README.rst @@ -37,6 +37,8 @@ Python with a focus on correctness and simplicity. Built on top of ``asyncio``, Python's standard asynchronous I/O framework, it provides an elegant coroutine-based API. +`Documentation is available on Read the Docs. `_ + Here's how a client sends and receives messages: .. copy-pasted because GitHub doesn't support the include directive @@ -75,9 +77,7 @@ And here's an echo server: Does that look good? -`Get started with the tutorial!`_ - -.. _Get started with the tutorial!: https://websockets.readthedocs.io/en/stable/intro.html +`Get started with the tutorial! `_ .. raw:: html From 60c61e0e39c25582d63559f6be905310a8d98bad Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 16 Jun 2019 16:50:08 +0200 Subject: [PATCH 078/281] Sync docs with README. --- docs/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index 7679f2e38..6001d5075 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -30,7 +30,7 @@ Python with a focus on correctness and simplicity. Built on top of :mod:`asyncio`, Python's standard asynchronous I/O framework, it provides an elegant coroutine-based API. -Here's a client that says "Hello world!": +Here's how a client sends and receives messages: .. literalinclude:: ../example/hello.py From 05ccc5ee64d5f24ed77809985ae5176d71c6caaf Mon Sep 17 00:00:00 2001 From: Tobin Yehle Date: Sun, 16 Jun 2019 08:53:02 -0700 Subject: [PATCH 079/281] Mark package as typed (#590) --- MANIFEST.in | 1 + setup.py | 3 ++- src/websockets/py.typed | 0 3 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 src/websockets/py.typed diff --git a/MANIFEST.in b/MANIFEST.in index 1aba38f67..1c660b95b 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1 +1,2 @@ include LICENSE +include src/websockets/py.typed diff --git a/setup.py b/setup.py index d4fadb240..3c87b2339 100644 --- a/setup.py +++ b/setup.py @@ -48,10 +48,11 @@ 'Programming Language :: Python :: 3.7', ], package_dir = {'': 'src'}, + package_data = {'websockets': ['py.typed']}, packages=packages, ext_modules=ext_modules, include_package_data=True, - zip_safe=True, + zip_safe=False, python_requires='>=3.6', test_loader='unittest:TestLoader', ) diff --git a/src/websockets/py.typed b/src/websockets/py.typed new file mode 100644 index 000000000..e69de29bb From b3d60d75fc2973b67ac39d34f08409207c557a97 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 16 Jun 2019 18:02:11 +0200 Subject: [PATCH 080/281] Don't crash if a extra_headers callable returns None. Fix #619. 
--- docs/changelog.rst | 2 ++ src/websockets/server.py | 4 ++-- tests/test_client_server.py | 6 ++++++ 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index ee407d13e..f4cd8a4b6 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -39,6 +39,8 @@ Also: * :func:`~client.connect()` handles redirects from the server during the handshake. +* Avoided a crash of a ``extra_headers`` callable returns ``None``. + * Enabled readline in the interactive client. * Added type hints (:pep:`484`). diff --git a/src/websockets/server.py b/src/websockets/server.py index e202ea25b..73c07cf11 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -540,9 +540,9 @@ async def handshake( if protocol_header is not None: response_headers["Sec-WebSocket-Protocol"] = protocol_header + if callable(extra_headers): + extra_headers = extra_headers(path, self.request_headers) if extra_headers is not None: - if callable(extra_headers): - extra_headers = extra_headers(path, self.request_headers) if isinstance(extra_headers, Headers): extra_headers = extra_headers.raw_items() elif isinstance(extra_headers, collections.abc.Mapping): diff --git a/tests/test_client_server.py b/tests/test_client_server.py index 83b1e0fd9..5c441561f 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -566,6 +566,12 @@ def test_protocol_custom_response_headers_callable_list(self): resp_headers = self.loop.run_until_complete(self.client.recv()) self.assertIn("('X-Spam', 'Eggs')", resp_headers) + @with_server(extra_headers=lambda p, r: None) + @with_client("/headers") + def test_protocol_custom_response_headers_callable(self): + self.loop.run_until_complete(self.client.recv()) # doesn't crash + self.loop.run_until_complete(self.client.recv()) # nothing to check + @with_server(extra_headers=Headers({"X-Spam": "Eggs"})) @with_client("/headers") def test_protocol_custom_response_headers(self): From 595978c75b42a768e8c85accd02884a8ffa9a503 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 16 Jun 2019 18:11:01 +0200 Subject: [PATCH 081/281] Add missing changelog entry for 8fc78fee. --- docs/changelog.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index f4cd8a4b6..f02280855 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -39,6 +39,9 @@ Also: * :func:`~client.connect()` handles redirects from the server during the handshake. +* Improved support for sending fragmented messages by accepting asynchronous + iterators in :meth:`~protocol.WebSocketCommonProtocol.send`. + * Avoided a crash of a ``extra_headers`` callable returns ``None``. * Enabled readline in the interactive client. From f255722c158b531415916ae29be16f295987d5d7 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Mon, 17 Jun 2019 13:17:17 +0200 Subject: [PATCH 082/281] Fix copy-paste mistake in b3d60d75. 
--- tests/test_client_server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_client_server.py b/tests/test_client_server.py index 5c441561f..21de5486f 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -568,7 +568,7 @@ def test_protocol_custom_response_headers_callable_list(self): @with_server(extra_headers=lambda p, r: None) @with_client("/headers") - def test_protocol_custom_response_headers_callable(self): + def test_protocol_custom_response_headers_callable_none(self): self.loop.run_until_complete(self.client.recv()) # doesn't crash self.loop.run_until_complete(self.client.recv()) # nothing to check From 218f0a9866740773349e8d50f76b4af1d9873d39 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Wed, 19 Jun 2019 21:04:29 +0200 Subject: [PATCH 083/281] Change process_request to be a coroutine (again). b64fee8e made it possible to use either a function or a coroutine. It was part of the 7.0 release. 4f1a14c3 documented this possibility but wasn't released. However, users may have read the "latest" docs and taken advantage of this. For this reason, include proper deprecation warnings and preserve backwards-compatibility (for the foreseeable future). The deprecation warnings need to be in two locations to account for passing a process_request argument and for overriding the process_request method. Issue #597 shows that `isinstance(..., Awaitable)` is more robust than `asyncio.iscoroutinefunction(...)` because it also supports functions returning awaitables. --- docs/changelog.rst | 16 ++++++- example/health_check_server.py | 2 +- src/websockets/server.py | 49 +++++++++++--------- tests/test_client_server.py | 81 ++++++++++++++++++++++++++++++---- 4 files changed, 116 insertions(+), 32 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index f02280855..53e5a1267 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -14,7 +14,21 @@ Changelog .. note:: - **Version 8.0 changes the behavior of the ``max_queue`` parameter.** + **Version 8.0 expects** ``process_request`` **to be a coroutine.** + + Previously, it could be a function or a coroutine. + + If you're passing a ``process_request`` argument to :func:`~server.serve` + or :class:`~server.WebSocketServerProtocol`, or if you're overriding + :meth:`~protocol.WebSocketServerProtocol.process_request` in a subclass, + define it with ``async def`` instead of ``def``. + + For backwards compatibility, functions are still supported. However, in + some inheritance scenarios, mixing functions and coroutines won't work. + +.. note:: + + **Version 8.0 changes the behavior of the** ``max_queue`` **parameter.** If you were setting ``max_queue=0`` to make the queue of incoming messages unbounded, change it to ``max_queue=None``. 
diff --git a/example/health_check_server.py b/example/health_check_server.py index 8e70890b5..feb04bccd 100755 --- a/example/health_check_server.py +++ b/example/health_check_server.py @@ -6,7 +6,7 @@ import http import websockets -def health_check(path, request_headers): +async def health_check(path, request_headers): if path == '/health/': return http.HTTPStatus.OK, [], b'OK\n' diff --git a/src/websockets/server.py b/src/websockets/server.py index 73c07cf11..870e4ec7a 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -84,10 +84,7 @@ def __init__( subprotocols: Optional[Sequence[Subprotocol]] = None, extra_headers: Optional[HeadersLikeOrCallable] = None, process_request: Optional[ - Callable[ - [str, Headers], - Union[Optional[HTTPResponse], Awaitable[Optional[HTTPResponse]]], - ] + Callable[[str, Headers], Awaitable[Optional[HTTPResponse]]] ] = None, select_subprotocol: Optional[ Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol] @@ -266,15 +263,15 @@ def write_http_response( logger.debug("%s > Body (%d bytes)", self.side, len(body)) self.writer.write(body) - def process_request( + async def process_request( self, path: str, request_headers: Headers - ) -> Union[Optional[HTTPResponse], Awaitable[Optional[HTTPResponse]]]: + ) -> Optional[HTTPResponse]: """ Intercept the HTTP request and return an HTTP response if needed. ``request_headers`` is a :class:`~websockets.http.Headers` instance. - If this method returns ``None``, the WebSocket handshake continues. + If this coroutine returns ``None``, the WebSocket handshake continues. If it returns a status code, headers and a response body, that HTTP response is sent and the connection is closed. @@ -286,12 +283,10 @@ def process_request( The HTTP response body must be :class:`bytes`. It may be empty. - This method may be overridden to check the request headers and set a - different status, for example to authenticate the request and return - ``HTTPStatus.UNAUTHORIZED`` or ``HTTPStatus.FORBIDDEN``. - - It can be declared as a function or as a coroutine because such - authentication checks are likely to require network requests. + This coroutine may be overridden to check the request headers and set + a different status, for example to authenticate the request and return + :attr:`http.HTTPStatus.UNAUTHORIZED` or + :attr:`http.HTTPStatus.FORBIDDEN`. It may also be overridden by passing a ``process_request`` argument to the :class:`WebSocketServerProtocol` constructor or the :func:`serve` @@ -299,7 +294,15 @@ def process_request( """ if self._process_request is not None: - return self._process_request(path, request_headers) + response = self._process_request(path, request_headers) + if isinstance(response, Awaitable): + return await response + else: + # For backwards-compatibility with 7.0. + warnings.warn( + "declare process_request as a coroutine", DeprecationWarning + ) + return response # type: ignore return None @staticmethod @@ -503,9 +506,13 @@ async def handshake( # Hook for customizing request handling, for example checking # authentication or treating some paths as plain HTTP endpoints. - early_response = self.process_request(path, request_headers) - if isinstance(early_response, Awaitable): - early_response = await early_response + early_response_awaitable = self.process_request(path, request_headers) + if isinstance(early_response_awaitable, Awaitable): + early_response = await early_response_awaitable + else: + # For backwards-compatibility with 7.0. 
+ warnings.warn("declare process_request as a coroutine", DeprecationWarning) + early_response = early_response_awaitable # type: ignore # Change the response to a 503 error if the server is shutting down. if not self.ws_server.is_serving(): @@ -767,9 +774,9 @@ class Serve: :class:`~collections.abc.Mapping`, an iterable of ``(name, value)`` pairs, or a callable taking the request path and headers in arguments and returning one of the above - * ``process_request`` is a callable or a coroutine taking the request path - and headers in argument, see - :meth:`~WebSocketServerProtocol.process_request` for details + * ``process_request`` is a coroutine taking the request path and headers + in argument, see :meth:`~WebSocketServerProtocol.process_request` for + details * ``select_subprotocol`` is a callable taking the subprotocols offered by the client and available on the server in argument, see :meth:`~WebSocketServerProtocol.select_subprotocol` for details @@ -821,7 +828,7 @@ def __init__( subprotocols: Optional[Sequence[Subprotocol]] = None, extra_headers: Optional[HeadersLikeOrCallable] = None, process_request: Optional[ - Callable[[str, Headers], Optional[HTTPResponse]] + Callable[[str, Headers], Awaitable[Optional[HTTPResponse]]] ] = None, select_subprotocol: Optional[ Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol] diff --git a/tests/test_client_server.py b/tests/test_client_server.py index 21de5486f..a540c373c 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -270,7 +270,7 @@ def start_server(self, expected_warning=None, **kwds): def start_redirecting_server( self, status, include_location=True, force_insecure=False ): - def _process_request(path, headers): + async def process_request(path, headers): server_uri = get_server_uri(self.server, self.secure, path) if force_insecure: server_uri = server_uri.replace("wss:", "ws:") @@ -283,7 +283,7 @@ def _process_request(path, headers): 0, compression=None, ping_interval=None, - process_request=_process_request, + process_request=process_request, ssl=self.server_context, ) self.redirecting_server = self.loop.run_until_complete(start_server) @@ -458,15 +458,65 @@ def test_unix_socket(self): client_socket.close() self.stop_server() - @with_server(process_request=lambda p, rh: (http.HTTPStatus.OK, [], b"OK\n")) + async def process_request_OK(path, request_headers): + return http.HTTPStatus.OK, [], b"OK\n" + + @with_server(process_request=process_request_OK) def test_process_request_argument(self): response = self.loop.run_until_complete(self.make_http_request("/")) with contextlib.closing(response): self.assertEqual(response.code, 200) + def legacy_process_request_OK(path, request_headers): + return http.HTTPStatus.OK, [], b"OK\n" + + @with_server(process_request=legacy_process_request_OK) + def test_process_request_argument_backwards_compatibility(self): + with warnings.catch_warnings(record=True) as recorded_warnings: + response = self.loop.run_until_complete(self.make_http_request("/")) + + with contextlib.closing(response): + self.assertEqual(response.code, 200) + + self.assertEqual(len(recorded_warnings), 1) + warning = recorded_warnings[0].message + self.assertEqual(str(warning), "declare process_request as a coroutine") + self.assertEqual(type(warning), DeprecationWarning) + + class ProcessRequestOKServerProtocol(WebSocketServerProtocol): + async def process_request(self, path, request_headers): + return http.HTTPStatus.OK, [], b"OK\n" + + 
@with_server(create_protocol=ProcessRequestOKServerProtocol) + def test_process_request_override(self): + response = self.loop.run_until_complete(self.make_http_request("/")) + + with contextlib.closing(response): + self.assertEqual(response.code, 200) + + class LegacyProcessRequestOKServerProtocol(WebSocketServerProtocol): + def process_request(self, path, request_headers): + return http.HTTPStatus.OK, [], b"OK\n" + + @with_server(create_protocol=LegacyProcessRequestOKServerProtocol) + def test_process_request_override_backwards_compatibility(self): + with warnings.catch_warnings(record=True) as recorded_warnings: + response = self.loop.run_until_complete(self.make_http_request("/")) + + with contextlib.closing(response): + self.assertEqual(response.code, 200) + + self.assertEqual(len(recorded_warnings), 1) + warning = recorded_warnings[0].message + self.assertEqual(str(warning), "declare process_request as a coroutine") + self.assertEqual(type(warning), DeprecationWarning) + + def select_subprotocol_chat(client_subprotocols, server_subprotocols): + return "chat" + @with_server( - subprotocols=["superchat", "chat"], select_subprotocol=lambda cs, ss: "chat" + subprotocols=["superchat", "chat"], select_subprotocol=select_subprotocol_chat ) @with_client("/subprotocol", subprotocols=["superchat", "chat"]) def test_select_subprotocol_argument(self): @@ -474,6 +524,20 @@ def test_select_subprotocol_argument(self): self.assertEqual(server_subprotocol, repr("chat")) self.assertEqual(self.client.subprotocol, "chat") + class SelectSubprotocolChatServerProtocol(WebSocketServerProtocol): + def select_subprotocol(self, client_subprotocols, server_subprotocols): + return "chat" + + @with_server( + subprotocols=["superchat", "chat"], + create_protocol=SelectSubprotocolChatServerProtocol, + ) + @with_client("/subprotocol", subprotocols=["superchat", "chat"]) + def test_select_subprotocol_override(self): + server_subprotocol = self.loop.run_until_complete(self.client.recv()) + self.assertEqual(server_subprotocol, repr("chat")) + self.assertEqual(self.client.subprotocol, "chat") + @with_server() @with_client("/attributes") def test_protocol_attributes(self): @@ -658,11 +722,10 @@ def assert_client_raises_code(self, status_code): def test_server_create_protocol(self): self.assert_client_raises_code(401) - @with_server( - create_protocol=( - lambda *args, **kwargs: UnauthorizedServerProtocol(*args, **kwargs) - ) - ) + def create_unauthorized_server_protocol(*args, **kwargs): + return UnauthorizedServerProtocol(*args, **kwargs) + + @with_server(create_protocol=create_unauthorized_server_protocol) def test_server_create_protocol_function(self): self.assert_client_raises_code(401) From b288d651e07e9f1614b10421d55833984b448132 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Fri, 21 Jun 2019 21:37:59 +0200 Subject: [PATCH 084/281] Factor out test utilities. 
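Before the next diff, here is a hypothetical sketch (not from the patch series) of the pattern the PATCH 083 tests above exercise: with 8.0, ``process_request`` should be declared with ``async def``, whether it is passed as an argument or overridden in a subclass. The class name and path below are placeholders.

.. code:: python

    import http
    import websockets

    class HealthCheckServerProtocol(websockets.WebSocketServerProtocol):
        async def process_request(self, path, request_headers):
            # Answer plain HTTP health checks without a WebSocket handshake.
            if path == "/health/":
                return http.HTTPStatus.OK, [], b"OK\n"
            # Returning None lets the WebSocket handshake continue.
            return None

Such a protocol would typically be plugged in with ``serve(..., create_protocol=HealthCheckServerProtocol)``.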
--- tests/test_client_server.py | 122 +++++++++++++++--------------------- tests/test_framing.py | 9 +-- tests/test_http.py | 10 +-- tests/test_protocol.py | 29 ++------- tests/test_speedups.py | 0 tests/utils.py | 38 +++++++++++ 6 files changed, 96 insertions(+), 112 deletions(-) delete mode 100644 tests/test_speedups.py create mode 100644 tests/utils.py diff --git a/tests/test_client_server.py b/tests/test_client_server.py index a540c373c..8a1177a7e 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -33,6 +33,7 @@ from websockets.server import * from .test_protocol import MS +from .utils import AsyncioTestCase # Avoid displaying stack traces at the ERROR logging level. @@ -226,25 +227,15 @@ def encode(self, frame): return frame -class ClientServerTests(unittest.TestCase): +class ClientServerTestsMixin: secure = False def setUp(self): - self.loop = asyncio.new_event_loop() - asyncio.set_event_loop(self.loop) + super().setUp() self.server = None self.redirecting_server = None - def tearDown(self): - self.loop.close() - - def run_loop_once(self): - # Process callbacks scheduled with call_soon by appending a callback - # to stop the event loop then running it until it hits that callback. - self.loop.call_soon(self.loop.stop) - self.loop.run_forever() - @property def server_context(self): return None @@ -349,6 +340,40 @@ def temp_client(self, *args, **kwds): with temp_test_client(self, *args, **kwds): yield + +class SecureClientServerTestsMixin(ClientServerTestsMixin): + + secure = True + + @property + def server_context(self): + ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) + ssl_context.load_cert_chain(testcert) + return ssl_context + + @property + def client_context(self): + ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + ssl_context.load_verify_locations(testcert) + return ssl_context + + def start_server(self, **kwds): + kwds.setdefault("ssl", self.server_context) + super().start_server(**kwds) + + def start_client(self, path="/", **kwds): + kwds.setdefault("ssl", self.client_context) + super().start_client(path, **kwds) + + +class CommonClientServerTests: + """ + Mixin that defines most tests but doesn't inherit unittest.TestCase. + + Tests are run by the ClientServerTests and SecureClientServerTests subclasses. + + """ + @with_server() @with_client() def test_basic(self): @@ -1211,29 +1236,15 @@ def test_connection_error_during_closing_handshake(self, close): self.assertEqual(self.client.close_code, 1006) -class SSLClientServerTests(ClientServerTests): - - secure = True - - @property - def server_context(self): - ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) - ssl_context.load_cert_chain(testcert) - return ssl_context - - @property - def client_context(self): - ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) - ssl_context.load_verify_locations(testcert) - return ssl_context +class ClientServerTests( + CommonClientServerTests, ClientServerTestsMixin, AsyncioTestCase +): + pass - def start_server(self, **kwds): - kwds.setdefault("ssl", self.server_context) - super().start_server(**kwds) - def start_client(self, path="/", **kwds): - kwds.setdefault("ssl", self.client_context) - super().start_client(path, **kwds) +class SecureClientServerTests( + CommonClientServerTests, SecureClientServerTestsMixin, AsyncioTestCase +): # TLS over Unix sockets doesn't make sense. 
test_unix_socket = None @@ -1253,14 +1264,7 @@ def test_redirect_insecure(self): self.fail("Did not raise") # pragma: no cover -class ClientServerOriginTests(unittest.TestCase): - def setUp(self): - self.loop = asyncio.new_event_loop() - asyncio.set_event_loop(self.loop) - - def tearDown(self): - self.loop.close() - +class ClientServerOriginTests(AsyncioTestCase): def test_checking_origin_succeeds(self): server = self.loop.run_until_complete( serve(handler, "localhost", 0, origins=["http://localhost"]) @@ -1337,14 +1341,7 @@ def test_checking_lack_of_origin_succeeds_backwards_compatibility(self): self.loop.run_until_complete(server.wait_closed()) -class YieldFromTests(unittest.TestCase): - def setUp(self): - self.loop = asyncio.new_event_loop() - asyncio.set_event_loop(self.loop) - - def tearDown(self): - self.loop.close() - +class YieldFromTests(AsyncioTestCase): def test_client(self): start_server = serve(handler, "localhost", 0) server = self.loop.run_until_complete(start_server) @@ -1375,14 +1372,7 @@ def run_server(): self.loop.run_until_complete(run_server()) -class AsyncAwaitTests(unittest.TestCase): - def setUp(self): - self.loop = asyncio.new_event_loop() - asyncio.set_event_loop(self.loop) - - def tearDown(self): - self.loop.close() - +class AsyncAwaitTests(AsyncioTestCase): def test_client(self): start_server = serve(handler, "localhost", 0) server = self.loop.run_until_complete(start_server) @@ -1411,14 +1401,7 @@ async def run_server(): self.loop.run_until_complete(run_server()) -class ContextManagerTests(unittest.TestCase): - def setUp(self): - self.loop = asyncio.new_event_loop() - asyncio.set_event_loop(self.loop) - - def tearDown(self): - self.loop.close() - +class ContextManagerTests(AsyncioTestCase): def test_client(self): start_server = serve(handler, "localhost", 0) server = self.loop.run_until_complete(start_server) @@ -1461,20 +1444,13 @@ async def run_server(path): self.loop.run_until_complete(run_server(path)) -class AsyncIteratorTests(unittest.TestCase): +class AsyncIteratorTests(AsyncioTestCase): # This is a protocol-level feature, but since it's a high-level API, it is # much easier to exercise at the client or server level. 
MESSAGES = ["3", "2", "1", "Fire!"] - def setUp(self): - self.loop = asyncio.new_event_loop() - asyncio.set_event_loop(self.loop) - - def tearDown(self): - self.loop.close() - def test_iterate_on_messages(self): async def handler(ws, path): for message in self.MESSAGES: diff --git a/tests/test_framing.py b/tests/test_framing.py index 83d0a251a..430faf6e1 100644 --- a/tests/test_framing.py +++ b/tests/test_framing.py @@ -6,15 +6,10 @@ from websockets.exceptions import PayloadTooBig, WebSocketProtocolError from websockets.framing import * +from .utils import AsyncioTestCase -class FramingTests(unittest.TestCase): - def setUp(self): - self.loop = asyncio.new_event_loop() - asyncio.set_event_loop(self.loop) - - def tearDown(self): - self.loop.close() +class FramingTests(AsyncioTestCase): def decode(self, message, mask=False, max_size=None, extensions=None): self.stream = asyncio.StreamReader(loop=self.loop) self.stream.feed_data(message) diff --git a/tests/test_http.py b/tests/test_http.py index 39961d641..60cdb9a25 100644 --- a/tests/test_http.py +++ b/tests/test_http.py @@ -4,18 +4,14 @@ from websockets.http import * from websockets.http import read_headers +from .utils import AsyncioTestCase -class HTTPAsyncTests(unittest.TestCase): + +class HTTPAsyncTests(AsyncioTestCase): def setUp(self): super().setUp() - self.loop = asyncio.new_event_loop() - asyncio.set_event_loop(self.loop) self.stream = asyncio.StreamReader(loop=self.loop) - def tearDown(self): - self.loop.close() - super().tearDown() - def test_read_request(self): # Example from the protocol overview in RFC 6455 self.stream.feed_data( diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 976cc7e9b..938e54d8d 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -1,8 +1,6 @@ import asyncio import contextlib import logging -import os -import time import unittest import unittest.mock import warnings @@ -11,23 +9,13 @@ from websockets.framing import * from websockets.protocol import State, WebSocketCommonProtocol +from .utils import MS, AsyncioTestCase + # Avoid displaying stack traces at the ERROR logging level. logging.basicConfig(level=logging.CRITICAL) -# Unit for timeouts. May be increased on slow machines by setting the -# WEBSOCKETS_TESTS_TIMEOUT_FACTOR environment variable. -MS = 0.001 * int(os.environ.get("WEBSOCKETS_TESTS_TIMEOUT_FACTOR", 1)) - -# asyncio's debug mode has a 10x performance penalty for this test suite. -if os.environ.get("PYTHONASYNCIODEBUG"): # pragma: no cover - MS *= 10 - -# Ensure that timeouts are larger than the clock's resolution (for Windows). -MS = max(MS, 2.5 * time.get_clock_info("monotonic").resolution) - - async def async_iterable(iterable): for item in iterable: yield item @@ -93,8 +81,6 @@ class CommonTests: def setUp(self): super().setUp() - self.loop = asyncio.new_event_loop() - asyncio.set_event_loop(self.loop) # Disable pings to make it easier to test what frames are sent exactly. self.protocol = WebSocketCommonProtocol(ping_interval=None) self.transport = TransportMock() @@ -103,17 +89,10 @@ def setUp(self): def tearDown(self): self.transport.close() self.loop.run_until_complete(self.protocol.close()) - self.loop.close() super().tearDown() # Utilities for writing tests. - def run_loop_once(self): - # Process callbacks scheduled with call_soon by appending a callback - # to stop the event loop then running it until it hits that callback. 
- self.loop.call_soon(self.loop.stop) - self.loop.run_forever() - def make_drain_slow(self, delay=MS): # Process connection_made in order to initialize self.protocol.writer. self.run_loop_once() @@ -1248,7 +1227,7 @@ def test_remote_close_during_send(self): # happen, considering that writes are serialized. -class ServerTests(CommonTests, unittest.TestCase): +class ServerTests(CommonTests, AsyncioTestCase): def setUp(self): super().setUp() self.protocol.is_client = False @@ -1299,7 +1278,7 @@ def test_local_close_connection_lost_timeout_after_close(self): self.assertConnectionClosed(1000, "close") -class ClientTests(CommonTests, unittest.TestCase): +class ClientTests(CommonTests, AsyncioTestCase): def setUp(self): super().setUp() self.protocol.is_client = True diff --git a/tests/test_speedups.py b/tests/test_speedups.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 000000000..0a9f14ce1 --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,38 @@ +import asyncio +import os +import time +import unittest + + +class AsyncioTestCase(unittest.TestCase): + """ + Base class for tests that sets up an isolated event loop for each test. + + """ + + def setUp(self): + super().setUp() + self.loop = asyncio.new_event_loop() + asyncio.set_event_loop(self.loop) + + def tearDown(self): + self.loop.close() + super().tearDown() + + def run_loop_once(self): + # Process callbacks scheduled with call_soon by appending a callback + # to stop the event loop then running it until it hits that callback. + self.loop.call_soon(self.loop.stop) + self.loop.run_forever() + + +# Unit for timeouts. May be increased on slow machines by setting the +# WEBSOCKETS_TESTS_TIMEOUT_FACTOR environment variable. +MS = 0.001 * int(os.environ.get("WEBSOCKETS_TESTS_TIMEOUT_FACTOR", 1)) + +# asyncio's debug mode has a 10x performance penalty for this test suite. +if os.environ.get("PYTHONASYNCIODEBUG"): # pragma: no cover + MS *= 10 + +# Ensure that timeouts are larger than the clock's resolution (for Windows). +MS = max(MS, 2.5 * time.get_clock_info("monotonic").resolution) From 250c0e05694bc57094a9511655af5dc364470ce3 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Fri, 21 Jun 2019 21:42:35 +0200 Subject: [PATCH 085/281] Add string representation for RedirectHandshake. 
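As a side note, a hypothetical test module built on the ``tests/utils.py`` helpers introduced by PATCH 084 above could look like this (sketch only, not part of the patch):

.. code:: python

    import asyncio

    from .utils import MS, AsyncioTestCase

    class ExampleTests(AsyncioTestCase):
        def test_short_sleep(self):
            # self.loop is the isolated event loop created in setUp().
            self.loop.run_until_complete(asyncio.sleep(MS))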
--- src/websockets/exceptions.py | 4 ++++ tests/test_exceptions.py | 5 ++++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/src/websockets/exceptions.py b/src/websockets/exceptions.py index 7fdc97185..22978ec6f 100644 --- a/src/websockets/exceptions.py +++ b/src/websockets/exceptions.py @@ -24,6 +24,7 @@ "InvalidURI", "NegotiationError", "PayloadTooBig", + "RedirectHandshake", "WebSocketProtocolError", ] @@ -60,6 +61,9 @@ class RedirectHandshake(InvalidHandshake): def __init__(self, uri: str) -> None: self.uri = uri + def __str__(self) -> str: + return f"Redirect to {self.uri}" + class InvalidMessage(InvalidHandshake): """ diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index 4b9830345..27e1b53ca 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -16,6 +16,10 @@ def test_str(self): AbortHandshake(200, Headers(), b"OK\n"), "HTTP 200, 0 headers, 3 bytes", ), + ( + RedirectHandshake("wss://example.com"), + "Redirect to wss://example.com", + ), ( InvalidMessage("Malformed HTTP message"), "Malformed HTTP message", @@ -47,7 +51,6 @@ def test_str(self): InvalidHeaderValue("Sec-WebSocket-Version", "42"), "Invalid Sec-WebSocket-Version header: 42", ), - ( InvalidUpgrade("Upgrade"), "Missing Upgrade header", From 3a7e4a3810675a015f783a499305864d0efb8705 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 22 Jun 2019 09:31:57 +0200 Subject: [PATCH 086/281] Remove override made unnecessary by 218f0a98. --- docs/api.rst | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/api.rst b/docs/api.rst index acdc69dab..ef02c9a83 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -49,7 +49,6 @@ Server .. automethod:: handshake(origins=None, available_extensions=None, available_subprotocols=None, extra_headers=None) .. automethod:: process_request(path, request_headers) - :async: .. automethod:: select_subprotocol(client_subprotocols, server_subprotocols) Client From 3278eddb7bcbf51637c8ac64680bd7176db57b6d Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 22 Jun 2019 09:47:45 +0200 Subject: [PATCH 087/281] Remove explicit argument lists. Except those that Sphinx cannot build automatically because of backwards-compatibility hacks. --- docs/api.rst | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/docs/api.rst b/docs/api.rst index ef02c9a83..9870c5dff 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -41,15 +41,15 @@ Server .. autoclass:: WebSocketServer - .. automethod:: close() - .. automethod:: wait_closed() + .. automethod:: close + .. automethod:: wait_closed .. autoattribute:: sockets .. autoclass:: WebSocketServerProtocol(ws_handler, ws_server, *, host=None, port=None, secure=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, origins=None, extensions=None, subprotocols=None, extra_headers=None, process_request=None, select_subprotocol=None) - .. automethod:: handshake(origins=None, available_extensions=None, available_subprotocols=None, extra_headers=None) - .. automethod:: process_request(path, request_headers) - .. automethod:: select_subprotocol(client_subprotocols, server_subprotocols) + .. automethod:: handshake + .. automethod:: process_request + .. automethod:: select_subprotocol Client ...... @@ -61,7 +61,7 @@ Client .. 
autoclass:: WebSocketClientProtocol(*, host=None, port=None, secure=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, origin=None, extensions=None, subprotocols=None, extra_headers=None) - .. automethod:: handshake(wsuri, origin=None, available_extensions=None, available_subprotocols=None, extra_headers=None) + .. automethod:: handshake Shared ...... @@ -70,14 +70,14 @@ Shared .. autoclass:: WebSocketCommonProtocol(*, host=None, port=None, secure=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None) - .. automethod:: close(code=1000, reason='') - .. automethod:: wait_closed() + .. automethod:: close + .. automethod:: wait_closed - .. automethod:: recv() - .. automethod:: send(data) + .. automethod:: recv + .. automethod:: send - .. automethod:: ping(data=None) - .. automethod:: pong(data=b'') + .. automethod:: ping + .. automethod:: pong .. autoattribute:: local_address .. autoattribute:: remote_address From c2b7e1bb5a221fe7fea6efb53ab33d8efc8783ad Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 22 Jun 2019 09:48:20 +0200 Subject: [PATCH 088/281] Remove parentheses in :func: and :meth: references. --- docs/changelog.rst | 22 +++++------ docs/deployment.rst | 4 +- docs/design.rst | 90 ++++++++++++++++++++++----------------------- docs/extensions.rst | 2 +- 4 files changed, 59 insertions(+), 59 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 53e5a1267..17ecd5523 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -50,7 +50,7 @@ Also: :exc:`~exceptions.ConnectionClosed` to tell apart normal connection termination from errors. -* :func:`~client.connect()` handles redirects from the server during the +* :func:`~client.connect` handles redirects from the server during the handshake. * Improved support for sending fragmented messages by accepting asynchronous @@ -72,7 +72,7 @@ Also: .. warning:: **Version 7.0 renames the** ``timeout`` **argument of** - :func:`~server.serve()` **and** :func:`~client.connect()` **to** + :func:`~server.serve()` **and** :func:`~client.connect` **to** ``close_timeout`` **.** This prevents confusion with ``ping_timeout``. @@ -122,7 +122,7 @@ Also: :class:`~protocol.WebSocketCommonProtocol` for details. * Added ``process_request`` and ``select_subprotocol`` arguments to - :func:`~server.serve()` and :class:`~server.WebSocketServerProtocol` to + :func:`~server.serve` and :class:`~server.WebSocketServerProtocol` to customize :meth:`~server.WebSocketServerProtocol.process_request` and :meth:`~server.WebSocketServerProtocol.select_subprotocol` without subclassing :class:`~server.WebSocketServerProtocol`. @@ -187,7 +187,7 @@ Also: ..... * Fixed a regression in the 5.0 release that broke some invocations of - :func:`~server.serve()` and :func:`~client.connect()`. + :func:`~server.serve()` and :func:`~client.connect`. 5.0 ... @@ -212,7 +212,7 @@ Also: Also: -* :func:`~client.connect()` performs HTTP Basic Auth when the URI contains +* :func:`~client.connect` performs HTTP Basic Auth when the URI contains credentials. * Iterating on incoming messages no longer raises an exception when the @@ -268,7 +268,7 @@ Also: Compression should improve performance but it increases RAM and CPU use. If you want to disable compression, add ``compression=None`` when calling - :func:`~server.serve()` or :func:`~client.connect()`. 
+ :func:`~server.serve()` or :func:`~client.connect`. .. warning:: @@ -306,7 +306,7 @@ Also: 3.4 ... -* Renamed :func:`~server.serve()` and :func:`~client.connect()`'s ``klass`` +* Renamed :func:`~server.serve()` and :func:`~client.connect`'s ``klass`` argument to ``create_protocol`` to reflect that it can also be a callable. For backwards compatibility, ``klass`` is still supported. @@ -314,7 +314,7 @@ Also: Python ≥ 3.5.1. * Added support for customizing handling of incoming connections with - :meth:`~server.WebSocketServerProtocol.process_request()`. + :meth:`~server.WebSocketServerProtocol.process_request`. * Made read and write buffer sizes configurable. @@ -322,10 +322,10 @@ Also: * Added an optional C extension to speed up low-level operations. -* An invalid response status code during :func:`~client.connect()` now raises +* An invalid response status code during :func:`~client.connect` now raises :class:`~exceptions.InvalidStatusCode` with a ``code`` attribute. -* Providing a ``sock`` argument to :func:`~client.connect()` no longer +* Providing a ``sock`` argument to :func:`~client.connect` no longer crashes. 3.3 @@ -341,7 +341,7 @@ Also: ... * Added ``timeout``, ``max_size``, and ``max_queue`` arguments to - :func:`~client.connect()` and :func:`~server.serve()`. + :func:`~client.connect()` and :func:`~server.serve`. * Made server shutdown more robust. diff --git a/docs/deployment.rst b/docs/deployment.rst index b0c05dd73..9aa2d3744 100644 --- a/docs/deployment.rst +++ b/docs/deployment.rst @@ -127,7 +127,7 @@ Under high load, if a server receives more messages than it can process, bufferbloat can result in excessive memory use. By default ``websockets`` has generous limits. It is strongly recommended to -adapt them to your application. When you call :func:`~server.serve()`: +adapt them to your application. When you call :func:`~server.serve`: - Set ``max_size`` (default: 1 MiB, UTF-8 encoded) to the maximum size of messages your application generates. @@ -150,7 +150,7 @@ The author of ``websockets`` doesn't think that's a good idea, due to the widely different operational characteristics of HTTP and WebSocket. ``websockets`` provide minimal support for responding to HTTP requests with -the :meth:`~server.WebSocketServerProtocol.process_request()` hook. Typical +the :meth:`~server.WebSocketServerProtocol.process_request` hook. Typical use cases include health checks. Here's an example: .. 
literalinclude:: ../example/health_check_server.py diff --git a/docs/design.rst b/docs/design.rst index c6097f724..19cda16bb 100644 --- a/docs/design.rst +++ b/docs/design.rst @@ -32,20 +32,20 @@ WebSocket connections go through a trivial state machine: Transitions happen in the following places: - ``CONNECTING -> OPEN``: in - :meth:`~protocol.WebSocketCommonProtocol.connection_open()` which runs when + :meth:`~protocol.WebSocketCommonProtocol.connection_open` which runs when the :ref:`opening handshake ` completes and the WebSocket connection is established — not to be confused with :meth:`~asyncio.Protocol.connection_made` which runs when the TCP connection is established; - ``OPEN -> CLOSING``: in - :meth:`~protocol.WebSocketCommonProtocol.write_frame()` immediately before + :meth:`~protocol.WebSocketCommonProtocol.write_frame` immediately before sending a close frame; since receiving a close frame triggers sending a close frame, this does the right thing regardless of which side started the :ref:`closing handshake `; also in - :meth:`~protocol.WebSocketCommonProtocol.fail_connection()` which duplicates + :meth:`~protocol.WebSocketCommonProtocol.fail_connection` which duplicates a few lines of code from `write_close_frame()` and `write_frame()`; - ``* -> CLOSED``: in - :meth:`~protocol.WebSocketCommonProtocol.connection_lost()` which is always + :meth:`~protocol.WebSocketCommonProtocol.connection_lost` which is always called exactly once when the TCP connection is closed. Coroutines @@ -58,35 +58,35 @@ connection lifecycle on the client side. :target: _images/lifecycle.svg The lifecycle is identical on the server side, except inversion of control -makes the equivalent of :meth:`~client.connect()` implicit. +makes the equivalent of :meth:`~client.connect` implicit. Coroutines shown in green are called by the application. Multiple coroutines may interact with the WebSocket connection concurrently. Coroutines shown in gray manage the connection. When the opening handshake -succeeds, :meth:`~protocol.WebSocketCommonProtocol.connection_open()` starts +succeeds, :meth:`~protocol.WebSocketCommonProtocol.connection_open` starts two tasks: - :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` runs - :meth:`~protocol.WebSocketCommonProtocol.transfer_data()` which handles - incoming data and lets :meth:`~protocol.WebSocketCommonProtocol.recv()` + :meth:`~protocol.WebSocketCommonProtocol.transfer_data` which handles + incoming data and lets :meth:`~protocol.WebSocketCommonProtocol.recv` consume it. It may be canceled to terminate the connection. It never exits with an exception other than :exc:`~asyncio.CancelledError`. See :ref:`data transfer ` below. - :attr:`~protocol.WebSocketCommonProtocol.keepalive_ping_task` runs - :meth:`~protocol.WebSocketCommonProtocol.keepalive_ping()` which sends Ping + :meth:`~protocol.WebSocketCommonProtocol.keepalive_ping` which sends Ping frames at regular intervals and ensures that corresponding Pong frames are received. It is canceled when the connection terminates. It never exits with an exception other than :exc:`~asyncio.CancelledError`. - :attr:`~protocol.WebSocketCommonProtocol.close_connection_task` runs - :meth:`~protocol.WebSocketCommonProtocol.close_connection()` which waits for + :meth:`~protocol.WebSocketCommonProtocol.close_connection` which waits for the data transfer to terminate, then takes care of closing the TCP connection. It must not be canceled. It never exits with an exception. See :ref:`connection termination ` below. 
-Besides, :meth:`~protocol.WebSocketCommonProtocol.fail_connection()` starts +Besides, :meth:`~protocol.WebSocketCommonProtocol.fail_connection` starts the same :attr:`~protocol.WebSocketCommonProtocol.close_connection_task` when the opening handshake fails, in order to close the TCP connection. @@ -113,7 +113,7 @@ Opening handshake ----------------- ``websockets`` performs the opening handshake when establishing a WebSocket -connection. On the client side, :meth:`~client.connect()` executes it before +connection. On the client side, :meth:`~client.connect` executes it before returning the protocol to the caller. On the server side, it's executed before passing the protocol to the ``ws_handler`` coroutine handling the connection. @@ -122,26 +122,26 @@ request and the server replies with an HTTP Switching Protocols response — ``websockets`` aims at keeping the implementation of both sides consistent with one another. -On the client side, :meth:`~client.WebSocketClientProtocol.handshake()`: +On the client side, :meth:`~client.WebSocketClientProtocol.handshake`: - builds a HTTP request based on the ``uri`` and parameters passed to - :meth:`~client.connect()`; + :meth:`~client.connect`; - writes the HTTP request to the network; - reads a HTTP response from the network; - checks the HTTP response, validates ``extensions`` and ``subprotocol``, and configures the protocol accordingly; - moves to the ``OPEN`` state. -On the server side, :meth:`~server.WebSocketServerProtocol.handshake()`: +On the server side, :meth:`~server.WebSocketServerProtocol.handshake`: - reads a HTTP request from the network; -- calls :meth:`~server.WebSocketServerProtocol.process_request()` which may +- calls :meth:`~server.WebSocketServerProtocol.process_request` which may abort the WebSocket handshake and return a HTTP response instead; this hook only makes sense on the server side; - checks the HTTP request, negotiates ``extensions`` and ``subprotocol``, and configures the protocol accordingly; - builds a HTTP response based on the above and parameters passed to - :meth:`~server.serve()`; + :meth:`~server.serve`; - writes the HTTP response to the network; - moves to the ``OPEN`` state; - returns the ``path`` part of the ``uri``. @@ -226,10 +226,10 @@ When it encounters a control frame: Running this process in a task guarantees that control frames are processed promptly. Without such a task, ``websockets`` would depend on the application to drive the connection by having exactly one coroutine awaiting -:meth:`~protocol.WebSocketCommonProtocol.recv()` at any time. While this +:meth:`~protocol.WebSocketCommonProtocol.recv` at any time. While this happens naturally in many use cases, it cannot be relied upon. -Then :meth:`~protocol.WebSocketCommonProtocol.recv()` fetches the next message +Then :meth:`~protocol.WebSocketCommonProtocol.recv` fetches the next message from the :attr:`~protocol.WebSocketCommonProtocol.messages` queue, with some complexity added for handling termination correctly. @@ -238,16 +238,16 @@ Sending data The right side of the diagram shows how ``websockets`` sends data. -:meth:`~protocol.WebSocketCommonProtocol.send()` writes a single data frame +:meth:`~protocol.WebSocketCommonProtocol.send` writes a single data frame containing the message. Fragmentation isn't supported at this time. 
-:meth:`~protocol.WebSocketCommonProtocol.ping()` writes a ping frame and +:meth:`~protocol.WebSocketCommonProtocol.ping` writes a ping frame and yields a :class:`~asyncio.Future` which will be completed when a matching pong frame is received. -:meth:`~protocol.WebSocketCommonProtocol.pong()` writes a pong frame. +:meth:`~protocol.WebSocketCommonProtocol.pong` writes a pong frame. -:meth:`~protocol.WebSocketCommonProtocol.close()` writes a close frame and +:meth:`~protocol.WebSocketCommonProtocol.close` writes a close frame and waits for the TCP connection to terminate. Outgoing data is written to a :class:`~asyncio.StreamWriter` in order to @@ -259,15 +259,15 @@ Closing handshake ................. When the other side of the connection initiates the closing handshake, -:meth:`~protocol.WebSocketCommonProtocol.read_message()` receives a close +:meth:`~protocol.WebSocketCommonProtocol.read_message` receives a close frame while in the ``OPEN`` state. It moves to the ``CLOSING`` state, sends a close frame, and returns ``None``, causing :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` to terminate. When this side of the connection initiates the closing handshake with -:meth:`~protocol.WebSocketCommonProtocol.close()`, it moves to the ``CLOSING`` +:meth:`~protocol.WebSocketCommonProtocol.close`, it moves to the ``CLOSING`` state and sends a close frame. When the other side sends a close frame, -:meth:`~protocol.WebSocketCommonProtocol.read_message()` receives it in the +:meth:`~protocol.WebSocketCommonProtocol.read_message` receives it in the ``CLOSING`` state and returns ``None``, also causing :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` to terminate. @@ -417,30 +417,30 @@ Once the WebSocket connection is established, internal tasks accidentally canceled if a coroutine that awaits them is canceled. In other words, they must be shielded from cancellation. -:meth:`~protocol.WebSocketCommonProtocol.recv()` waits for the next message in +:meth:`~protocol.WebSocketCommonProtocol.recv` waits for the next message in the queue or for :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` -to terminate, whichever comes first. It relies on :func:`~asyncio.wait()` for +to terminate, whichever comes first. It relies on :func:`~asyncio.wait` for waiting on two tasks in parallel. As a consequence, even though it's waiting on the transfer data task, it doesn't propagate cancellation to that task. -:meth:`~protocol.WebSocketCommonProtocol.ensure_open()` is called by -:meth:`~protocol.WebSocketCommonProtocol.send()`, -:meth:`~protocol.WebSocketCommonProtocol.ping()`, and -:meth:`~protocol.WebSocketCommonProtocol.pong()`. When the connection state is +:meth:`~protocol.WebSocketCommonProtocol.ensure_open` is called by +:meth:`~protocol.WebSocketCommonProtocol.send`, +:meth:`~protocol.WebSocketCommonProtocol.ping`, and +:meth:`~protocol.WebSocketCommonProtocol.pong`. When the connection state is ``CLOSING``, it waits for :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` but shields it to prevent cancellation. -:meth:`~protocol.WebSocketCommonProtocol.close()` waits for the data transfer +:meth:`~protocol.WebSocketCommonProtocol.close` waits for the data transfer task to terminate with :func:`~asyncio.wait_for`. If it's canceled or if the timeout elapses, :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` is canceled, which is correct at this point. 
-:meth:`~protocol.WebSocketCommonProtocol.close()` then waits for +:meth:`~protocol.WebSocketCommonProtocol.close` then waits for :attr:`~protocol.WebSocketCommonProtocol.close_connection_task` but shields it to prevent cancellation. -:meth:`~protocol.WebSocketCommonProtocol.close()` and -:func:`~protocol.WebSocketCommonProtocol.fail_connection()` are the only +:meth:`~protocol.WebSocketCommonProtocol.close` and +:func:`~protocol.WebSocketCommonProtocol.fail_connection` are the only places where :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` may be canceled. @@ -515,35 +515,35 @@ For each connection, the receiving side contains these buffers: - OS buffers: tuning them is an advanced optimization. - :class:`~asyncio.StreamReader` bytes buffer: the default limit is 64 KiB. You can set another limit by passing a ``read_limit`` keyword argument to - :func:`~client.connect()` or :func:`~server.serve()`. + :func:`~client.connect()` or :func:`~server.serve`. - Incoming messages :class:`~collections.deque`: its size depends both on the size and the number of messages it contains. By default the maximum UTF-8 encoded size is 1 MiB and the maximum number is 32. In the worst case, after UTF-8 decoding, a single message could take up to 4 MiB of memory and the overall memory consumption could reach 128 MiB. You should adjust these limits by setting the ``max_size`` and ``max_queue`` keyword arguments of - :func:`~client.connect()` or :func:`~server.serve()` according to your + :func:`~client.connect()` or :func:`~server.serve` according to your application's requirements. For each connection, the sending side contains these buffers: - :class:`~asyncio.StreamWriter` bytes buffer: the default size is 64 KiB. You can set another limit by passing a ``write_limit`` keyword argument to - :func:`~client.connect()` or :func:`~server.serve()`. + :func:`~client.connect()` or :func:`~server.serve`. - OS buffers: tuning them is an advanced optimization. Concurrency ----------- -Calling any combination of :meth:`~protocol.WebSocketCommonProtocol.recv()`, -:meth:`~protocol.WebSocketCommonProtocol.send()`, -:meth:`~protocol.WebSocketCommonProtocol.close()` -:meth:`~protocol.WebSocketCommonProtocol.ping()`, or -:meth:`~protocol.WebSocketCommonProtocol.pong()` concurrently is safe, +Calling any combination of :meth:`~protocol.WebSocketCommonProtocol.recv`, +:meth:`~protocol.WebSocketCommonProtocol.send`, +:meth:`~protocol.WebSocketCommonProtocol.close` +:meth:`~protocol.WebSocketCommonProtocol.ping`, or +:meth:`~protocol.WebSocketCommonProtocol.pong` concurrently is safe, including multiple calls to the same method. As shown above, receiving frames is independent from sending frames. That -isolates :meth:`~protocol.WebSocketCommonProtocol.recv()`, which receives +isolates :meth:`~protocol.WebSocketCommonProtocol.recv`, which receives frames, from the other methods, which send frames. Methods that send frames also support concurrent calls. While the connection diff --git a/docs/extensions.rst b/docs/extensions.rst index 7c282ffd0..400034090 100644 --- a/docs/extensions.rst +++ b/docs/extensions.rst @@ -14,7 +14,7 @@ Per-Message Deflate, specified in :rfc:`7692`. Per-Message Deflate ------------------- -:func:`~server.serve()` and :func:`~client.connect()` enable the Per-Message +:func:`~server.serve()` and :func:`~client.connect` enable the Per-Message Deflate extension by default. You can disable this with ``compression=None``. 
You can also configure the Per-Message Deflate extension explicitly if you From 56cd365310a341eef59f11659631d7ee73b9f1da Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Mon, 17 Jun 2019 13:18:18 +0200 Subject: [PATCH 089/281] Support HTTP Basic Auth on the server side. Fix #492. --- docs/api.rst | 11 +++ docs/changelog.rst | 3 + src/websockets/__init__.py | 4 +- src/websockets/auth.py | 151 ++++++++++++++++++++++++++++++++++++ src/websockets/client.py | 6 +- src/websockets/headers.py | 102 +++++++++++++++++++++++- tests/test_auth.py | 136 ++++++++++++++++++++++++++++++++ tests/test_client_server.py | 45 ++++++----- tests/test_headers.py | 66 ++++++++++++++-- 9 files changed, 488 insertions(+), 36 deletions(-) create mode 100644 src/websockets/auth.py create mode 100644 tests/test_auth.py diff --git a/docs/api.rst b/docs/api.rst index 9870c5dff..ef567ed5b 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -102,6 +102,17 @@ Per-Message Deflate Extension .. autoclass:: ClientPerMessageDeflateFactory +HTTP Basic Auth +............... + +.. automodule:: websockets.auth + + .. autofunction:: basic_auth_protocol_factory + + .. autoclass:: BasicAuthWebSocketServerProtocol + + .. automethod:: process_request + Exceptions .......... diff --git a/docs/changelog.rst b/docs/changelog.rst index 17ecd5523..77e9da0de 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -50,6 +50,9 @@ Also: :exc:`~exceptions.ConnectionClosed` to tell apart normal connection termination from errors. +* Added :func:`~auth.basic_auth_protocol_factory` to provide HTTP Basic Auth + on the server side. + * :func:`~client.connect` handles redirects from the server during the handshake. diff --git a/src/websockets/__init__.py b/src/websockets/__init__.py index 9bfbdabfe..e7ba31ce5 100644 --- a/src/websockets/__init__.py +++ b/src/websockets/__init__.py @@ -1,5 +1,6 @@ # This relies on each of the submodules having an __all__ variable. +from .auth import * from .client import * from .exceptions import * from .protocol import * @@ -10,7 +11,8 @@ __all__ = ( - client.__all__ + auth.__all__ + + client.__all__ + exceptions.__all__ + protocol.__all__ + server.__all__ diff --git a/src/websockets/auth.py b/src/websockets/auth.py new file mode 100644 index 000000000..91d3d7420 --- /dev/null +++ b/src/websockets/auth.py @@ -0,0 +1,151 @@ +""" +The :mod:`websockets.auth` module implements HTTP Basic Authentication as +specified in :rfc:`7235` and :rfc:`7617`. + +""" + + +import functools +import http +from typing import Any, Awaitable, Callable, Iterable, Optional, Tuple, Type, Union + +from .exceptions import InvalidHeader +from .headers import build_www_authenticate_basic, parse_authorization_basic +from .http import Headers +from .server import HTTPResponse, WebSocketServerProtocol + + +__all__ = ["BasicAuthWebSocketServerProtocol", "basic_auth_protocol_factory"] + +Credentials = Tuple[str, str] + + +def is_credentials(value: Any) -> bool: + try: + username, password = value + except (TypeError, ValueError): + return False + else: + return isinstance(username, str) and isinstance(password, str) + + +class BasicAuthWebSocketServerProtocol(WebSocketServerProtocol): + """ + WebSocket server protocol that enforces HTTP Basic Auth. 
+ + """ + + def __init__( + self, + *args: Any, + realm: str, + check_credentials: Callable[[str, str], Awaitable[bool]], + **kwargs: Any, + ) -> None: + self.realm = realm + self.check_credentials = check_credentials + super().__init__(*args, **kwargs) + + async def process_request( + self, path: str, request_headers: Headers + ) -> Optional[HTTPResponse]: + """ + Check HTTP Basic Auth and return a HTTP 401 or 403 response if needed. + + If authentication succeeds, the username of the authenticated user is + stored in the ``username`` attribute. + + """ + try: + authorization = request_headers["Authorization"] + except KeyError: + return ( + http.HTTPStatus.UNAUTHORIZED, + [("WWW-Authenticate", build_www_authenticate_basic(self.realm))], + b"Missing credentials\n", + ) + + try: + username, password = parse_authorization_basic(authorization) + except InvalidHeader: + return ( + http.HTTPStatus.UNAUTHORIZED, + [("WWW-Authenticate", build_www_authenticate_basic(self.realm))], + b"Unsupported credentials\n", + ) + + if not await self.check_credentials(username, password): + return (http.HTTPStatus.FORBIDDEN, [], b"Invalid credentials\n") + + self.username = username + + return await super().process_request(path, request_headers) + + +def basic_auth_protocol_factory( + realm: str, + credentials: Optional[Union[Credentials, Iterable[Credentials]]] = None, + check_credentials: Optional[Callable[[str, str], Awaitable[bool]]] = None, + create_protocol: Type[ + BasicAuthWebSocketServerProtocol + ] = BasicAuthWebSocketServerProtocol, +) -> Callable[[Any], BasicAuthWebSocketServerProtocol]: + """ + Protocol factory that enforces HTTP Basic Auth. + + ``basic_auth_protocol_factory`` is designed to integrate with + :func:`~websockets.server.serve` like this:: + + websockets.serve( + ..., + create_protocol=websockets.basic_auth_protocol_factory( + realm="my dev server", + credentials=("hello", "iloveyou"), + ) + ) + + ``realm`` indicates the scope of protection. It should be an ASCII-only + :class:`str` because the encoding of non-ASCII characters is undefined. + Refer to section 2.2 of :rfc:`7235` for details. + + One of ``credentials`` or ``check_credentials`` must be provided but not + both. + + ``credentials`` defines hardcoded authorized credentials. It can be a + ``(username, password)`` pair or a list of such pairs. + + ``check_credentials`` defines a coroutine that checks whether credentials + are authorized. This coroutine receives ``username`` and ``password`` + arguments and returns a :class:`bool`. + + By default, ``basic_auth_protocol_factory`` creates instances of + :class:`BasicAuthWebSocketServerProtocol`. You can override this with the + ``create_protocol`` parameter. 
+ + """ + if (credentials is None) == (check_credentials is None): + raise ValueError("Provide either credentials or check_credentials") + + if credentials is not None: + if is_credentials(credentials): + + async def check_credentials(username: str, password: str) -> bool: + return (username, password) == credentials + + elif isinstance(credentials, Iterable): + credentials_list = list(credentials) + if all(is_credentials(item) for item in credentials_list): + credentials_dict = dict(credentials_list) + + async def check_credentials(username: str, password: str) -> bool: + return credentials_dict.get(username) == password + + else: + raise ValueError(f"Invalid credentials argument: {credentials}") + + else: + raise ValueError(f"Invalid credentials argument: {credentials}") + + return functools.partial( + create_protocol, realm=realm, check_credentials=check_credentials + ) diff --git a/src/websockets/client.py b/src/websockets/client.py index 3d057a2e3..e6131ed7a 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -22,7 +22,7 @@ from .handshake import build_request, check_response from .headers import ( ExtensionHeader, - build_basic_auth, + build_authorization_basic, build_extension, build_subprotocol, parse_extension, @@ -256,7 +256,9 @@ async def handshake( request_headers["Host"] = f"{wsuri.host}:{wsuri.port}" if wsuri.user_info: - request_headers["Authorization"] = build_basic_auth(*wsuri.user_info) + request_headers["Authorization"] = build_authorization_basic( + *wsuri.user_info + ) if origin is not None: request_headers["Origin"] = origin diff --git a/src/websockets/headers.py b/src/websockets/headers.py index 663e71d60..536cab592 100644 --- a/src/websockets/headers.py +++ b/src/websockets/headers.py @@ -7,11 +7,13 @@ """ + import base64 +import binascii import re from typing import Callable, List, NewType, Optional, Sequence, Tuple, TypeVar, cast -from .exceptions import InvalidHeaderFormat +from .exceptions import InvalidHeaderFormat, InvalidHeaderValue from .typing import ExtensionHeader, ExtensionParameter, Subprotocol @@ -22,6 +24,9 @@ "build_extension", "parse_subprotocol", "build_subprotocol", + "build_www_authenticate_basic", + "parse_authorization_basic", + "build_authorization_basic", ] @@ -107,6 +112,25 @@ def parse_quoted_string(header: str, pos: int, header_name: str) -> Tuple[str, i return _unquote_re.sub(r"\1", match.group()[1:-1]), match.end() +_quotable_re = re.compile(r"[\x09\x20-\x7e\x80-\xff]*") + + +_quote_re = re.compile(r"([\x22\x5c])") + + +def build_quoted_string(value: str) -> str: + """ + Format ``value`` as a quoted string. + + This is the reverse of :func:`parse_quoted_string`. + + """ + match = _quotable_re.fullmatch(value) + if match is None: + raise ValueError("invalid characters for quoted-string encoding") + return '"' + _quote_re.sub(r"\\\1", value) + '"' + + def parse_list( parse_item: Callable[[str, int, str], Tuple[T, int]], header: str, @@ -392,7 +416,18 @@ def build_subprotocol(protocols: Sequence[Subprotocol]) -> str: build_subprotocol_list = build_subprotocol # alias for backwards-compatibility -def build_basic_auth(username: str, password: str) -> str: +def build_www_authenticate_basic(realm: str) -> str: + """ + Build an WWW-Authenticate header for HTTP Basic Auth. 
+ + """ + # https://tools.ietf.org/html/rfc7617#section-2 + realm = build_quoted_string(realm) + charset = build_quoted_string("UTF-8") + return f"Basic realm={realm}, charset={charset}" + + +def build_authorization_basic(username: str, password: str) -> str: """ Build an Authorization header for HTTP Basic Auth. @@ -402,3 +437,66 @@ def build_basic_auth(username: str, password: str) -> str: user_pass = f"{username}:{password}" basic_credentials = base64.b64encode(user_pass.encode()).decode() return "Basic " + basic_credentials + + +_token68_re = re.compile(r"[A-Za-z0-9-._~+/]+=*") + + +def parse_token68(header: str, pos: int, header_name: str) -> Tuple[str, int]: + """ + Parse a token68 from ``header`` at the given position. + + Return the token value and the new position. + + Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. + + """ + match = _token68_re.match(header, pos) + if match is None: + raise InvalidHeaderFormat(header_name, "expected token68", header, pos) + return match.group(), match.end() + + +def parse_end(header: str, pos: int, header_name: str) -> None: + """ + Check that parsing reached the end of header. + + """ + if pos < len(header): + raise InvalidHeaderFormat(header_name, "trailing data", header, pos) + + +def parse_authorization_basic(header: str) -> Tuple[str, str]: + """ + Parse an Authorization header for HTTP Basic Auth. + + Return a ``(username, password)`` tuple. + + """ + # https://tools.ietf.org/html/rfc7235#section-2.1 + # https://tools.ietf.org/html/rfc7617#section-2 + scheme, pos = parse_token(header, 0, "Authorization") + if scheme.lower() != "basic": + raise InvalidHeaderValue("Authorization", f"unsupported scheme: {scheme}") + if peek_ahead(header, pos) != " ": + raise InvalidHeaderFormat( + "Authorization", "expected space after scheme", header, pos + ) + pos += 1 + basic_credentials, pos = parse_token68(header, pos, "Authorization") + parse_end(header, pos, "Authorization") + + try: + user_pass = base64.b64decode(basic_credentials.encode()).decode() + except binascii.Error: + raise InvalidHeaderValue( + "Authorization", "expected base64-encoded credentials" + ) from None + try: + username, password = user_pass.split(":", 1) + except ValueError: + raise InvalidHeaderValue( + "Authorization", "expected username:password credentials" + ) from None + + return username, password diff --git a/tests/test_auth.py b/tests/test_auth.py new file mode 100644 index 000000000..f6aa5c424 --- /dev/null +++ b/tests/test_auth.py @@ -0,0 +1,136 @@ +import unittest +import urllib.error + +from websockets.auth import * +from websockets.auth import is_credentials +from websockets.exceptions import InvalidStatusCode +from websockets.headers import build_authorization_basic + +from .test_client_server import ClientServerTestsMixin, with_client, with_server +from .utils import AsyncioTestCase + + +class AuthTests(unittest.TestCase): + def test_is_credentials(self): + self.assertTrue(is_credentials(("username", "password"))) + + def test_is_not_credentials(self): + self.assertFalse(is_credentials(None)) + self.assertFalse(is_credentials("username")) + + +class AuthClientServerTests(ClientServerTestsMixin, AsyncioTestCase): + + create_protocol = basic_auth_protocol_factory( + realm="auth-tests", credentials=("hello", "iloveyou") + ) + + @with_server(create_protocol=create_protocol) + @with_client(user_info=("hello", "iloveyou")) + def test_basic_auth(self): + req_headers = self.client.request_headers + resp_headers = self.client.response_headers + 
self.assertEqual(req_headers["Authorization"], "Basic aGVsbG86aWxvdmV5b3U=") + self.assertNotIn("WWW-Authenticate", resp_headers) + + self.loop.run_until_complete(self.client.send("Hello!")) + self.loop.run_until_complete(self.client.recv()) + + def test_basic_auth_server_no_credentials(self): + with self.assertRaises(ValueError) as raised: + basic_auth_protocol_factory(realm="auth-tests", credentials=None) + self.assertEqual( + str(raised.exception), "Provide either credentials or check_credentials" + ) + + def test_basic_auth_server_bad_credentials(self): + with self.assertRaises(ValueError) as raised: + basic_auth_protocol_factory(realm="auth-tests", credentials=42) + self.assertEqual(str(raised.exception), "Invalid credentials argument: 42") + + create_protocol_multiple_credentials = basic_auth_protocol_factory( + realm="auth-tests", + credentials=[("hello", "iloveyou"), ("goodbye", "stillloveu")], + ) + + @with_server(create_protocol=create_protocol_multiple_credentials) + @with_client(user_info=("hello", "iloveyou")) + def test_basic_auth_server_multiple_credentials(self): + self.loop.run_until_complete(self.client.send("Hello!")) + self.loop.run_until_complete(self.client.recv()) + + def test_basic_auth_bad_multiple_credentials(self): + with self.assertRaises(ValueError) as raised: + basic_auth_protocol_factory( + realm="auth-tests", credentials=[("hello", "iloveyou"), 42] + ) + self.assertEqual( + str(raised.exception), + "Invalid credentials argument: [('hello', 'iloveyou'), 42]", + ) + + async def check_credentials(username, password): + return password == "iloveyou" + + create_protocol_check_credentials = basic_auth_protocol_factory( + realm="auth-tests", check_credentials=check_credentials + ) + + @with_server(create_protocol=create_protocol_check_credentials) + @with_client(user_info=("hello", "iloveyou")) + def test_basic_auth_check_credentials(self): + self.loop.run_until_complete(self.client.send("Hello!")) + self.loop.run_until_complete(self.client.recv()) + + @with_server(create_protocol=create_protocol) + def test_basic_auth_missing_credentials(self): + with self.assertRaises(InvalidStatusCode) as raised: + self.start_client() + self.assertEqual(raised.exception.status_code, 401) + + @with_server(create_protocol=create_protocol) + def test_basic_auth_missing_credentials_details(self): + with self.assertRaises(urllib.error.HTTPError) as raised: + self.loop.run_until_complete(self.make_http_request()) + self.assertEqual(raised.exception.code, 401) + self.assertEqual( + raised.exception.headers["WWW-Authenticate"], + 'Basic realm="auth-tests", charset="UTF-8"', + ) + self.assertEqual(raised.exception.read().decode(), "Missing credentials\n") + + @with_server(create_protocol=create_protocol) + def test_basic_auth_unsupported_credentials(self): + with self.assertRaises(InvalidStatusCode) as raised: + self.start_client(extra_headers={"Authorization": "Digest ..."}) + self.assertEqual(raised.exception.status_code, 401) + + @with_server(create_protocol=create_protocol) + def test_basic_auth_unsupported_credentials_details(self): + with self.assertRaises(urllib.error.HTTPError) as raised: + self.loop.run_until_complete( + self.make_http_request(headers={"Authorization": "Digest ..."}) + ) + self.assertEqual(raised.exception.code, 401) + self.assertEqual( + raised.exception.headers["WWW-Authenticate"], + 'Basic realm="auth-tests", charset="UTF-8"', + ) + self.assertEqual(raised.exception.read().decode(), "Unsupported credentials\n") + + 
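As a quick illustration of the header helpers exercised by these tests, the
credentials above round-trip as follows (a doctest-style sketch; the base64
value is the one asserted at the top of this test class)::

    >>> from websockets.headers import build_authorization_basic
    >>> from websockets.headers import parse_authorization_basic
    >>> build_authorization_basic("hello", "iloveyou")
    'Basic aGVsbG86aWxvdmV5b3U='
    >>> parse_authorization_basic("Basic aGVsbG86aWxvdmV5b3U=")
    ('hello', 'iloveyou')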
@with_server(create_protocol=create_protocol) + def test_basic_auth_invalid_credentials(self): + with self.assertRaises(InvalidStatusCode) as raised: + self.start_client(user_info=("hello", "ihateyou")) + self.assertEqual(raised.exception.status_code, 403) + + @with_server(create_protocol=create_protocol) + def test_basic_auth_invalid_credentials_details(self): + with self.assertRaises(urllib.error.HTTPError) as raised: + authorization = build_authorization_basic("hello", "ihateyou") + self.loop.run_until_complete( + self.make_http_request(headers={"Authorization": authorization}) + ) + self.assertEqual(raised.exception.code, 403) + self.assertNotIn("WWW-Authenticate", raised.exception.headers) + self.assertEqual(raised.exception.read().decode(), "Invalid credentials\n") diff --git a/tests/test_client_server.py b/tests/test_client_server.py index 8a1177a7e..d82aa6d40 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -340,6 +340,26 @@ def temp_client(self, *args, **kwds): with temp_test_client(self, *args, **kwds): yield + def make_http_request(self, path="/", headers=None): + if headers is None: + headers = {} + + # Set url to 'https?://:'. + url = get_server_uri( + self.server, resource_name=path, secure=self.secure + ).replace("ws", "http") + + request = urllib.request.Request(url=url, headers=headers) + + if self.secure: + open_health_check = functools.partial( + urllib.request.urlopen, request, context=self.client_context + ) + else: + open_health_check = functools.partial(urllib.request.urlopen, request) + + return self.loop.run_in_executor(None, open_health_check) + class SecureClientServerTestsMixin(ClientServerTestsMixin): @@ -586,13 +606,6 @@ def test_protocol_path(self): server_path = self.loop.run_until_complete(self.client.recv()) self.assertEqual(server_path, "/path") - @with_server() - @with_client("/headers", user_info=("user", "pass")) - def test_protocol_basic_auth(self): - self.assertEqual( - self.client.request_headers["Authorization"], "Basic dXNlcjpwYXNz" - ) - @with_server() @with_client("/headers") def test_protocol_headers(self): @@ -690,20 +703,6 @@ def test_protocol_custom_response_user_agent(self): self.assertEqual(resp_headers.count("Server"), 1) self.assertIn("('Server', 'Eggs')", resp_headers) - def make_http_request(self, path="/"): - # Set url to 'https?://:'. - url = get_server_uri(self.server, resource_name=path, secure=self.secure) - url = url.replace("ws", "http") - - if self.secure: - open_health_check = functools.partial( - urllib.request.urlopen, url, context=self.client_context - ) - else: - open_health_check = functools.partial(urllib.request.urlopen, url) - - return self.loop.run_in_executor(None, open_health_check) - @with_server(create_protocol=HealthCheckServerProtocol) def test_http_request_http_endpoint(self): # Making a HTTP request to a HTTP endpoint succeeds. 
@@ -979,12 +978,12 @@ def test_compression_deflate_and_explicit_config(self): def test_compression_unsupported_server(self): with self.assertRaises(ValueError): - self.loop.run_until_complete(self.start_server(compression="xz")) + self.start_server(compression="xz") @with_server() def test_compression_unsupported_client(self): with self.assertRaises(ValueError): - self.loop.run_until_complete(self.start_client(compression="xz")) + self.start_client(compression="xz") @with_server() @with_client("/subprotocol") diff --git a/tests/test_headers.py b/tests/test_headers.py index 51a0f33af..26d85fa5e 100644 --- a/tests/test_headers.py +++ b/tests/test_headers.py @@ -1,8 +1,7 @@ import unittest -from websockets.exceptions import InvalidHeaderFormat +from websockets.exceptions import InvalidHeaderFormat, InvalidHeaderValue from websockets.headers import * -from websockets.headers import build_basic_auth class HeadersTests(unittest.TestCase): @@ -17,7 +16,7 @@ def test_parse_connection(self): with self.subTest(header=header): self.assertEqual(parse_connection(header), parsed) - def test_parse_connection_invalid_header(self): + def test_parse_connection_invalid_header_format(self): for header in ["???", "keep-alive; Upgrade"]: with self.subTest(header=header): with self.assertRaises(InvalidHeaderFormat): @@ -35,7 +34,7 @@ def test_parse_upgrade(self): with self.subTest(header=header): self.assertEqual(parse_upgrade(header), parsed) - def test_parse_upgrade_invalid_header(self): + def test_parse_upgrade_invalid_header_format(self): for header in ["???", "websocket 2", "http/3.0; websocket"]: with self.subTest(header=header): with self.assertRaises(InvalidHeaderFormat): @@ -83,7 +82,7 @@ def test_parse_extension(self): unparsed = build_extension(parsed) self.assertEqual(parse_extension(unparsed), parsed) - def test_parse_extension_invalid_header(self): + def test_parse_extension_invalid_header_format(self): for header in [ # Truncated examples "", @@ -127,9 +126,60 @@ def test_parse_subprotocol_invalid_header(self): with self.assertRaises(InvalidHeaderFormat): parse_subprotocol(header) - def test_build_basic_auth(self): - # Test vector from RFC 7617. 
+ def test_build_www_authenticate_basic(self): + # Test vector from RFC 7617 self.assertEqual( - build_basic_auth("Aladdin", "open sesame"), + build_www_authenticate_basic("foo"), 'Basic realm="foo", charset="UTF-8"' + ) + + def test_build_www_authenticate_basic_invalid_realm(self): + # Realm contains a control character forbidden in quoted-string encoding + with self.assertRaises(ValueError): + build_www_authenticate_basic("\u0007") + + def test_build_authorization_basic(self): + # Test vector from RFC 7617 + self.assertEqual( + build_authorization_basic("Aladdin", "open sesame"), "Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==", ) + + def test_build_authorization_basic_utf8(self): + # Test vector from RFC 7617 + self.assertEqual( + build_authorization_basic("test", "123£"), "Basic dGVzdDoxMjPCow==" + ) + + def test_parse_authorization_basic(self): + for header, parsed in [ + ("Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==", ("Aladdin", "open sesame")), + # Password contains non-ASCII character + ("Basic dGVzdDoxMjPCow==", ("test", "123£")), + # Password contains a colon + ("Basic YWxhZGRpbjpvcGVuOnNlc2FtZQ==", ("aladdin", "open:sesame")), + # Scheme name must be case insensitive + ("basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==", ("Aladdin", "open sesame")), + ]: + with self.subTest(header=header): + self.assertEqual(parse_authorization_basic(header), parsed) + + def test_parse_authorization_basic_invalid_header_format(self): + for header in [ + "// Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==", + "Basic\tQWxhZGRpbjpvcGVuIHNlc2FtZQ==", + "Basic ****************************", + "Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ== //", + ]: + with self.subTest(header=header): + with self.assertRaises(InvalidHeaderFormat): + parse_authorization_basic(header) + + def test_parse_authorization_basic_invalid_header_value(self): + for header in [ + "Digest ...", + "Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ", + "Basic QWxhZGNlc2FtZQ==", + ]: + with self.subTest(header=header): + with self.assertRaises(InvalidHeaderValue): + parse_authorization_basic(header) From 918d83f6abcd468998a1a6a51387ae5c42a90297 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 22 Jun 2019 13:04:42 +0200 Subject: [PATCH 090/281] Add basic auth examples. --- example/basic_auth_client.py | 14 ++++++++++++++ example/basic_auth_server.py | 20 ++++++++++++++++++++ 2 files changed, 34 insertions(+) create mode 100755 example/basic_auth_client.py create mode 100755 example/basic_auth_server.py diff --git a/example/basic_auth_client.py b/example/basic_auth_client.py new file mode 100755 index 000000000..cc94dbe4b --- /dev/null +++ b/example/basic_auth_client.py @@ -0,0 +1,14 @@ +#!/usr/bin/env python + +# WS client example with HTTP Basic Authentication + +import asyncio +import websockets + +async def hello(): + uri = "ws://mary:p@ssw0rd@localhost:8765" + async with websockets.connect(uri) as websocket: + greeting = await websocket.recv() + print(greeting) + +asyncio.get_event_loop().run_until_complete(hello()) diff --git a/example/basic_auth_server.py b/example/basic_auth_server.py new file mode 100755 index 000000000..6740d5798 --- /dev/null +++ b/example/basic_auth_server.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python + +# Server example with HTTP Basic Authentication over TLS + +import asyncio +import websockets + +async def hello(websocket, path): + greeting = f"Hello {websocket.username}!" 
+ await websocket.send(greeting) + +start_server = websockets.serve( + hello, "localhost", 8765, + create_protocol=websockets.basic_auth_protocol_factory( + realm="example", credentials=("mary", "p@ssw0rd") + ), +) + +asyncio.get_event_loop().run_until_complete(start_server) +asyncio.get_event_loop().run_forever() From c0c31b89c1eb382ca0604a6edb762e0b2c919ed2 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 22 Jun 2019 13:18:41 +0200 Subject: [PATCH 091/281] Avoid crash caused by type annotations. --- src/websockets/__main__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/websockets/__main__.py b/src/websockets/__main__.py index 604caa5e4..14bf655b1 100644 --- a/src/websockets/__main__.py +++ b/src/websockets/__main__.py @@ -43,7 +43,7 @@ def win_enable_vt100() -> None: def exit_from_event_loop_thread( - loop: asyncio.AbstractEventLoop, stop: asyncio.Future[None] + loop: asyncio.AbstractEventLoop, stop: "asyncio.Future[None]" ) -> None: loop.stop() if not stop.done(): @@ -91,8 +91,8 @@ def print_over_input(string: str) -> None: async def run_client( uri: str, loop: asyncio.AbstractEventLoop, - inputs: asyncio.Queue[str], - stop: asyncio.Future[None], + inputs: "asyncio.Queue[str]", + stop: "asyncio.Future[None]", ) -> None: try: websocket = await websockets.connect(uri) From 71d476a5141be67daaab82dab729278940085a86 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 22 Jun 2019 13:43:17 +0200 Subject: [PATCH 092/281] Handle ConnectionClosed exception in keepalive_ping. Fix #551. Thanks @Harmon758 for reporting this bug and identifying the root cause. --- docs/changelog.rst | 3 +++ src/websockets/protocol.py | 15 ++++++++++----- tests/test_protocol.py | 25 +++++++++++++++++++++++++ 3 files changed, 38 insertions(+), 5 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 77e9da0de..5f22a06eb 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -59,6 +59,9 @@ Also: * Improved support for sending fragmented messages by accepting asynchronous iterators in :meth:`~protocol.WebSocketCommonProtocol.send`. +* Prevented spurious log messages about :exc:`~exceptions.ConnectionClosed` + exceptions in keepalive ping task. + * Avoided a crash of a ``extra_headers`` callable returns ``None``. * Enabled readline in the interactive client. diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index c07aef99f..c46faaf94 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -980,6 +980,7 @@ async def keepalive_ping(self) -> None: This coroutine exits when the connection terminates and one of the following happens: + - :meth:`ping` raises :exc:`ConnectionClosed`, or - :meth:`close_connection` cancels :attr:`keepalive_ping_task`. @@ -991,11 +992,12 @@ async def keepalive_ping(self) -> None: while True: await asyncio.sleep(self.ping_interval, loop=self.loop) - # ping() cannot raise ConnectionClosed, only CancelledError: - # - If the connection is CLOSING, keepalive_ping_task will be - # canceled by close_connection() before ping() returns. - # - If the connection is CLOSED, keepalive_ping_task must be - # canceled already. + # ping() raises CancelledError if the connection is closed, + # when close_connection() cancels self.keepalive_ping_task. + + # ping() raises ConnectionClosed if the connection is lost, + # when connection_lost() calls abort_keepalive_pings(). 
+ ping_waiter = await self.ping() if self.ping_timeout is not None: @@ -1011,6 +1013,9 @@ async def keepalive_ping(self) -> None: except asyncio.CancelledError: raise + except ConnectionClosed: + pass + except Exception: logger.warning("Unexpected exception in keepalive ping task", exc_info=True) diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 938e54d8d..57c0c0e6e 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -1074,6 +1074,31 @@ def test_keepalive_ping_stops_when_connection_closed(self): # The keepalive ping task terminated. self.assertTrue(self.protocol.keepalive_ping_task.cancelled()) + def test_keepalive_ping_does_not_crash_when_connection_lost(self): + self.restart_protocol_with_keepalive_ping() + # Clog incoming queue. This lets connection_lost() abort pending pings + # with a ConnectionClosed exception before transfer_data_task + # terminates and close_connection cancels keepalive_ping_task. + self.protocol.max_queue = 1 + self.receive_frame(Frame(True, OP_TEXT, b"1")) + self.receive_frame(Frame(True, OP_TEXT, b"2")) + # Ping is sent at 3ms. + self.loop.run_until_complete(asyncio.sleep(4 * MS)) + ping_waiter, = tuple(self.protocol.pings.values()) + # Connection drops. + self.receive_eof() + self.loop.run_until_complete(self.protocol.wait_closed()) + + # The ping waiter receives a ConnectionClosed exception. + with self.assertRaises(ConnectionClosed): + ping_waiter.result() + # The keepalive ping task terminated properly. + self.assertIsNone(self.protocol.keepalive_ping_task.result()) + + # Unclog incoming queue to terminate the test quickly. + self.loop.run_until_complete(self.protocol.recv()) + self.loop.run_until_complete(self.protocol.recv()) + def test_keepalive_ping_with_no_ping_interval(self): self.restart_protocol_with_keepalive_ping(ping_interval=None) From f8d8a61d8e2c7dcd6eb807d952cfa2b5179b29cb Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 22 Jun 2019 17:46:22 +0200 Subject: [PATCH 093/281] Handle aborted pings when receiving a pong. Fix #551. Thanks @Harmon758 for reporting this bug and identifying the root cause. --- src/websockets/protocol.py | 3 ++- tests/test_protocol.py | 29 +++++++++++++++++++++++++++++ 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index c46faaf94..d6462cc16 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -887,7 +887,8 @@ async def read_data_frame(self, max_size: int) -> Optional[Frame]: while ping_id != frame.data: ping_id, pong_waiter = self.pings.popitem(last=False) ping_ids.append(ping_id) - pong_waiter.set_result(None) + if not pong_waiter.done(): + pong_waiter.set_result(None) pong_hex = binascii.hexlify(frame.data).decode() or "[empty]" logger.debug( "%s - received solicited pong: %s", self.side, pong_hex diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 57c0c0e6e..57cef89e0 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -864,6 +864,35 @@ def test_acknowledge_previous_pings(self): self.assertTrue(pings[1][0].done()) self.assertFalse(pings[2][0].done()) + def test_acknowledge_aborted_ping(self): + ping = self.loop.run_until_complete(self.protocol.ping()) + ping_frame = self.last_sent_frame() + # Clog incoming queue. This lets connection_lost() abort pending pings + # with a ConnectionClosed exception before transfer_data_task + # terminates and close_connection cancels keepalive_ping_task. 
+ self.protocol.max_queue = 1 + self.receive_frame(Frame(True, OP_TEXT, b"1")) + self.receive_frame(Frame(True, OP_TEXT, b"2")) + # Add pong frame to the queue. + pong_frame = Frame(True, OP_PONG, ping_frame.data) + self.receive_frame(pong_frame) + # Connection drops. + self.receive_eof() + self.loop.run_until_complete(self.protocol.wait_closed()) + # Ping receives a ConnectionClosed exception. + with self.assertRaises(ConnectionClosed): + ping.result() + + with self.assertLogs("websockets", level=logging.ERROR) as logs: + # We want to test that no error log is emitted. + # Unfortunately assertLogs expects at least one log message. + logging.getLogger("websockets").error("dummy") + # Unclog incoming queue. + self.loop.run_until_complete(self.protocol.recv()) + self.loop.run_until_complete(self.protocol.recv()) + # transfer_data doesn't crash, which would be logged. + self.assertEqual(logs.output[1:], []) + def test_canceled_ping(self): ping = self.loop.run_until_complete(self.protocol.ping()) ping_frame = self.last_sent_frame() From 34aaf6bcbbac62d8c605d5ba768709346ef87c6e Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 22 Jun 2019 18:24:12 +0200 Subject: [PATCH 094/281] Update code style for example. Start from what black produces, then wrap at 66 chars and don't skip more than one line. --- docs/deployment.rst | 2 +- docs/intro.rst | 6 +++--- example/client.py | 4 ++-- example/counter.py | 34 +++++++++++++++++++++------------- example/echo.py | 5 +++-- example/health_check_server.py | 7 ++++--- example/hello.py | 6 +++--- example/secure_client.py | 8 +++++--- example/secure_server.py | 7 ++++--- example/server.py | 2 +- example/show_time.py | 4 ++-- example/shutdown.py | 2 +- 12 files changed, 50 insertions(+), 37 deletions(-) diff --git a/docs/deployment.rst b/docs/deployment.rst index 9aa2d3744..797284f3d 100644 --- a/docs/deployment.rst +++ b/docs/deployment.rst @@ -154,4 +154,4 @@ the :meth:`~server.WebSocketServerProtocol.process_request` hook. Typical use cases include health checks. Here's an example: .. literalinclude:: ../example/health_check_server.py - :emphasize-lines: 9-11,17-18 + :emphasize-lines: 9-11,17-19 diff --git a/docs/intro.rst b/docs/intro.rst index 118167b73..8decd462d 100644 --- a/docs/intro.rst +++ b/docs/intro.rst @@ -40,7 +40,7 @@ coroutine returns. Here's a corresponding WebSocket client example. .. literalinclude:: ../example/client.py - :emphasize-lines: 8-10 + :emphasize-lines: 8,10 Using :func:`connect` as an asynchronous context manager ensures the connection is closed before exiting the ``hello`` coroutine. @@ -60,12 +60,12 @@ Here's how to adapt the server example to provide secure connections. See the documentation of the :mod:`ssl` module for configuring the context securely. .. literalinclude:: ../example/secure_server.py - :emphasize-lines: 19,23-24 + :emphasize-lines: 19,23-25 Here's how to adapt the client. .. literalinclude:: ../example/secure_client.py - :emphasize-lines: 10,15-16 + :emphasize-lines: 10,15-18 This client needs a context because the server uses a self-signed certificate. diff --git a/example/client.py b/example/client.py index e71595ff5..4f969c478 100755 --- a/example/client.py +++ b/example/client.py @@ -6,8 +6,8 @@ import websockets async def hello(): - async with websockets.connect( - 'ws://localhost:8765') as websocket: + uri = "ws://localhost:8765" + async with websockets.connect(uri) as websocket: name = input("What's your name? 
") await websocket.send(name) diff --git a/example/counter.py b/example/counter.py index 9cce009fd..dbbbe5935 100755 --- a/example/counter.py +++ b/example/counter.py @@ -9,34 +9,41 @@ logging.basicConfig() -STATE = {'value': 0} +STATE = {"value": 0} USERS = set() + def state_event(): - return json.dumps({'type': 'state', **STATE}) + return json.dumps({"type": "state", **STATE}) + def users_event(): - return json.dumps({'type': 'users', 'count': len(USERS)}) + return json.dumps({"type": "users", "count": len(USERS)}) + async def notify_state(): - if USERS: # asyncio.wait doesn't accept an empty list + if USERS: # asyncio.wait doesn't accept an empty list message = state_event() await asyncio.wait([user.send(message) for user in USERS]) + async def notify_users(): - if USERS: # asyncio.wait doesn't accept an empty list + if USERS: # asyncio.wait doesn't accept an empty list message = users_event() await asyncio.wait([user.send(message) for user in USERS]) + async def register(websocket): USERS.add(websocket) await notify_users() + async def unregister(websocket): USERS.remove(websocket) await notify_users() + async def counter(websocket, path): # register(websocket) sends user_event() to websocket await register(websocket) @@ -44,18 +51,19 @@ async def counter(websocket, path): await websocket.send(state_event()) async for message in websocket: data = json.loads(message) - if data['action'] == 'minus': - STATE['value'] -= 1 + if data["action"] == "minus": + STATE["value"] -= 1 await notify_state() - elif data['action'] == 'plus': - STATE['value'] += 1 + elif data["action"] == "plus": + STATE["value"] += 1 await notify_state() else: - logging.error( - "unsupported event: {}", data) + logging.error("unsupported event: {}", data) finally: await unregister(websocket) -asyncio.get_event_loop().run_until_complete( - websockets.serve(counter, 'localhost', 6789)) + +start_server = websockets.serve(counter, "localhost", 6789) + +asyncio.get_event_loop().run_until_complete(start_server) asyncio.get_event_loop().run_forever() diff --git a/example/echo.py b/example/echo.py index 8fa307dd7..b7ca38d32 100755 --- a/example/echo.py +++ b/example/echo.py @@ -7,6 +7,7 @@ async def echo(websocket, path): async for message in websocket: await websocket.send(message) -asyncio.get_event_loop().run_until_complete( - websockets.serve(echo, 'localhost', 8765)) +start_server = websockets.serve(echo, "localhost", 8765) + +asyncio.get_event_loop().run_until_complete(start_server) asyncio.get_event_loop().run_forever() diff --git a/example/health_check_server.py b/example/health_check_server.py index feb04bccd..417063fce 100755 --- a/example/health_check_server.py +++ b/example/health_check_server.py @@ -7,15 +7,16 @@ import websockets async def health_check(path, request_headers): - if path == '/health/': - return http.HTTPStatus.OK, [], b'OK\n' + if path == "/health/": + return http.HTTPStatus.OK, [], b"OK\n" async def echo(websocket, path): async for message in websocket: await websocket.send(message) start_server = websockets.serve( - echo, 'localhost', 8765, process_request=health_check) + echo, "localhost", 8765, process_request=health_check +) asyncio.get_event_loop().run_until_complete(start_server) asyncio.get_event_loop().run_forever() diff --git a/example/hello.py b/example/hello.py index f90c0de55..6c9c839d8 100755 --- a/example/hello.py +++ b/example/hello.py @@ -3,10 +3,10 @@ import asyncio import websockets -async def hello(uri): +async def hello(): + uri = "ws://localhost:8765" async with 
websockets.connect(uri) as websocket: await websocket.send("Hello world!") await websocket.recv() -asyncio.get_event_loop().run_until_complete( - hello('ws://localhost:8765')) +asyncio.get_event_loop().run_until_complete(hello()) diff --git a/example/secure_client.py b/example/secure_client.py index 8e7f57ff9..54971b984 100755 --- a/example/secure_client.py +++ b/example/secure_client.py @@ -8,12 +8,14 @@ import websockets ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) -ssl_context.load_verify_locations( - pathlib.Path(__file__).with_name('localhost.pem')) +localhost_pem = pathlib.Path(__file__).with_name("localhost.pem") +ssl_context.load_verify_locations(localhost_pem) async def hello(): + uri = "wss://localhost:8765" async with websockets.connect( - 'wss://localhost:8765', ssl=ssl_context) as websocket: + uri, ssl=ssl_context + ) as websocket: name = input("What's your name? ") await websocket.send(name) diff --git a/example/secure_server.py b/example/secure_server.py index 5cbed46c0..2a00bdb50 100755 --- a/example/secure_server.py +++ b/example/secure_server.py @@ -17,11 +17,12 @@ async def hello(websocket, path): print(f"> {greeting}") ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) -ssl_context.load_cert_chain( - pathlib.Path(__file__).with_name('localhost.pem')) +localhost_pem = pathlib.Path(__file__).with_name("localhost.pem") +ssl_context.load_cert_chain(localhost_pem) start_server = websockets.serve( - hello, 'localhost', 8765, ssl=ssl_context) + hello, "localhost", 8765, ssl=ssl_context +) asyncio.get_event_loop().run_until_complete(start_server) asyncio.get_event_loop().run_forever() diff --git a/example/server.py b/example/server.py index cc5c8fea8..c8ab69971 100755 --- a/example/server.py +++ b/example/server.py @@ -14,7 +14,7 @@ async def hello(websocket, path): await websocket.send(greeting) print(f"> {greeting}") -start_server = websockets.serve(hello, 'localhost', 8765) +start_server = websockets.serve(hello, "localhost", 8765) asyncio.get_event_loop().run_until_complete(start_server) asyncio.get_event_loop().run_forever() diff --git a/example/show_time.py b/example/show_time.py index 6d196deb3..e5d6ac9aa 100755 --- a/example/show_time.py +++ b/example/show_time.py @@ -9,11 +9,11 @@ async def time(websocket, path): while True: - now = datetime.datetime.utcnow().isoformat() + 'Z' + now = datetime.datetime.utcnow().isoformat() + "Z" await websocket.send(now) await asyncio.sleep(random.random() * 3) -start_server = websockets.serve(time, '127.0.0.1', 5678) +start_server = websockets.serve(time, "127.0.0.1", 5678) asyncio.get_event_loop().run_until_complete(start_server) asyncio.get_event_loop().run_forever() diff --git a/example/shutdown.py b/example/shutdown.py index 6d75af192..86846abe7 100755 --- a/example/shutdown.py +++ b/example/shutdown.py @@ -9,7 +9,7 @@ async def echo(websocket, path): await websocket.send(message) async def echo_server(stop): - async with websockets.serve(echo, 'localhost', 8765): + async with websockets.serve(echo, "localhost", 8765): await stop loop = asyncio.get_event_loop() From bf6db5ddeda3f2da7a48f69ec9fa6c024fdcbfa8 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 22 Jun 2019 18:27:29 +0200 Subject: [PATCH 095/281] Encourage users to remove workarounds. Refs #551. 
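For context, the workaround mentioned below was disabling the ping timeout. A
minimal sketch of the knobs involved, patterned on the hello client from these
examples; the 20 second values are the defaults described in the FAQ added
later in this series, and ``ping_timeout=None`` is the setting that should no
longer be needed::

    import asyncio
    import websockets

    async def hello():
        uri = "ws://localhost:8765"
        # ping_interval and ping_timeout are shown at their default values;
        # passing ping_timeout=None disables the timeout (the old workaround).
        async with websockets.connect(
            uri, ping_interval=20, ping_timeout=20
        ) as websocket:
            await websocket.send("Hello world!")
            await websocket.recv()

    asyncio.get_event_loop().run_until_complete(hello())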
--- docs/changelog.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 5f22a06eb..56d4b9398 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -62,6 +62,8 @@ Also: * Prevented spurious log messages about :exc:`~exceptions.ConnectionClosed` exceptions in keepalive ping task. + If you were using ``ping_timeout=None`` as a workaround, you can remove it. + * Avoided a crash of a ``extra_headers`` callable returns ``None``. * Enabled readline in the interactive client. From 9b89de93b1d00fd404439675ecc1f3f385287cc4 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 22 Jun 2019 19:04:07 +0200 Subject: [PATCH 096/281] Handle ConnectionClosed when echoing a close frame. Fix #606. Thanks @lgrahl for the bug report. --- src/websockets/protocol.py | 13 +++++++++---- tests/test_protocol.py | 21 +++++++++++++++------ tests/utils.py | 17 +++++++++++++++++ 3 files changed, 41 insertions(+), 10 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index d6462cc16..d888a9729 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -864,10 +864,15 @@ async def read_data_frame(self, max_size: int) -> Optional[Frame]: # 7.1.5. The WebSocket Connection Close Code # 7.1.6. The WebSocket Connection Close Reason self.close_code, self.close_reason = parse_close(frame.data) - # Echo the original data instead of re-serializing it with - # serialize_close() because that fails when the close frame is - # empty and parse_close() synthetizes a 1005 close code. - await self.write_close_frame(frame.data) + try: + # Echo the original data instead of re-serializing it with + # serialize_close() because that fails when the close frame + # is empty and parse_close() synthetizes a 1005 close code. + await self.write_close_frame(frame.data) + except ConnectionClosed: + # It doesn't really matter if the connection was closed + # before we could send back a close frame. + pass return None elif frame.opcode == OP_PING: diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 57cef89e0..0d3185d42 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -883,15 +883,11 @@ def test_acknowledge_aborted_ping(self): with self.assertRaises(ConnectionClosed): ping.result() - with self.assertLogs("websockets", level=logging.ERROR) as logs: - # We want to test that no error log is emitted. - # Unfortunately assertLogs expects at least one log message. - logging.getLogger("websockets").error("dummy") + # transfer_data doesn't crash, which would be logged. + with self.assertNoLogs(): # Unclog incoming queue. self.loop.run_until_complete(self.protocol.recv()) self.loop.run_until_complete(self.protocol.recv()) - # transfer_data doesn't crash, which would be logged. - self.assertEqual(logs.output[1:], []) def test_canceled_ping(self): ping = self.loop.run_until_complete(self.protocol.ping()) @@ -1205,6 +1201,19 @@ def test_remote_close(self): self.assertConnectionClosed(1000, "close") self.assertNoFrameSent() + def test_remote_close_and_connection_lost(self): + self.make_drain_slow() + # Drop the connection right after receiving a close frame, + # which prevents echoing the close frame properly. 
+ self.receive_frame(self.close_frame) + self.receive_eof() + + with self.assertNoLogs(): + self.loop.run_until_complete(self.protocol.close(reason="oh noes!")) + + self.assertConnectionClosed(1000, "close") + self.assertOneFrameSent(*self.close_frame) + def test_simultaneous_close(self): # Receive the incoming close frame right after self.protocol.close() # starts executing. This reproduces the error described in: diff --git a/tests/utils.py b/tests/utils.py index 0a9f14ce1..059efba20 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,4 +1,6 @@ import asyncio +import contextlib +import logging import os import time import unittest @@ -25,6 +27,21 @@ def run_loop_once(self): self.loop.call_soon(self.loop.stop) self.loop.run_forever() + @contextlib.contextmanager + def assertNoLogs(self, logger="websockets", level=logging.ERROR): + """ + No message is logged on the given logger with at least the given level. + + """ + with self.assertLogs(logger, level) as logs: + # We want to test that no log message is emitted + # but assertLogs expects at least one log message. + logging.getLogger(logger).log(level, "dummy") + yield + + level_name = logging.getLevelName(level) + self.assertEqual(logs.output, [f"{level_name}:{logger}:dummy"]) + # Unit for timeouts. May be increased on slow machines by setting the # WEBSOCKETS_TESTS_TIMEOUT_FACTOR environment variable. From 7d429b56b62a263320fc693b6862da757ffb763f Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 23 Jun 2019 18:56:27 +0200 Subject: [PATCH 097/281] Rewrite documentation for process_request. Fix #496. --- src/websockets/server.py | 46 ++++++++++++++++++++++++---------------- 1 file changed, 28 insertions(+), 18 deletions(-) diff --git a/src/websockets/server.py b/src/websockets/server.py index 870e4ec7a..9882eabef 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -267,30 +267,40 @@ async def process_request( self, path: str, request_headers: Headers ) -> Optional[HTTPResponse]: """ - Intercept the HTTP request and return an HTTP response if needed. + Intercept the HTTP request and return an HTTP response if appropriate. - ``request_headers`` is a :class:`~websockets.http.Headers` instance. + ``path`` is a :class:`str` and ``request_headers`` is a + :class:`~websockets.http.Headers` instance. - If this coroutine returns ``None``, the WebSocket handshake continues. - If it returns a status code, headers and a response body, that HTTP - response is sent and the connection is closed. + If ``process_request`` returns ``None``, the WebSocket handshake + continues. If it returns a status code, headers and a response body, + that HTTP response is sent and the connection is closed. In that case: - The HTTP status must be a :class:`~http.HTTPStatus`. + * The HTTP status must be a :class:`~http.HTTPStatus`. + * HTTP headers must be a :class:`~websockets.http.Headers` instance, a + :class:`~collections.abc.Mapping`, or an iterable of ``(name, + value)`` pairs. + * The HTTP response body must be :class:`bytes`. It may be empty. - HTTP headers must be a :class:`~websockets.http.Headers` instance, a - :class:`~collections.abc.Mapping`, or an iterable of ``(name, value)`` - pairs. + This coroutine may be overridden in a :class:`WebSocketServerProtocol` + subclass, for example: - The HTTP response body must be :class:`bytes`. It may be empty. 
+ * to return a HTTP 200 :attr:`~http.HTTPStatus.OK` response on a given + path; then a load balancer can use this path for a health check; + * to authenticate the request and return a HTTP 401 + :attr:`~http.HTTPStatus.UNAUTHORIZED` or a HTTP 403 + :attr:`~http.HTTPStatus.FORBIDDEN` when authentication fails. - This coroutine may be overridden to check the request headers and set - a different status, for example to authenticate the request and return - :attr:`http.HTTPStatus.UNAUTHORIZED` or - :attr:`http.HTTPStatus.FORBIDDEN`. - - It may also be overridden by passing a ``process_request`` argument to - the :class:`WebSocketServerProtocol` constructor or the :func:`serve` - function. + Instead of subclassing, it is possible to pass a ``process_request`` + argument to the :class:`WebSocketServerProtocol` constructor or the + :func:`serve` function. This is equivalent, except the + ``process_request`` corountine doesn't have access to the protocol + instance, so it can't store information for later use. + + ``process_request`` is expected to complete quickly. If it may run for + a long time, then it should await :meth:`wait_closed` and exit if + :meth:`wait_closed` completes, or else it could prevent the server + from shutting down. """ if self._process_request is not None: From cac72a7bfdf744fb5d4604317f2ec68caf941751 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 23 Jun 2019 18:42:13 +0200 Subject: [PATCH 098/281] Add a FAQ. Fix #621. --- docs/changelog.rst | 2 + docs/faq.rst | 211 +++++++++++++++++++++++++++++++++++++++++++++ docs/index.rst | 1 + 3 files changed, 214 insertions(+) create mode 100644 docs/faq.rst diff --git a/docs/changelog.rst b/docs/changelog.rst index 56d4b9398..c2719560b 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -70,6 +70,8 @@ Also: * Added type hints (:pep:`484`). +* Added a FAQ to the documentation. + * Added documentation for extensions. * Documented how to optimize memory usage. diff --git a/docs/faq.rst b/docs/faq.rst new file mode 100644 index 000000000..6c5352668 --- /dev/null +++ b/docs/faq.rst @@ -0,0 +1,211 @@ +FAQ +=== + +.. currentmodule:: websockets + +.. note:: + + Many questions asked in :mod:`websockets`' issue tracker are actually + about :mod:`asyncio`. Python's documentation about `developing with + asyncio`_ is a good complement. + + .. _developing with asyncio: https://docs.python.org/3/library/asyncio-dev.html + +Server side +----------- + +Why does the server close the connection after processing one message? +...................................................................... + +Your connection handler exits after processing one message. Write a loop to +process multiple messages. + +For example, if your handler looks like this:: + + async def handler(websocket, path): + print(websocket.recv()) + +change it like this:: + + async def handler(websocket, path): + async for message in websocket: + print(message) + +*Don't feel bad if this happens to you — it's the most common question in +websockets' issue tracker :-)* + +Why can only one client connect at a time? +.......................................... + +Your connection handler blocks the event loop. Look for blocking calls. +Any call that may take some time must be asynchronous. + +For example, if you have:: + + async def handler(websocket, path): + time.sleep(1) + +change it to:: + + async def handler(websocket, path): + await asyncio.sleep(1) + +This is part of learning asyncio. It isn't specific to websockets. 
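If the blocking call has no asynchronous equivalent, another option is to push
it to a thread with ``run_in_executor``, the same pattern the
``make_http_request`` test helper above uses. A minimal sketch, where
``blocking_function`` is a placeholder for your own code and is assumed to
return text::

    async def handler(websocket, path):
        loop = asyncio.get_event_loop()
        # run the blocking call in the default thread pool executor
        result = await loop.run_in_executor(None, blocking_function)
        await websocket.send(result)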
+
+See also Python's documentation about `running blocking code`_.
+
+.. _running blocking code: https://docs.python.org/3/library/asyncio-dev.html#running-blocking-code
+
+How do I access HTTP headers, for example cookies?
+......................................................
+
+To access HTTP headers during the WebSocket handshake, you can override
+:attr:`~server.WebSocketServerProtocol.process_request`::
+
+    async def process_request(self, path, request_headers):
+        cookies = request_headers["Cookie"]
+
+See
+
+Once the connection is established, they're available in
+:attr:`~protocol.WebSocketServerProtocol.request_headers`::
+
+    async def handler(websocket, path):
+        cookies = websocket.request_headers["Cookie"]
+
+How do I get the IP address of the client connecting to my server?
+..................................................................
+
+It's available in :attr:`~protocol.WebSocketCommonProtocol.remote_address`::
+
+    async def handler(websocket, path):
+        remote_ip = websocket.remote_address[0]
+
+How do I set which IP addresses my server listens to?
+.....................................................
+
+Look at the ``host`` argument of :meth:`~asyncio.loop.create_server`.
+
+:func:`serve` accepts the same arguments as
+:meth:`~asyncio.loop.create_server`.
+
+How do I close a connection properly?
+.....................................
+
+websockets takes care of closing the connection when the handler exits.
+
+How do I run a HTTP server and WebSocket server on the same port?
+.................................................................
+
+This isn't supported.
+
+Providing a HTTP server is out of scope for websockets. It only aims at
+providing a WebSocket server.
+
+There's limited support for returning HTTP responses with the
+:attr:`~server.WebSocketServerProtocol.process_request` hook.
+If you need more, pick a HTTP server and run it separately.
+
+Client side
+-----------
+
+How do I close a connection properly?
+.....................................
+
+The easiest is to use :func:`connect` as a context manager::
+
+    async with connect(...) as websocket:
+        ...
+
+How do I reconnect automatically when the connection drops?
+...........................................................
+
+See `issue 414`_.
+
+.. _issue 414: https://github.com/aaugustin/websockets/issues/414
+
+How do I disable SSL certificate verification?
+..............................................
+
+Look at the ``ssl`` argument of :meth:`~asyncio.loop.create_connection`.
+
+:func:`connect` accepts the same arguments as
+:meth:`~asyncio.loop.create_connection`.
+
+Architecture
+------------
+
+How do I do two things in parallel? How do I integrate with another coroutine?
+..............................................................................
+
+You must start two tasks, which the event loop will run concurrently. You can
+achieve this with :func:`asyncio.gather` or :func:`asyncio.wait`.
+
+This is also part of learning asyncio and not specific to websockets.
+
+Keep track of the tasks and make sure they terminate or you cancel them when
+the connection terminates.
+
+How do I create channels or topics?
+...................................
+
+websockets doesn't have built-in publish / subscribe for these use cases.
+
+Depending on the scale of your service, a simple in-memory implementation may
+do the job or you may need an external publish / subscribe component.
+
+Are there ``onopen``, ``onmessage``, ``onerror``, and ``onclose`` callbacks?
+............................................................................ + +No, there aren't. + +websockets provides high-level, coroutine-based APIs. Compared to callbacks, +coroutines make it easier to manage control flow in concurrent code. + +If you prefer callback-based APIs, you should use another library. + +Can I use ``websockets`` synchronously, without ``async`` / ``await``? +...................................................................... + +You can convert every asynchronous call to a synchronous call by wrapping it +in ``asyncio.get_event_loop().run_until_complete(...)``. + +If this turns out to be impractical, you should use another library. + +Miscellaneous +------------- + +How do I set a timeout on ``recv()``? +..................................... + +Use :func:`~asyncio.wait_for`:: + + await asyncio.wait_for(websocket.recv(), timeout=10) + +This technique works for most APIs, except for asynchronous context managers. +See `issue 574`_. + +.. _issue 574: https://github.com/aaugustin/websockets/issues/574 + +How do I keep idle connections open? +.................................... + +websockets sends pings at 20 seconds intervals to keep the connection open. + +In closes the connection if it doesn't get a pong within 20 seconds. + +You can adjust this behavior with ``ping_interval`` and ``ping_timeout``. + +How do I respond to pings? +.......................... + +websockets takes care of responding to pings with pongs. + +Is there a Python 2 version? +............................ + +No, there isn't. + +websockets builds upon asyncio which requires Python 3. + + diff --git a/docs/index.rst b/docs/index.rst index 6001d5075..c18af96e4 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -49,6 +49,7 @@ If you're new to ``websockets``, this is the place to start. :maxdepth: 2 intro + faq How-to guides ------------- From aa2a2bb52621626c5661f8be5de4985e18e87acf Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Mon, 24 Jun 2019 22:08:44 +0200 Subject: [PATCH 099/281] Improve HTTP parsing error messages. Fix #494. --- docs/changelog.rst | 2 ++ src/websockets/client.py | 4 +-- src/websockets/http.py | 74 ++++++++++++++++++++++++++++------------ src/websockets/server.py | 4 +-- tests/test_http.py | 69 +++++++++++++++++++++++++++++-------- 5 files changed, 113 insertions(+), 40 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index c2719560b..92cbce58f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -66,6 +66,8 @@ Also: * Avoided a crash of a ``extra_headers`` callable returns ``None``. +* Improved error messages when HTTP parsing fails. + * Enabled readline in the interactive client. * Added type hints (:pep:`484`). 
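To illustrate what this change means for callers, a rough sketch; the exact
messages depend on what the peer actually sent, and ``uri`` is a placeholder
for an endpoint that doesn't speak WebSocket properly::

    import websockets
    from websockets.exceptions import InvalidMessage

    async def probe(uri):
        try:
            websocket = await websockets.connect(uri)
        except InvalidMessage as exc:
            print(exc)            # e.g. did not receive a valid HTTP response
            print(exc.__cause__)  # e.g. invalid HTTP status line: ...
        else:
            await websocket.close()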
diff --git a/src/websockets/client.py b/src/websockets/client.py index e6131ed7a..79b03d9e7 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -98,8 +98,8 @@ async def read_http_response(self) -> Tuple[int, Headers]: """ try: status_code, reason, headers = await read_response(self.reader) - except ValueError as exc: - raise InvalidMessage("Malformed HTTP message") from exc + except Exception as exc: + raise InvalidMessage("did not receive a valid HTTP response") from exc logger.debug("%s < HTTP/1.1 %d %s", self.side, status_code, reason) logger.debug("%s < %r", self.side, headers) diff --git a/src/websockets/http.py b/src/websockets/http.py index f0c58061d..6fbe5eb31 100644 --- a/src/websockets/http.py +++ b/src/websockets/http.py @@ -39,6 +39,21 @@ USER_AGENT = f"Python/{sys.version[:3]} websockets/{websockets_version}" +class SecurityError(ValueError): + """ + HTTP request or response exceeds security limits. + + """ + + +def d(value: bytes) -> str: + """ + Decode a bytestring for interpolating into an error message. + + """ + return value.decode(errors="backslashreplace") + + # See https://tools.ietf.org/html/rfc7230#appendix-B. # Regex for validating header names. @@ -85,15 +100,20 @@ async def read_request(stream: asyncio.StreamReader) -> Tuple[str, "Headers"]: # version and because path isn't checked. Since WebSocket software tends # to implement HTTP/1.1 strictly, there's little need for lenient parsing. - request_line = await read_line(stream) + try: + request_line = await read_line(stream) + except EOFError as exc: + raise EOFError("connection closed while reading HTTP request line") from exc - # This may raise "ValueError: not enough values to unpack" - method, raw_path, version = request_line.split(b" ", 2) + try: + method, raw_path, version = request_line.split(b" ", 2) + except ValueError: # not enough values to unpack (expected 3, got 1-2) + raise ValueError(f"invalid HTTP request line: {d(request_line)}") from None if method != b"GET": - raise ValueError("Unsupported HTTP method: %r" % method) + raise ValueError(f"unsupported HTTP method: {d(method)}") if version != b"HTTP/1.1": - raise ValueError("Unsupported HTTP version: %r" % version) + raise ValueError(f"unsupported HTTP version: {d(version)}") path = raw_path.decode("ascii", "surrogateescape") headers = await read_headers(stream) @@ -125,19 +145,26 @@ async def read_response(stream: asyncio.StreamReader) -> Tuple[int, str, "Header # As in read_request, parsing is simple because a fixed value is expected # for version, status_code is a 3-digit number, and reason can be ignored. 
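    # For example, the status line b"HTTP/1.1 101 Switching Protocols" used
    # in the tests below splits into the HTTP version, the status code 101,
    # and the reason phrase "Switching Protocols".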
- status_line = await read_line(stream) + try: + status_line = await read_line(stream) + except EOFError as exc: + raise EOFError("connection closed while reading HTTP status line") from exc - # This may raise "ValueError: not enough values to unpack" - version, raw_status_code, raw_reason = status_line.split(b" ", 2) + try: + version, raw_status_code, raw_reason = status_line.split(b" ", 2) + except ValueError: # not enough values to unpack (expected 3, got 1-2) + raise ValueError(f"invalid HTTP status line: {d(status_line)}") from None if version != b"HTTP/1.1": - raise ValueError("Unsupported HTTP version: %r" % version) - # This may raise "ValueError: invalid literal for int() with base 10" - status_code = int(raw_status_code) + raise ValueError(f"unsupported HTTP version: {d(version)}") + try: + status_code = int(raw_status_code) + except ValueError: # invalid literal for int() with base 10 + raise ValueError(f"invalid HTTP status code: {d(raw_status_code)}") from None if not 100 <= status_code < 1000: - raise ValueError("Unsupported HTTP status code: %d" % status_code) + raise ValueError(f"unsupported HTTP status code: {d(raw_status_code)}") if not _value_re.fullmatch(raw_reason): - raise ValueError("Invalid HTTP reason phrase: %r" % raw_reason) + raise ValueError(f"invalid HTTP reason phrase: {d(raw_reason)}") reason = raw_reason.decode() headers = await read_headers(stream) @@ -162,24 +189,29 @@ async def read_headers(stream: asyncio.StreamReader) -> "Headers": headers = Headers() for _ in range(MAX_HEADERS + 1): - line = await read_line(stream) + try: + line = await read_line(stream) + except EOFError as exc: + raise EOFError("connection closed while reading HTTP headers") from exc if line == b"": break - # This may raise "ValueError: not enough values to unpack" - raw_name, raw_value = line.split(b":", 1) + try: + raw_name, raw_value = line.split(b":", 1) + except ValueError: # not enough values to unpack (expected 2, got 1) + raise ValueError(f"invalid HTTP header line: {d(line)}") from None if not _token_re.fullmatch(raw_name): - raise ValueError("Invalid HTTP header name: %r" % raw_name) + raise ValueError(f"invalid HTTP header name: {d(raw_name)}") raw_value = raw_value.strip(b" \t") if not _value_re.fullmatch(raw_value): - raise ValueError("Invalid HTTP header value: %r" % raw_value) + raise ValueError(f"invalid HTTP header value: {d(raw_value)}") name = raw_name.decode("ascii") # guaranteed to be ASCII at this point value = raw_value.decode("ascii", "surrogateescape") headers[name] = value else: - raise ValueError("Too many HTTP headers") + raise SecurityError("too many HTTP headers") return headers @@ -197,10 +229,10 @@ async def read_line(stream: asyncio.StreamReader) -> bytes: line = await stream.readline() # Security: this guarantees header values are small (hard-coded = 4 KiB) if len(line) > MAX_LINE: - raise ValueError("Line too long") + raise SecurityError("line too long") # Not mandatory but safe - https://tools.ietf.org/html/rfc7230#section-3.5 if not line.endswith(b"\r\n"): - raise ValueError("Line without CRLF") + raise EOFError("line without CRLF") return line[:-2] diff --git a/src/websockets/server.py b/src/websockets/server.py index 9882eabef..c8eb46351 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -227,8 +227,8 @@ async def read_http_request(self) -> Tuple[str, Headers]: """ try: path, headers = await read_request(self.reader) - except ValueError as exc: - raise InvalidMessage("Malformed HTTP message") from exc + except Exception as 
exc: + raise InvalidMessage("did not receive a valid HTTP request") from exc logger.debug("%s < GET %s HTTP/1.1", self.side, path) logger.debug("%s < %r", self.side, headers) diff --git a/tests/test_http.py b/tests/test_http.py index 60cdb9a25..8ba1d190f 100644 --- a/tests/test_http.py +++ b/tests/test_http.py @@ -29,6 +29,33 @@ def test_read_request(self): self.assertEqual(path, "/chat") self.assertEqual(headers["Upgrade"], "websocket") + def test_read_request_empty(self): + self.stream.feed_eof() + with self.assertRaisesRegex( + EOFError, "connection closed while reading HTTP request line" + ): + self.loop.run_until_complete(read_request(self.stream)) + + def test_read_request_invalid_request_line(self): + self.stream.feed_data(b"GET /\r\n\r\n") + with self.assertRaisesRegex(ValueError, "invalid HTTP request line: GET /"): + self.loop.run_until_complete(read_request(self.stream)) + + def test_read_request_unsupported_method(self): + self.stream.feed_data(b"OPTIONS * HTTP/1.1\r\n\r\n") + with self.assertRaisesRegex(ValueError, "unsupported HTTP method: OPTIONS"): + self.loop.run_until_complete(read_request(self.stream)) + + def test_read_request_unsupported_version(self): + self.stream.feed_data(b"GET /chat HTTP/1.0\r\n\r\n") + with self.assertRaisesRegex(ValueError, "unsupported HTTP version: HTTP/1.0"): + self.loop.run_until_complete(read_request(self.stream)) + + def test_read_request_invalid_header(self): + self.stream.feed_data(b"GET /chat HTTP/1.1\r\nOops\r\n") + with self.assertRaisesRegex(ValueError, "invalid HTTP header line: Oops"): + self.loop.run_until_complete(read_request(self.stream)) + def test_read_response(self): # Example from the protocol overview in RFC 6455 self.stream.feed_data( @@ -46,29 +73,41 @@ def test_read_response(self): self.assertEqual(reason, "Switching Protocols") self.assertEqual(headers["Upgrade"], "websocket") - def test_request_method(self): - self.stream.feed_data(b"OPTIONS * HTTP/1.1\r\n\r\n") - with self.assertRaises(ValueError): - self.loop.run_until_complete(read_request(self.stream)) + def test_read_response_empty(self): + self.stream.feed_eof() + with self.assertRaisesRegex( + EOFError, "connection closed while reading HTTP status line" + ): + self.loop.run_until_complete(read_response(self.stream)) - def test_request_version(self): - self.stream.feed_data(b"GET /chat HTTP/1.0\r\n\r\n") - with self.assertRaises(ValueError): - self.loop.run_until_complete(read_request(self.stream)) + def test_read_request_invalid_status_line(self): + self.stream.feed_data(b"Hello!\r\n") + with self.assertRaisesRegex(ValueError, "invalid HTTP status line: Hello!"): + self.loop.run_until_complete(read_response(self.stream)) - def test_response_version(self): + def test_read_response_unsupported_version(self): self.stream.feed_data(b"HTTP/1.0 400 Bad Request\r\n\r\n") - with self.assertRaises(ValueError): + with self.assertRaisesRegex(ValueError, "unsupported HTTP version: HTTP/1.0"): self.loop.run_until_complete(read_response(self.stream)) - def test_response_status(self): + def test_read_response_invalid_status(self): + self.stream.feed_data(b"HTTP/1.1 OMG WTF\r\n\r\n") + with self.assertRaisesRegex(ValueError, "invalid HTTP status code: OMG"): + self.loop.run_until_complete(read_response(self.stream)) + + def test_read_response_unsupported_status(self): self.stream.feed_data(b"HTTP/1.1 007 My name is Bond\r\n\r\n") - with self.assertRaises(ValueError): + with self.assertRaisesRegex(ValueError, "unsupported HTTP status code: 007"): 
self.loop.run_until_complete(read_response(self.stream)) - def test_response_reason(self): + def test_read_response_invalid_reason(self): self.stream.feed_data(b"HTTP/1.1 200 \x7f\r\n\r\n") - with self.assertRaises(ValueError): + with self.assertRaisesRegex(ValueError, "invalid HTTP reason phrase: \\x7f"): + self.loop.run_until_complete(read_response(self.stream)) + + def test_read_response_invalid_header(self): + self.stream.feed_data(b"HTTP/1.1 500 Internal Server Error\r\nOops\r\n") + with self.assertRaisesRegex(ValueError, "invalid HTTP header line: Oops"): self.loop.run_until_complete(read_response(self.stream)) def test_header_name(self): @@ -94,7 +133,7 @@ def test_line_limit(self): def test_line_ending(self): self.stream.feed_data(b"foo: bar\n\n") - with self.assertRaises(ValueError): + with self.assertRaises(EOFError): self.loop.run_until_complete(read_headers(self.stream)) From c854564d2d871beba7b1150ef97d7cf2b6e4f872 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Mon, 24 Jun 2019 23:09:08 +0200 Subject: [PATCH 100/281] Convert tests for HTTP parsing to async style. Refs #403. --- tests/test_http.py | 78 ++++++++++++++++++++++------------------------ tests/utils.py | 27 ++++++++++++++++ 2 files changed, 65 insertions(+), 40 deletions(-) diff --git a/tests/test_http.py b/tests/test_http.py index 8ba1d190f..cff97fc2f 100644 --- a/tests/test_http.py +++ b/tests/test_http.py @@ -12,7 +12,7 @@ def setUp(self): super().setUp() self.stream = asyncio.StreamReader(loop=self.loop) - def test_read_request(self): + async def test_read_request(self): # Example from the protocol overview in RFC 6455 self.stream.feed_data( b"GET /chat HTTP/1.1\r\n" @@ -25,38 +25,38 @@ def test_read_request(self): b"Sec-WebSocket-Version: 13\r\n" b"\r\n" ) - path, headers = self.loop.run_until_complete(read_request(self.stream)) + path, headers = await read_request(self.stream) self.assertEqual(path, "/chat") self.assertEqual(headers["Upgrade"], "websocket") - def test_read_request_empty(self): + async def test_read_request_empty(self): self.stream.feed_eof() with self.assertRaisesRegex( EOFError, "connection closed while reading HTTP request line" ): - self.loop.run_until_complete(read_request(self.stream)) + await read_request(self.stream) - def test_read_request_invalid_request_line(self): + async def test_read_request_invalid_request_line(self): self.stream.feed_data(b"GET /\r\n\r\n") with self.assertRaisesRegex(ValueError, "invalid HTTP request line: GET /"): - self.loop.run_until_complete(read_request(self.stream)) + await read_request(self.stream) - def test_read_request_unsupported_method(self): + async def test_read_request_unsupported_method(self): self.stream.feed_data(b"OPTIONS * HTTP/1.1\r\n\r\n") with self.assertRaisesRegex(ValueError, "unsupported HTTP method: OPTIONS"): - self.loop.run_until_complete(read_request(self.stream)) + await read_request(self.stream) - def test_read_request_unsupported_version(self): + async def test_read_request_unsupported_version(self): self.stream.feed_data(b"GET /chat HTTP/1.0\r\n\r\n") with self.assertRaisesRegex(ValueError, "unsupported HTTP version: HTTP/1.0"): - self.loop.run_until_complete(read_request(self.stream)) + await read_request(self.stream) - def test_read_request_invalid_header(self): + async def test_read_request_invalid_header(self): self.stream.feed_data(b"GET /chat HTTP/1.1\r\nOops\r\n") with self.assertRaisesRegex(ValueError, "invalid HTTP header line: Oops"): - self.loop.run_until_complete(read_request(self.stream)) + await 
read_request(self.stream) - def test_read_response(self): + async def test_read_response(self): # Example from the protocol overview in RFC 6455 self.stream.feed_data( b"HTTP/1.1 101 Switching Protocols\r\n" @@ -66,75 +66,73 @@ def test_read_response(self): b"Sec-WebSocket-Protocol: chat\r\n" b"\r\n" ) - status_code, reason, headers = self.loop.run_until_complete( - read_response(self.stream) - ) + status_code, reason, headers = await read_response(self.stream) self.assertEqual(status_code, 101) self.assertEqual(reason, "Switching Protocols") self.assertEqual(headers["Upgrade"], "websocket") - def test_read_response_empty(self): + async def test_read_response_empty(self): self.stream.feed_eof() with self.assertRaisesRegex( EOFError, "connection closed while reading HTTP status line" ): - self.loop.run_until_complete(read_response(self.stream)) + await read_response(self.stream) - def test_read_request_invalid_status_line(self): + async def test_read_request_invalid_status_line(self): self.stream.feed_data(b"Hello!\r\n") with self.assertRaisesRegex(ValueError, "invalid HTTP status line: Hello!"): - self.loop.run_until_complete(read_response(self.stream)) + await read_response(self.stream) - def test_read_response_unsupported_version(self): + async def test_read_response_unsupported_version(self): self.stream.feed_data(b"HTTP/1.0 400 Bad Request\r\n\r\n") with self.assertRaisesRegex(ValueError, "unsupported HTTP version: HTTP/1.0"): - self.loop.run_until_complete(read_response(self.stream)) + await read_response(self.stream) - def test_read_response_invalid_status(self): + async def test_read_response_invalid_status(self): self.stream.feed_data(b"HTTP/1.1 OMG WTF\r\n\r\n") with self.assertRaisesRegex(ValueError, "invalid HTTP status code: OMG"): - self.loop.run_until_complete(read_response(self.stream)) + await read_response(self.stream) - def test_read_response_unsupported_status(self): + async def test_read_response_unsupported_status(self): self.stream.feed_data(b"HTTP/1.1 007 My name is Bond\r\n\r\n") with self.assertRaisesRegex(ValueError, "unsupported HTTP status code: 007"): - self.loop.run_until_complete(read_response(self.stream)) + await read_response(self.stream) - def test_read_response_invalid_reason(self): + async def test_read_response_invalid_reason(self): self.stream.feed_data(b"HTTP/1.1 200 \x7f\r\n\r\n") with self.assertRaisesRegex(ValueError, "invalid HTTP reason phrase: \\x7f"): - self.loop.run_until_complete(read_response(self.stream)) + await read_response(self.stream) - def test_read_response_invalid_header(self): + async def test_read_response_invalid_header(self): self.stream.feed_data(b"HTTP/1.1 500 Internal Server Error\r\nOops\r\n") with self.assertRaisesRegex(ValueError, "invalid HTTP header line: Oops"): - self.loop.run_until_complete(read_response(self.stream)) + await read_response(self.stream) - def test_header_name(self): + async def test_header_name(self): self.stream.feed_data(b"foo bar: baz qux\r\n\r\n") with self.assertRaises(ValueError): - self.loop.run_until_complete(read_headers(self.stream)) + await read_headers(self.stream) - def test_header_value(self): + async def test_header_value(self): self.stream.feed_data(b"foo: \x00\x00\x0f\r\n\r\n") with self.assertRaises(ValueError): - self.loop.run_until_complete(read_headers(self.stream)) + await read_headers(self.stream) - def test_headers_limit(self): + async def test_headers_limit(self): self.stream.feed_data(b"foo: bar\r\n" * 257 + b"\r\n") with self.assertRaises(ValueError): - 
self.loop.run_until_complete(read_headers(self.stream))
+            await read_headers(self.stream)
 
-    def test_line_limit(self):
+    async def test_line_limit(self):
         # Header line contains 5 + 4090 + 2 = 4097 bytes.
         self.stream.feed_data(b"foo: " + b"a" * 4090 + b"\r\n\r\n")
         with self.assertRaises(ValueError):
-            self.loop.run_until_complete(read_headers(self.stream))
+            await read_headers(self.stream)
 
-    def test_line_ending(self):
+    async def test_line_ending(self):
         self.stream.feed_data(b"foo: bar\n\n")
         with self.assertRaises(EOFError):
-            self.loop.run_until_complete(read_headers(self.stream))
+            await read_headers(self.stream)
 
 
 class HeadersTests(unittest.TestCase):
diff --git a/tests/utils.py b/tests/utils.py
index 059efba20..24cdcfa51 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -1,5 +1,6 @@
 import asyncio
 import contextlib
+import functools
 import logging
 import os
 import time
@@ -12,6 +13,32 @@ class AsyncioTestCase(unittest.TestCase):
 
     """
 
+    def __init_subclass__(cls, **kwargs):
+        """
+        Convert test coroutines to test functions.
+
+        This supports asynchronous tests transparently.
+
+        """
+        super().__init_subclass__(**kwargs)
+        for name in unittest.defaultTestLoader.getTestCaseNames(cls):
+            test = getattr(cls, name)
+            if asyncio.iscoroutinefunction(test):
+                setattr(cls, name, cls.convert_async_to_sync(test))
+
+    @staticmethod
+    def convert_async_to_sync(test):
+        """
+        Convert a test coroutine to a test function.
+
+        """
+
+        @functools.wraps(test)
+        def test_func(self, *args, **kwds):
+            return self.loop.run_until_complete(test(self, *args, **kwds))
+
+        return test_func
+
     def setUp(self):
         super().setUp()
         self.loop = asyncio.new_event_loop()

From 9f9da4478bb2c3f84020abadce118dfad6d53391 Mon Sep 17 00:00:00 2001
From: Aymeric Augustin
Date: Tue, 25 Jun 2019 22:11:36 +0200
Subject: [PATCH 101/281] Clarify that extra_headers only applies on success.

Refs #611.
---
 src/websockets/server.py | 15 ++++++++-------
 1 file changed, 8 insertions(+), 7 deletions(-)

diff --git a/src/websockets/server.py b/src/websockets/server.py
index c8eb46351..d7d294c29 100644
--- a/src/websockets/server.py
+++ b/src/websockets/server.py
@@ -500,8 +500,9 @@ async def handshake(
         If provided, ``available_subprotocols`` is a list of supported
         subprotocols in order of decreasing preference.
 
-        If provided, ``extra_headers`` sets additional HTTP response headers.
-        It can be a :class:`~websockets.http.Headers` instance, a
+        If provided, ``extra_headers`` sets additional HTTP response headers
+        when the handshake succeeds. It can be a
+        :class:`~websockets.http.Headers` instance, a
         :class:`~collections.abc.Mapping`, an iterable of ``(name, value)``
         pairs, or a callable taking the request path and headers in arguments
         and returning one of the above.
@@ -779,11 +780,11 @@ class Serve:
       decreasing preference
     * ``subprotocols`` is a list of supported subprotocols in order of
       decreasing preference
-    * ``extra_headers`` sets additional HTTP response headers — it can be a
-      :class:`~websockets.http.Headers` instance, a
-      :class:`~collections.abc.Mapping`, an iterable of ``(name, value)``
-      pairs, or a callable taking the request path and headers in arguments
-      and returning one of the above
+    * ``extra_headers`` sets additional HTTP response headers when the
+      handshake succeeds — it can be a :class:`~websockets.http.Headers`
+      instance, a :class:`~collections.abc.Mapping`, an iterable of ``(name,
+      value)`` pairs, or a callable taking the request path and headers in
+      arguments and returning one of the above
     * ``process_request`` is a coroutine taking the request path and headers
       in argument, see :meth:`~WebSocketServerProtocol.process_request` for
       details

From d5a670019dd7baf966e5822c37f45c0da4971fdd Mon Sep 17 00:00:00 2001
From: Aymeric Augustin
Date: Wed, 26 Jun 2019 08:27:07 +0200
Subject: [PATCH 102/281] Improve error messages for handshake failures.

Fix #611.
---
 src/websockets/__main__.py                    |  6 +--
 src/websockets/auth.py                        |  6 +--
 src/websockets/client.py                      | 17 +++----
 src/websockets/exceptions.py                  | 16 +++----
 .../extensions/permessage_deflate.py          | 16 +++----
 src/websockets/framing.py                     | 16 +++----
 src/websockets/protocol.py                    |  8 ++--
 src/websockets/server.py                      | 18 +++++--
 tests/test_auth.py                            |  6 +--
 tests/test_client_server.py                   | 22 ++++++---
 tests/test_exceptions.py                      | 48 +++++++++----------
 11 files changed, 98 insertions(+), 81 deletions(-)

diff --git a/src/websockets/__main__.py b/src/websockets/__main__.py
index 14bf655b1..57d2a823b 100644
--- a/src/websockets/__main__.py
+++ b/src/websockets/__main__.py
@@ -27,11 +27,11 @@ def win_enable_vt100() -> None:
 
     handle = ctypes.windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
     if handle == INVALID_HANDLE_VALUE:
-        raise RuntimeError("Unable to obtain stdout handle")
+        raise RuntimeError("unable to obtain stdout handle")
 
     cur_mode = ctypes.c_uint()
     if ctypes.windll.kernel32.GetConsoleMode(handle, ctypes.byref(cur_mode)) == 0:
-        raise RuntimeError("Unable to query current console mode")
+        raise RuntimeError("unable to query current console mode")
 
     # ctypes ints lack support for the required bit-OR operation.
     # Temporarily convert to Py int, do the OR and convert back.
@@ -39,7 +39,7 @@ def win_enable_vt100() -> None: new_mode = ctypes.c_uint(py_int_mode | ENABLE_VIRTUAL_TERMINAL_PROCESSING) if ctypes.windll.kernel32.SetConsoleMode(handle, new_mode) == 0: - raise RuntimeError("Unable to set console mode") + raise RuntimeError("unable to set console mode") def exit_from_event_loop_thread( diff --git a/src/websockets/auth.py b/src/websockets/auth.py index 91d3d7420..60f63e9aa 100644 --- a/src/websockets/auth.py +++ b/src/websockets/auth.py @@ -124,7 +124,7 @@ def basic_auth_protocol_factory( """ if (credentials is None) == (check_credentials is None): - raise ValueError("Provide either credentials or check_credentials") + raise ValueError("provide either credentials or check_credentials") if credentials is not None: if is_credentials(credentials): @@ -141,10 +141,10 @@ async def check_credentials(username: str, password: str) -> bool: return credentials_dict.get(username) == password else: - raise ValueError(f"Invalid credentials argument: {credentials}") + raise ValueError(f"invalid credentials argument: {credentials}") else: - raise ValueError(f"Invalid credentials argument: {credentials}") + raise ValueError(f"invalid credentials argument: {credentials}") return functools.partial( create_protocol, realm=realm, check_credentials=check_credentials diff --git a/src/websockets/client.py b/src/websockets/client.py index 79b03d9e7..9c34d5c23 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -12,6 +12,7 @@ from .exceptions import ( InvalidHandshake, + InvalidHeader, InvalidMessage, InvalidStatusCode, NegotiationError, @@ -146,7 +147,7 @@ def process_extensions( if header_values: if available_extensions is None: - raise InvalidHandshake("No extensions supported") + raise InvalidHandshake("no extensions supported") parsed_header_values: List[ExtensionHeader] = sum( [parse_extension(header_value) for header_value in header_values], [] @@ -203,7 +204,7 @@ def process_subprotocol( if header_values: if available_subprotocols is None: - raise InvalidHandshake("No subprotocols supported") + raise InvalidHandshake("no subprotocols supported") parsed_header_values: Sequence[Subprotocol] = sum( [parse_subprotocol(header_value) for header_value in header_values], [] @@ -211,12 +212,12 @@ def process_subprotocol( if len(parsed_header_values) > 1: subprotocols = ", ".join(parsed_header_values) - raise InvalidHandshake(f"Multiple subprotocols: {subprotocols}") + raise InvalidHandshake(f"multiple subprotocols: {subprotocols}") subprotocol = parsed_header_values[0] if subprotocol not in available_subprotocols: - raise NegotiationError(f"Unsupported subprotocol: {subprotocol}") + raise NegotiationError(f"unsupported subprotocol: {subprotocol}") return subprotocol @@ -293,7 +294,7 @@ async def handshake( status_code, response_headers = await self.read_http_response() if status_code in (301, 302, 303, 307, 308): if "Location" not in response_headers: - raise InvalidMessage("Redirect response missing Location") + raise InvalidHeader("Location") raise RedirectHandshake(response_headers["Location"]) elif status_code != 101: raise InvalidStatusCode(status_code) @@ -429,7 +430,7 @@ def __init__( ClientPerMessageDeflateFactory(client_max_window_bits=True) ] elif compression is not None: - raise ValueError(f"Unsupported compression: {compression}") + raise ValueError(f"unsupported compression: {compression}") self._create_protocol = create_protocol self._ping_interval = ping_interval @@ -535,11 +536,11 @@ async def __await_impl__(self) -> WebSocketClientProtocol: 
except RedirectHandshake as e: wsuri = parse_uri(e.uri) if self._wsuri.secure and not wsuri.secure: - raise InvalidHandshake("Redirect dropped TLS") + raise InvalidHandshake("redirect dropped TLS") self._wsuri = wsuri continue # redirection chain continues else: - raise InvalidHandshake("Maximum redirects exceeded") + raise InvalidHandshake("maximum redirects exceeded") self.ws_client = protocol return protocol diff --git a/src/websockets/exceptions.py b/src/websockets/exceptions.py index 22978ec6f..36a8ed4a8 100644 --- a/src/websockets/exceptions.py +++ b/src/websockets/exceptions.py @@ -62,7 +62,7 @@ def __init__(self, uri: str) -> None: self.uri = uri def __str__(self) -> str: - return f"Redirect to {self.uri}" + return f"redirect to {self.uri}" class InvalidMessage(InvalidHandshake): @@ -80,11 +80,11 @@ class InvalidHeader(InvalidHandshake): def __init__(self, name: str, value: Optional[str] = None) -> None: if value is None: - message = f"Missing {name} header" + message = f"missing {name} header" elif value == "": - message = f"Empty {name} header" + message = f"empty {name} header" else: - message = f"Invalid {name} header: {value}" + message = f"invalid {name} header: {value}" super().__init__(message) @@ -133,7 +133,7 @@ class InvalidStatusCode(InvalidHandshake): def __init__(self, status_code: int) -> None: self.status_code = status_code - message = f"Status code not 101: {status_code}" + message = f"server rejected WebSocket connection: HTTP {status_code}" super().__init__(message) @@ -152,7 +152,7 @@ class InvalidParameterName(NegotiationError): def __init__(self, name: str) -> None: self.name = name - message = f"Invalid parameter name: {name}" + message = f"invalid parameter name: {name}" super().__init__(message) @@ -165,7 +165,7 @@ class InvalidParameterValue(NegotiationError): def __init__(self, name: str, value: Optional[str]) -> None: self.name = name self.value = value - message = f"Invalid value for parameter {name}: {value}" + message = f"invalid value for parameter {name}: {value}" super().__init__(message) @@ -177,7 +177,7 @@ class DuplicateParameter(NegotiationError): def __init__(self, name: str) -> None: self.name = name - message = f"Duplicate parameter: {name}" + message = f"duplicate parameter: {name}" super().__init__(message) diff --git a/src/websockets/extensions/permessage_deflate.py b/src/websockets/extensions/permessage_deflate.py index 2de27260f..bd4b3fa53 100644 --- a/src/websockets/extensions/permessage_deflate.py +++ b/src/websockets/extensions/permessage_deflate.py @@ -334,7 +334,7 @@ def process_response_params( """ if any(other.name == self.name for other in accepted_extensions): - raise NegotiationError(f"Received duplicate {self.name}") + raise NegotiationError(f"received duplicate {self.name}") # Request parameters are available in instance variables. 
@@ -360,7 +360,7 @@ def process_response_params( if self.server_no_context_takeover: if not server_no_context_takeover: - raise NegotiationError("Expected server_no_context_takeover") + raise NegotiationError("expected server_no_context_takeover") # client_no_context_takeover # @@ -390,9 +390,9 @@ def process_response_params( else: if server_max_window_bits is None: - raise NegotiationError("Expected server_max_window_bits") + raise NegotiationError("expected server_max_window_bits") elif server_max_window_bits > self.server_max_window_bits: - raise NegotiationError("Unsupported server_max_window_bits") + raise NegotiationError("unsupported server_max_window_bits") # client_max_window_bits @@ -408,7 +408,7 @@ def process_response_params( if self.client_max_window_bits is None: if client_max_window_bits is not None: - raise NegotiationError("Unexpected client_max_window_bits") + raise NegotiationError("unexpected client_max_window_bits") elif self.client_max_window_bits is True: pass @@ -417,7 +417,7 @@ def process_response_params( if client_max_window_bits is None: client_max_window_bits = self.client_max_window_bits elif client_max_window_bits > self.client_max_window_bits: - raise NegotiationError("Unsupported client_max_window_bits") + raise NegotiationError("unsupported client_max_window_bits") return PerMessageDeflate( server_no_context_takeover, # remote_no_context_takeover @@ -491,7 +491,7 @@ def process_request_params( """ if any(other.name == self.name for other in accepted_extensions): - raise NegotiationError(f"Skipped duplicate {self.name}") + raise NegotiationError(f"skipped duplicate {self.name}") # Load request parameters in local variables. ( @@ -569,7 +569,7 @@ def process_request_params( else: if client_max_window_bits is None: - raise NegotiationError("Required client_max_window_bits") + raise NegotiationError("required client_max_window_bits") elif client_max_window_bits is True: client_max_window_bits = self.client_max_window_bits elif self.client_max_window_bits < client_max_window_bits: diff --git a/src/websockets/framing.py b/src/websockets/framing.py index 5b694fd40..d668e0c52 100644 --- a/src/websockets/framing.py +++ b/src/websockets/framing.py @@ -133,7 +133,7 @@ async def read( opcode = head1 & 0b00001111 if (True if head2 & 0b10000000 else False) != mask: - raise WebSocketProtocolError("Incorrect masking") + raise WebSocketProtocolError("incorrect masking") length = head2 & 0b01111111 if length == 126: @@ -144,7 +144,7 @@ async def read( length, = struct.unpack("!Q", data) if max_size is not None and length > max_size: raise PayloadTooBig( - f"Payload length exceeds size limit ({length} > {max_size} bytes)" + f"payload length exceeds size limit ({length} > {max_size} bytes)" ) if mask: mask_bits = await reader(4) @@ -252,17 +252,17 @@ def check(frame) -> None: # but it's the instance of class to which this method is bound. 
if frame.rsv1 or frame.rsv2 or frame.rsv3: - raise WebSocketProtocolError("Reserved bits must be 0") + raise WebSocketProtocolError("reserved bits must be 0") if frame.opcode in DATA_OPCODES: return elif frame.opcode in CTRL_OPCODES: if len(frame.data) > 125: - raise WebSocketProtocolError("Control frame too long") + raise WebSocketProtocolError("control frame too long") if not frame.fin: - raise WebSocketProtocolError("Fragmented control frame") + raise WebSocketProtocolError("fragmented control frame") else: - raise WebSocketProtocolError(f"Invalid opcode: {frame.opcode}") + raise WebSocketProtocolError(f"invalid opcode: {frame.opcode}") def prepare_data(data: Data) -> Tuple[int, bytes]: @@ -338,7 +338,7 @@ def parse_close(data: bytes) -> Tuple[int, str]: return 1005, "" else: assert length == 1 - raise WebSocketProtocolError("Close frame too short") + raise WebSocketProtocolError("close frame too short") def serialize_close(code: int, reason: str) -> bytes: @@ -358,7 +358,7 @@ def check_close(code: int) -> None: """ if not (code in EXTERNAL_CLOSE_CODES or 3000 <= code < 5000): - raise WebSocketProtocolError("Invalid status code") + raise WebSocketProtocolError("invalid status code") # at the bottom to allow circular import, because Extension depends on Frame diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index d888a9729..43dcbd4ff 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -630,7 +630,7 @@ async def ping(self, data: Optional[bytes] = None) -> Awaitable[None]: # Protect against duplicates if a payload is explicitly set. if data in self.pings: - raise ValueError("Already waiting for a pong with the same data") + raise ValueError("already waiting for a pong with the same data") # Generate a unique random payload otherwise. while data is None or data in self.pings: @@ -793,7 +793,7 @@ async def read_message(self) -> Optional[Data]: elif frame.opcode == OP_BINARY: text = False else: # frame.opcode == OP_CONT - raise WebSocketProtocolError("Unexpected opcode") + raise WebSocketProtocolError("unexpected opcode") # Shortcut for the common case - no fragmentation if frame.fin: @@ -838,9 +838,9 @@ def append(frame: Frame) -> None: while not frame.fin: frame = await self.read_data_frame(max_size=max_size) if frame is None: - raise WebSocketProtocolError("Incomplete fragmented message") + raise WebSocketProtocolError("incomplete fragmented message") if frame.opcode != OP_CONT: - raise WebSocketProtocolError("Unexpected opcode") + raise WebSocketProtocolError("unexpected opcode") append(frame) # mypy cannot figure out that chunks have the proper type. diff --git a/src/websockets/server.py b/src/websockets/server.py index d7d294c29..c37aec93f 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -147,28 +147,36 @@ async def handler(self) -> None: status, headers, body = ( http.HTTPStatus.FORBIDDEN, Headers(), - (str(exc) + "\n").encode(), + f"Failed to open a WebSocket connection: {exc}.\n".encode(), ) elif isinstance(exc, InvalidUpgrade): logger.debug("Invalid upgrade", exc_info=True) status, headers, body = ( http.HTTPStatus.UPGRADE_REQUIRED, Headers([("Upgrade", "websocket")]), - (str(exc) + "\n").encode(), + ( + f"Failed to open a WebSocket connection: {exc}.\n" + f"\n" + f"You cannot access a WebSocket server directly " + f"with a browser. 
You need a WebSocket client.\n" + ).encode(), ) elif isinstance(exc, InvalidHandshake): logger.debug("Invalid handshake", exc_info=True) status, headers, body = ( http.HTTPStatus.BAD_REQUEST, Headers(), - (str(exc) + "\n").encode(), + f"Failed to open a WebSocket connection: {exc}.\n".encode(), ) else: logger.warning("Error in opening handshake", exc_info=True) status, headers, body = ( http.HTTPStatus.INTERNAL_SERVER_ERROR, Headers(), - b"See server log for more information.\n", + ( + b"Failed to open a WebSocket connection.\n" + b"See server log for more information.\n" + ), ) headers.setdefault("Date", email.utils.formatdate(usegmt=True)) @@ -880,7 +888,7 @@ def __init__( ): extensions = list(extensions) + [ServerPerMessageDeflateFactory()] elif compression is not None: - raise ValueError(f"Unsupported compression: {compression}") + raise ValueError(f"unsupported compression: {compression}") factory = lambda: create_protocol( ws_handler, diff --git a/tests/test_auth.py b/tests/test_auth.py index f6aa5c424..bcd340844 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -40,13 +40,13 @@ def test_basic_auth_server_no_credentials(self): with self.assertRaises(ValueError) as raised: basic_auth_protocol_factory(realm="auth-tests", credentials=None) self.assertEqual( - str(raised.exception), "Provide either credentials or check_credentials" + str(raised.exception), "provide either credentials or check_credentials" ) def test_basic_auth_server_bad_credentials(self): with self.assertRaises(ValueError) as raised: basic_auth_protocol_factory(realm="auth-tests", credentials=42) - self.assertEqual(str(raised.exception), "Invalid credentials argument: 42") + self.assertEqual(str(raised.exception), "invalid credentials argument: 42") create_protocol_multiple_credentials = basic_auth_protocol_factory( realm="auth-tests", @@ -66,7 +66,7 @@ def test_basic_auth_bad_multiple_credentials(self): ) self.assertEqual( str(raised.exception), - "Invalid credentials argument: [('hello', 'iloveyou'), 42]", + "invalid credentials argument: [('hello', 'iloveyou'), 42]", ) async def check_credentials(username, password): diff --git a/tests/test_client_server.py b/tests/test_client_server.py index d82aa6d40..35b662eb9 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -18,7 +18,7 @@ from websockets.exceptions import ( ConnectionClosed, InvalidHandshake, - InvalidMessage, + InvalidHeader, InvalidStatusCode, NegotiationError, ) @@ -155,7 +155,7 @@ def get_server_uri(server, secure=False, resource_name="/", user_info=None): # The host and port are ignored when connecting to a Unix socket. 
host, port = "localhost", 0 else: # pragma: no cover - raise ValueError("Expected an IPv6, IPv4, or Unix socket") + raise ValueError("expected an IPv6, IPv4, or Unix socket") return f"{proto}://{user_info}{host}:{port}{resource_name}" @@ -429,7 +429,7 @@ def test_redirect_missing_location(self): with temp_test_redirecting_server( self, http.HTTPStatus.FOUND, include_location=False ): - with self.assertRaises(InvalidMessage): + with self.assertRaises(InvalidHeader): with temp_test_client(self): self.fail("Did not raise") # pragma: no cover @@ -1149,7 +1149,9 @@ def test_server_shuts_down_during_opening_handshake(self): with self.assertRaises(InvalidStatusCode) as raised: self.start_client() exception = raised.exception - self.assertEqual(str(exception), "Status code not 101: 503") + self.assertEqual( + str(exception), "server rejected WebSocket connection: HTTP 503" + ) self.assertEqual(exception.status_code, 503) @with_server() @@ -1197,7 +1199,9 @@ def test_invalid_status_error_during_client_connect(self): with self.assertRaises(InvalidStatusCode) as raised: self.start_client() exception = raised.exception - self.assertEqual(str(exception), "Status code not 101: 403") + self.assertEqual( + str(exception), "server rejected WebSocket connection: HTTP 403" + ) self.assertEqual(exception.status_code, 403) @with_server() @@ -1283,7 +1287,9 @@ def test_checking_origin_fails(self): server = self.loop.run_until_complete( serve(handler, "localhost", 0, origins=["http://localhost"]) ) - with self.assertRaisesRegex(InvalidHandshake, "Status code not 101: 403"): + with self.assertRaisesRegex( + InvalidHandshake, "server rejected WebSocket connection: HTTP 403" + ): self.loop.run_until_complete( connect(get_server_uri(server), origin="http://otherhost") ) @@ -1295,7 +1301,9 @@ def test_checking_origins_fails_with_multiple_headers(self): server = self.loop.run_until_complete( serve(handler, "localhost", 0, origins=["http://localhost"]) ) - with self.assertRaisesRegex(InvalidHandshake, "Status code not 101: 400"): + with self.assertRaisesRegex( + InvalidHandshake, "server rejected WebSocket connection: HTTP 400" + ): self.loop.run_until_complete( connect( get_server_uri(server), diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index 27e1b53ca..fbc06e576 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -9,8 +9,8 @@ def test_str(self): for exception, exception_str in [ # fmt: off ( - InvalidHandshake("Invalid request"), - "Invalid request", + InvalidHandshake("invalid request"), + "invalid request", ), ( AbortHandshake(200, Headers(), b"OK\n"), @@ -18,70 +18,70 @@ def test_str(self): ), ( RedirectHandshake("wss://example.com"), - "Redirect to wss://example.com", + "redirect to wss://example.com", ), ( - InvalidMessage("Malformed HTTP message"), - "Malformed HTTP message", + InvalidMessage("malformed HTTP message"), + "malformed HTTP message", ), ( InvalidHeader("Name"), - "Missing Name header", + "missing Name header", ), ( InvalidHeader("Name", None), - "Missing Name header", + "missing Name header", ), ( InvalidHeader("Name", ""), - "Empty Name header", + "empty Name header", ), ( InvalidHeader("Name", "Value"), - "Invalid Name header: Value", + "invalid Name header: Value", ), ( InvalidHeaderFormat( "Sec-WebSocket-Protocol", "expected token", "a=|", 3 ), - "Invalid Sec-WebSocket-Protocol header: " + "invalid Sec-WebSocket-Protocol header: " "expected token at 3 in a=|", ), ( InvalidHeaderValue("Sec-WebSocket-Version", "42"), - "Invalid Sec-WebSocket-Version 
header: 42", + "invalid Sec-WebSocket-Version header: 42", ), ( InvalidUpgrade("Upgrade"), - "Missing Upgrade header", + "missing Upgrade header", ), ( InvalidUpgrade("Connection", "websocket"), - "Invalid Connection header: websocket", + "invalid Connection header: websocket", ), ( InvalidOrigin("http://bad.origin"), - "Invalid Origin header: http://bad.origin", + "invalid Origin header: http://bad.origin", ), ( InvalidStatusCode(403), - "Status code not 101: 403", + "server rejected WebSocket connection: HTTP 403", ), ( - NegotiationError("Unsupported subprotocol: spam"), - "Unsupported subprotocol: spam", + NegotiationError("unsupported subprotocol: spam"), + "unsupported subprotocol: spam", ), ( InvalidParameterName("|"), - "Invalid parameter name: |", + "invalid parameter name: |", ), ( InvalidParameterValue("a", "|"), - "Invalid value for parameter a: |", + "invalid value for parameter a: |", ), ( DuplicateParameter("a"), - "Duplicate parameter: a", + "duplicate parameter: a", ), ( InvalidState("WebSocket connection isn't established yet"), @@ -122,12 +122,12 @@ def test_str(self): "| isn't a valid URI", ), ( - PayloadTooBig("Payload length exceeds limit: 2 > 1 bytes"), - "Payload length exceeds limit: 2 > 1 bytes", + PayloadTooBig("payload length exceeds limit: 2 > 1 bytes"), + "payload length exceeds limit: 2 > 1 bytes", ), ( - WebSocketProtocolError("Invalid opcode: 7"), - "Invalid opcode: 7", + WebSocketProtocolError("invalid opcode: 7"), + "invalid opcode: 7", ), # fmt: on ]: From b55ccf8d44911d3d62d55146a72cd40d96138f58 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Wed, 26 Jun 2019 20:50:18 +0200 Subject: [PATCH 103/281] Standardize to **kwargs. There was a mix of **kwargs and **kwds, perhaps due to Guido using **kwds and websockets mirroring asyncio APIs before it was called asyncio. --- src/websockets/client.py | 18 +++++------ src/websockets/server.py | 12 +++---- tests/test_client_server.py | 62 ++++++++++++++++++------------------- tests/utils.py | 4 +-- 4 files changed, 48 insertions(+), 48 deletions(-) diff --git a/src/websockets/client.py b/src/websockets/client.py index 9c34d5c23..8dd8a0dd1 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -59,13 +59,13 @@ def __init__( extensions: Optional[Sequence[ClientExtensionFactory]] = None, subprotocols: Optional[Sequence[Subprotocol]] = None, extra_headers: Optional[HeadersLike] = None, - **kwds: Any, + **kwargs: Any, ) -> None: self.origin = origin self.available_extensions = extensions self.available_subprotocols = subprotocols self.extra_headers = extra_headers - super().__init__(**kwds) + super().__init__(**kwargs) def write_http_request(self, path: str, headers: Headers) -> None: """ @@ -387,7 +387,7 @@ def __init__( extensions: Optional[Sequence[ClientExtensionFactory]] = None, subprotocols: Optional[Sequence[Subprotocol]] = None, extra_headers: Optional[HeadersLike] = None, - **kwds: Any, + **kwargs: Any, ) -> None: # Backwards compatibility: close_timeout used to be called timeout. 
if timeout is None: @@ -412,8 +412,8 @@ def __init__( self._wsuri = parse_uri(uri) if self._wsuri.secure: - kwds.setdefault("ssl", True) - elif kwds.get("ssl") is not None: + kwargs.setdefault("ssl", True) + elif kwargs.get("ssl") is not None: raise ValueError( "connect() received a SSL context for a ws:// URI, " "use a wss:// URI to enable TLS" @@ -449,13 +449,13 @@ def __init__( self._extensions = extensions self._subprotocols = subprotocols self._extra_headers = extra_headers - self._kwds = kwds + self._kwargs = kwargs async def _creating_connection( self ) -> Tuple[asyncio.Transport, WebSocketClientProtocol]: if self._wsuri.secure: - self._kwds.setdefault("ssl", True) + self._kwargs.setdefault("ssl", True) factory = lambda: self._create_protocol( host=self._wsuri.host, @@ -478,7 +478,7 @@ async def _creating_connection( host: Optional[str] port: Optional[int] - if self._kwds.get("sock") is None: + if self._kwargs.get("sock") is None: host, port = self._wsuri.host, self._wsuri.port else: # If sock is given, host and port mustn't be specified. @@ -490,7 +490,7 @@ async def _creating_connection( # This is a coroutine object. # https://github.com/python/typeshed/pull/2756 transport, protocol = await self._loop.create_connection( # type: ignore - factory, host, port, **self._kwds + factory, host, port, **self._kwargs ) transport = cast(asyncio.Transport, transport) protocol = cast(WebSocketClientProtocol, protocol) diff --git a/src/websockets/server.py b/src/websockets/server.py index c37aec93f..547656e0c 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -89,7 +89,7 @@ def __init__( select_subprotocol: Optional[ Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol] ] = None, - **kwds: Any, + **kwargs: Any, ) -> None: # For backwards-compatibility with 6.0 or earlier. if origins is not None and "" in origins: @@ -103,7 +103,7 @@ def __init__( self.extra_headers = extra_headers self._process_request = process_request self._select_subprotocol = select_subprotocol - super().__init__(**kwds) + super().__init__(**kwargs) def connection_made(self, transport: asyncio.BaseTransport) -> None: """ @@ -852,7 +852,7 @@ def __init__( select_subprotocol: Optional[ Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol] ] = None, - **kwds: Any, + **kwargs: Any, ) -> None: # Backwards-compatibility: close_timeout used to be called timeout. if timeout is None: @@ -877,7 +877,7 @@ def __init__( ws_server = WebSocketServer(loop) - secure = kwds.get("ssl") is not None + secure = kwargs.get("ssl") is not None if compression == "deflate": if extensions is None: @@ -917,9 +917,9 @@ def __init__( # https://github.com/python/typeshed/pull/2763 host = cast(str, host) port = cast(int, port) - creating_server = loop.create_server(factory, host, port, **kwds) + creating_server = loop.create_server(factory, host, port, **kwargs) else: - creating_server = loop.create_unix_server(factory, path, **kwds) + creating_server = loop.create_unix_server(factory, path, **kwargs) # This is a coroutine object. 
self._creating_server = creating_server diff --git a/tests/test_client_server.py b/tests/test_client_server.py index 35b662eb9..613143dbb 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -71,8 +71,8 @@ async def handler(ws, path): @contextlib.contextmanager -def temp_test_server(test, **kwds): - test.start_server(**kwds) +def temp_test_server(test, **kwargs): + test.start_server(**kwargs) try: yield finally: @@ -91,15 +91,15 @@ def temp_test_redirecting_server( @contextlib.contextmanager -def temp_test_client(test, *args, **kwds): - test.start_client(*args, **kwds) +def temp_test_client(test, *args, **kwargs): + test.start_client(*args, **kwargs) try: yield finally: test.stop_client() -def with_manager(manager, *args, **kwds): +def with_manager(manager, *args, **kwargs): """ Return a decorator that wraps a function with a context manager. @@ -107,29 +107,29 @@ def with_manager(manager, *args, **kwds): def decorate(func): @functools.wraps(func) - def _decorate(self, *_args, **_kwds): - with manager(self, *args, **kwds): - return func(self, *_args, **_kwds) + def _decorate(self, *_args, **_kwargs): + with manager(self, *args, **kwargs): + return func(self, *_args, **_kwargs) return _decorate return decorate -def with_server(**kwds): +def with_server(**kwargs): """ Return a decorator for TestCase methods that starts and stops a server. """ - return with_manager(temp_test_server, **kwds) + return with_manager(temp_test_server, **kwargs) -def with_client(*args, **kwds): +def with_client(*args, **kwargs): """ Return a decorator for TestCase methods that starts and stops a client. """ - return with_manager(temp_test_client, *args, **kwds) + return with_manager(temp_test_client, *args, **kwargs) def get_server_uri(server, secure=False, resource_name="/", user_info=None): @@ -240,14 +240,14 @@ def setUp(self): def server_context(self): return None - def start_server(self, expected_warning=None, **kwds): + def start_server(self, expected_warning=None, **kwargs): # Disable compression by default in tests. - kwds.setdefault("compression", None) + kwargs.setdefault("compression", None) # Disable pings by default in tests. - kwds.setdefault("ping_interval", None) + kwargs.setdefault("ping_interval", None) with warnings.catch_warnings(record=True) as recorded_warnings: - start_server = serve(handler, "localhost", 0, **kwds) + start_server = serve(handler, "localhost", 0, **kwargs) self.server = self.loop.run_until_complete(start_server) if expected_warning is None: @@ -280,18 +280,18 @@ async def process_request(path, headers): self.redirecting_server = self.loop.run_until_complete(start_server) def start_client( - self, resource_name="/", user_info=None, expected_warning=None, **kwds + self, resource_name="/", user_info=None, expected_warning=None, **kwargs ): # Disable compression by default in tests. - kwds.setdefault("compression", None) + kwargs.setdefault("compression", None) # Disable pings by default in tests. 
- kwds.setdefault("ping_interval", None) - secure = kwds.get("ssl") is not None + kwargs.setdefault("ping_interval", None) + secure = kwargs.get("ssl") is not None server = self.redirecting_server if self.redirecting_server else self.server server_uri = get_server_uri(server, secure, resource_name, user_info) with warnings.catch_warnings(record=True) as recorded_warnings: - start_client = connect(server_uri, **kwds) + start_client = connect(server_uri, **kwargs) self.client = self.loop.run_until_complete(start_client) if expected_warning is None: @@ -331,13 +331,13 @@ def stop_redirecting_server(self): self.redirecting_server = None @contextlib.contextmanager - def temp_server(self, **kwds): - with temp_test_server(self, **kwds): + def temp_server(self, **kwargs): + with temp_test_server(self, **kwargs): yield @contextlib.contextmanager - def temp_client(self, *args, **kwds): - with temp_test_client(self, *args, **kwds): + def temp_client(self, *args, **kwargs): + with temp_test_client(self, *args, **kwargs): yield def make_http_request(self, path="/", headers=None): @@ -377,13 +377,13 @@ def client_context(self): ssl_context.load_verify_locations(testcert) return ssl_context - def start_server(self, **kwds): - kwds.setdefault("ssl", self.server_context) - super().start_server(**kwds) + def start_server(self, **kwargs): + kwargs.setdefault("ssl", self.server_context) + super().start_server(**kwargs) - def start_client(self, path="/", **kwds): - kwds.setdefault("ssl", self.client_context) - super().start_client(path, **kwds) + def start_client(self, path="/", **kwargs): + kwargs.setdefault("ssl", self.client_context) + super().start_client(path, **kwargs) class CommonClientServerTests: diff --git a/tests/utils.py b/tests/utils.py index 24cdcfa51..2c067f8e6 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -34,8 +34,8 @@ def convert_async_to_sync(test): """ @functools.wraps(test) - def test_func(self, *args, **kwds): - return self.loop.run_until_complete(test(self, *args, **kwds)) + def test_func(self, *args, **kwargs): + return self.loop.run_until_complete(test(self, *args, **kwargs)) return test_func From c1dd59331749a859bc79201c8da62ea3a71811a9 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 29 Jun 2019 10:11:24 +0200 Subject: [PATCH 104/281] Refer to TLS consistently. And clarify the relationship with SSL. --- docs/faq.rst | 4 ++-- docs/intro.rst | 3 ++- src/websockets/client.py | 2 +- tests/test_client_server.py | 2 +- 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/docs/faq.rst b/docs/faq.rst index 6c5352668..3dfdb5bcd 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -124,8 +124,8 @@ See `issue 414`_. .. _issue 414: https://github.com/aaugustin/websockets/issues/414 -How do I disable SSL certificate verification? -.............................................. +How do I disable TLS/SSL certificate verification? +.................................................. Look at the ``ssl`` argument of :meth:`~asyncio.loop.create_connection`. diff --git a/docs/intro.rst b/docs/intro.rst index 8decd462d..14ba1b38a 100644 --- a/docs/intro.rst +++ b/docs/intro.rst @@ -54,7 +54,8 @@ Secure WebSocket connections improve confidentiality and also reliability because they reduce the risk of interference by bad proxies. The WSS protocol is to WS what HTTPS is to HTTP: the connection is encrypted -with TLS. WSS requires TLS certificates like HTTPS. +with Transport Layer Security (TLS) — which is often referred to as Secure +Sockets Layer (SSL). 
WSS requires TLS certificates like HTTPS. Here's how to adapt the server example to provide secure connections. See the documentation of the :mod:`ssl` module for configuring the context securely. diff --git a/src/websockets/client.py b/src/websockets/client.py index 8dd8a0dd1..110e61f69 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -415,7 +415,7 @@ def __init__( kwargs.setdefault("ssl", True) elif kwargs.get("ssl") is not None: raise ValueError( - "connect() received a SSL context for a ws:// URI, " + "connect() received a ssl argument for a ws:// URI, " "use a wss:// URI to enable TLS" ) diff --git a/tests/test_client_server.py b/tests/test_client_server.py index 613143dbb..a88002364 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -1215,7 +1215,7 @@ def test_connection_error_during_opening_handshake( _read_http_request.side_effect = ConnectionError # This exception is currently platform-dependent. It was observed to - # be ConnectionResetError on Linux in the non-SSL case, and + # be ConnectionResetError on Linux in the non-TLS case, and # InvalidMessage otherwise (including both Linux and macOS). This # doesn't matter though since this test is primarily for testing a # code path on the server side. From e146ace7caf42462af79de4fe3d0e0c4f1e2e8dc Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 29 Jun 2019 10:15:28 +0200 Subject: [PATCH 105/281] Add consistency checks on serve() arguments. It's only possible to hit these assertions by not respecting the documented signatures of serve() and unix_serve(). --- src/websockets/server.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/src/websockets/server.py b/src/websockets/server.py index 547656e0c..7c268c257 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -914,11 +914,16 @@ def __init__( ) if path is None: + # serve(..., host, port) must specify host and port parameters. + # host can be None to listen on all interfaces; port cannot be None. + assert port is not None # https://github.com/python/typeshed/pull/2763 - host = cast(str, host) - port = cast(int, port) - creating_server = loop.create_server(factory, host, port, **kwargs) + creating_server = loop.create_server( # type: ignore + factory, host, port, **kwargs + ) else: + # unix_serve(path) must not specify host and port parameters. + assert host is None and port is None creating_server = loop.create_unix_server(factory, path, **kwargs) # This is a coroutine object. @@ -966,6 +971,8 @@ def unix_serve( """ Similar to :func:`serve()`, but for listening on Unix sockets. + ``path`` is the path to the Unix socket. + This function calls the event loop's :meth:`~asyncio.AbstractEventLoop.create_unix_server` method. From 87a9ec06ce119ad50bb54a250514fc426e8ad370 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Thu, 27 Jun 2019 07:54:17 +0200 Subject: [PATCH 106/281] Move SecurityError to exceptions module. 
---
 src/websockets/exceptions.py |  8 ++++++++
 src/websockets/http.py       | 11 ++++-------
 tests/test_exceptions.py     |  5 +++++
 tests/test_http.py           |  5 +++--
 4 files changed, 20 insertions(+), 9 deletions(-)

diff --git a/src/websockets/exceptions.py b/src/websockets/exceptions.py
index 36a8ed4a8..ce2c1e64b 100644
--- a/src/websockets/exceptions.py
+++ b/src/websockets/exceptions.py
@@ -25,6 +25,7 @@
     "NegotiationError",
     "PayloadTooBig",
     "RedirectHandshake",
+    "SecurityError",
     "WebSocketProtocolError",
 ]
 
@@ -52,6 +53,13 @@ def __init__(
         super().__init__(message)
 
 
+class SecurityError(InvalidHandshake):
+    """
+    Exception raised when a HTTP request or response breaks security rules.
+
+    """
+
+
 class RedirectHandshake(InvalidHandshake):
     """
     Exception raised when a handshake gets redirected.
diff --git a/src/websockets/http.py b/src/websockets/http.py
index 6fbe5eb31..04424c6c5 100644
--- a/src/websockets/http.py
+++ b/src/websockets/http.py
@@ -39,13 +39,6 @@
 USER_AGENT = f"Python/{sys.version[:3]} websockets/{websockets_version}"
 
 
-class SecurityError(ValueError):
-    """
-    HTTP request or response exceeds security limits.
-
-    """
-
-
 def d(value: bytes) -> str:
     """
     Decode a bytestring for interpolating into an error message.
@@ -211,6 +204,8 @@ async def read_headers(stream: asyncio.StreamReader) -> "Headers":
             headers[name] = value
 
         else:
+            from .exceptions import SecurityError  # avoid circular import
+
             raise SecurityError("too many HTTP headers")
 
     return headers
@@ -229,6 +224,8 @@ async def read_line(stream: asyncio.StreamReader) -> bytes:
     line = await stream.readline()
     # Security: this guarantees header values are small (hard-coded = 4 KiB)
     if len(line) > MAX_LINE:
+        from .exceptions import SecurityError  # avoid circular import
+
         raise SecurityError("line too long")
     # Not mandatory but safe - https://tools.ietf.org/html/rfc7230#section-3.5
     if not line.endswith(b"\r\n"):
diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py
index fbc06e576..2cbd78671 100644
--- a/tests/test_exceptions.py
+++ b/tests/test_exceptions.py
@@ -16,6 +16,11 @@ def test_str(self):
                 AbortHandshake(200, Headers(), b"OK\n"),
                 "HTTP 200, 0 headers, 3 bytes",
             ),
+            (
+                SecurityError("redirect from WSS to WS"),
+                "redirect from WSS to WS",
+
+            ),
             (
                 RedirectHandshake("wss://example.com"),
                 "redirect to wss://example.com",
diff --git a/tests/test_http.py b/tests/test_http.py
index cff97fc2f..41b522c3d 100644
--- a/tests/test_http.py
+++ b/tests/test_http.py
@@ -1,6 +1,7 @@
 import asyncio
 import unittest
 
+from websockets.exceptions import SecurityError
 from websockets.http import *
 from websockets.http import read_headers
 
@@ -120,13 +121,13 @@ async def test_header_value(self):
 
     async def test_headers_limit(self):
         self.stream.feed_data(b"foo: bar\r\n" * 257 + b"\r\n")
-        with self.assertRaises(ValueError):
+        with self.assertRaises(SecurityError):
             await read_headers(self.stream)
 
     async def test_line_limit(self):
         # Header line contains 5 + 4090 + 2 = 4097 bytes.
         self.stream.feed_data(b"foo: " + b"a" * 4090 + b"\r\n\r\n")
-        with self.assertRaises(ValueError):
+        with self.assertRaises(SecurityError):
             await read_headers(self.stream)
 
     async def test_line_ending(self):

From 626544bc58565b19dc11f74ebe9b8fe25ff411b6 Mon Sep 17 00:00:00 2001
From: Aymeric Augustin
Date: Thu, 27 Jun 2019 21:44:40 +0200
Subject: [PATCH 107/281] Refactor redirect handling in connect().

This reverts parts of 00458f27 and uses a less usual but less verbose
approach. _redirect() may look like a hack but it uses public APIs.
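To illustrate the idea with made-up names (this snippet is only a sketch,
not code from the patch): the pending connection call stays frozen in a
``functools.partial``, and a redirect rebuilds that partial with a new host
and port while keeping the other arguments.

    import functools

    def create_connection(factory, host, port):
        # Stand-in for the event loop's create_connection() in this sketch.
        print(f"connecting to {host}:{port} with {factory}")

    # Freeze the initial target into a partial.
    create = functools.partial(create_connection, "protocol factory", "example.com", 80)
    create()  # connecting to example.com:80 with protocol factory

    # On redirect, rebuild the partial around the same function, swapping only
    # the host and port and keeping any keyword arguments.
    create = functools.partial(
        create.func, create.args[0], "other.example.com", 443, **create.keywords
    )
    create()  # connecting to other.example.com:443 with protocol factory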
This approach minimizes divergence between the client and server implementations. Also it will make it easier to implement new features in connect(). --- src/websockets/client.py | 132 +++++++++++++++++++-------------------- src/websockets/server.py | 19 +++--- 2 files changed, 75 insertions(+), 76 deletions(-) diff --git a/src/websockets/client.py b/src/websockets/client.py index 110e61f69..943c4bbe7 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -5,6 +5,7 @@ import asyncio import collections.abc +import functools import logging import warnings from types import TracebackType @@ -17,6 +18,7 @@ InvalidStatusCode, NegotiationError, RedirectHandshake, + SecurityError, ) from .extensions.base import ClientExtensionFactory, Extension from .extensions.permessage_deflate import ClientPerMessageDeflateFactory @@ -410,8 +412,8 @@ def __init__( if loop is None: loop = asyncio.get_event_loop() - self._wsuri = parse_uri(uri) - if self._wsuri.secure: + wsuri = parse_uri(uri) + if wsuri.secure: kwargs.setdefault("ssl", True) elif kwargs.get("ssl") is not None: raise ValueError( @@ -432,69 +434,65 @@ def __init__( elif compression is not None: raise ValueError(f"unsupported compression: {compression}") - self._create_protocol = create_protocol - self._ping_interval = ping_interval - self._ping_timeout = ping_timeout - self._close_timeout = close_timeout - self._max_size = max_size - self._max_queue = max_queue - self._read_limit = read_limit - self._write_limit = write_limit - self._loop = loop - self._legacy_recv = legacy_recv - self._klass = klass - self._timeout = timeout - self._compression = compression - self._origin = origin - self._extensions = extensions - self._subprotocols = subprotocols - self._extra_headers = extra_headers - self._kwargs = kwargs - - async def _creating_connection( - self - ) -> Tuple[asyncio.Transport, WebSocketClientProtocol]: - if self._wsuri.secure: - self._kwargs.setdefault("ssl", True) - - factory = lambda: self._create_protocol( - host=self._wsuri.host, - port=self._wsuri.port, - secure=self._wsuri.secure, - ping_interval=self._ping_interval, - ping_timeout=self._ping_timeout, - close_timeout=self._close_timeout, - max_size=self._max_size, - max_queue=self._max_queue, - read_limit=self._read_limit, - write_limit=self._write_limit, - loop=self._loop, - legacy_recv=self._legacy_recv, - origin=self._origin, - extensions=self._extensions, - subprotocols=self._subprotocols, - extra_headers=self._extra_headers, + factory = functools.partial( + create_protocol, + host=wsuri.host, + port=wsuri.port, + secure=wsuri.secure, + ping_interval=ping_interval, + ping_timeout=ping_timeout, + close_timeout=close_timeout, + max_size=max_size, + max_queue=max_queue, + read_limit=read_limit, + write_limit=write_limit, + loop=loop, + legacy_recv=legacy_recv, + origin=origin, + extensions=extensions, + subprotocols=subprotocols, + extra_headers=extra_headers, ) host: Optional[str] port: Optional[int] - if self._kwargs.get("sock") is None: - host, port = self._wsuri.host, self._wsuri.port + if kwargs.get("sock") is None: + host, port = wsuri.host, wsuri.port else: - # If sock is given, host and port mustn't be specified. + # If sock is given, host and port shouldn't be specified. host, port = None, None - self._wsuri = self._wsuri - self._origin = self._origin + # This is a coroutine function. 
+ self._create_connection = functools.partial( + loop.create_connection, factory, host, port, **kwargs + ) + + self._wsuri = wsuri + self._origin = origin - # This is a coroutine object. - # https://github.com/python/typeshed/pull/2756 - transport, protocol = await self._loop.create_connection( # type: ignore - factory, host, port, **self._kwargs + def _redirect(self, uri: str) -> None: + old_wsuri = self._wsuri + factory, old_host, old_port = self._create_connection.args + + new_wsuri = parse_uri(uri) + new_host, new_port = new_wsuri.host, new_wsuri.port + if old_wsuri.secure and not new_wsuri.secure: + raise SecurityError("redirect from WSS to WS") + + # Replace the host and port argument passed to the protocol factory. + factory = self._create_connection.args[0] + factory_keywords = dict(factory.keywords, host=new_host, port=new_port) + factory = functools.partial(factory.func, *factory.args, **factory_keywords) + + # Replace the host and port argument passed to create_connection. + create_connection_args = (factory, new_host, new_port) + self._create_connection = functools.partial( + self._create_connection.func, + *create_connection_args, + **self._create_connection.keywords, ) - transport = cast(asyncio.Transport, transport) - protocol = cast(WebSocketClientProtocol, protocol) - return transport, protocol + + self._wsuri = new_wsuri # async with connect(...) @@ -517,7 +515,10 @@ def __await__(self) -> Generator[Any, None, WebSocketClientProtocol]: async def __await_impl__(self) -> WebSocketClientProtocol: for redirects in range(self.MAX_REDIRECTS_ALLOWED): - transport, protocol = await self._creating_connection() + transport, protocol = await self._create_connection() + # https://github.com/python/typeshed/pull/2756 + transport = cast(asyncio.Transport, transport) + protocol = cast(WebSocketClientProtocol, protocol) try: try: @@ -528,22 +529,17 @@ async def __await_impl__(self) -> WebSocketClientProtocol: available_subprotocols=protocol.available_subprotocols, extra_headers=protocol.extra_headers, ) - break # redirection chain ended except Exception: protocol.fail_connection() await protocol.wait_closed() raise - except RedirectHandshake as e: - wsuri = parse_uri(e.uri) - if self._wsuri.secure and not wsuri.secure: - raise InvalidHandshake("redirect dropped TLS") - self._wsuri = wsuri - continue # redirection chain continues + else: + self.ws_client = protocol + return protocol + except RedirectHandshake as exc: + self._redirect(exc.uri) else: - raise InvalidHandshake("maximum redirects exceeded") - - self.ws_client = protocol - return protocol + raise SecurityError("too many redirects") # yield from connect(...) diff --git a/src/websockets/server.py b/src/websockets/server.py index 7c268c257..c02b67e03 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -6,6 +6,7 @@ import asyncio import collections.abc import email.utils +import functools import http import logging import socket @@ -890,7 +891,8 @@ def __init__( elif compression is not None: raise ValueError(f"unsupported compression: {compression}") - factory = lambda: create_protocol( + factory = functools.partial( + create_protocol, ws_handler, ws_server, host=host, @@ -917,17 +919,18 @@ def __init__( # serve(..., host, port) must specify host and port parameters. # host can be None to listen on all interfaces; port cannot be None. 
assert port is not None - # https://github.com/python/typeshed/pull/2763 - creating_server = loop.create_server( # type: ignore - factory, host, port, **kwargs + create_server = functools.partial( + loop.create_server, factory, host, port, **kwargs ) else: # unix_serve(path) must not specify host and port parameters. assert host is None and port is None - creating_server = loop.create_unix_server(factory, path, **kwargs) + create_server = functools.partial( + loop.create_unix_server, factory, path, **kwargs + ) - # This is a coroutine object. - self._creating_server = creating_server + # This is a coroutine function. + self._create_server = create_server self.ws_server = ws_server # async with serve(...) @@ -951,7 +954,7 @@ def __await__(self) -> Generator[Any, None, WebSocketServer]: return self.__await_impl__().__await__() async def __await_impl__(self) -> WebSocketServer: - server = await self._creating_server + server = await self._create_server() self.ws_server.wrap(server) return self.ws_server From 721ef99dab6efebbf1aad29ba127387f5e129855 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 29 Jun 2019 10:31:06 +0200 Subject: [PATCH 108/281] Add unix_connect to connect to a Unix socket.. Fix #539. --- docs/api.rst | 3 +++ docs/changelog.rst | 2 ++ example/unix_client.py | 19 ++++++++++++++++ example/unix_server.py | 22 ++++++++++++++++++ src/websockets/client.py | 45 ++++++++++++++++++++++++++++--------- tests/test_client_server.py | 16 +++++-------- 6 files changed, 86 insertions(+), 21 deletions(-) create mode 100755 example/unix_client.py create mode 100755 example/unix_server.py diff --git a/docs/api.rst b/docs/api.rst index ef567ed5b..56372eb11 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -59,6 +59,9 @@ Client .. autofunction:: connect(uri, *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origin=None, extensions=None, subprotocols=None, extra_headers=None, **kwds) :async: + .. autofunction:: unix_connect(path, uri="ws://localhost/", *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origin=None, extensions=None, subprotocols=None, extra_headers=None, **kwds) + :async: + .. autoclass:: WebSocketClientProtocol(*, host=None, port=None, secure=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, origin=None, extensions=None, subprotocols=None, extra_headers=None) .. automethod:: handshake diff --git a/docs/changelog.rst b/docs/changelog.rst index 92cbce58f..761c8b8fc 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -56,6 +56,8 @@ Also: * :func:`~client.connect` handles redirects from the server during the handshake. +* Added :func:`~client.unix_connect` for connecting to Unix sockets. + * Improved support for sending fragmented messages by accepting asynchronous iterators in :meth:`~protocol.WebSocketCommonProtocol.send`. 
diff --git a/example/unix_client.py b/example/unix_client.py new file mode 100755 index 000000000..577135b3d --- /dev/null +++ b/example/unix_client.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python + +# WS client example connecting to a Unix socket + +import asyncio +import os.path +import websockets + +async def hello(): + socket_path = os.path.join(os.path.dirname(__file__), "socket") + async with websockets.unix_connect(socket_path) as websocket: + name = input("What's your name? ") + await websocket.send(name) + print(f"> {name}") + + greeting = await websocket.recv() + print(f"< {greeting}") + +asyncio.get_event_loop().run_until_complete(hello()) diff --git a/example/unix_server.py b/example/unix_server.py new file mode 100755 index 000000000..a6ec0168a --- /dev/null +++ b/example/unix_server.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python + +# WS server example listening on a Unix socket + +import asyncio +import os.path +import websockets + +async def hello(websocket, path): + name = await websocket.recv() + print(f"< {name}") + + greeting = f"Hello {name}!" + + await websocket.send(greeting) + print(f"> {greeting}") + +socket_path = os.path.join(os.path.dirname(__file__), "socket") +start_server = websockets.unix_serve(hello, socket_path) + +asyncio.get_event_loop().run_until_complete(start_server) +asyncio.get_event_loop().run_forever() diff --git a/src/websockets/client.py b/src/websockets/client.py index 943c4bbe7..4da8c3b50 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -37,7 +37,7 @@ from .uri import WebSocketURI, parse_uri -__all__ = ["connect", "WebSocketClientProtocol"] +__all__ = ["connect", "unix_connect", "WebSocketClientProtocol"] logger = logging.getLogger(__name__) @@ -372,6 +372,7 @@ def __init__( self, uri: str, *, + path: Optional[str] = None, create_protocol: Optional[Type[WebSocketClientProtocol]] = None, ping_interval: float = 20, ping_timeout: float = 20, @@ -454,19 +455,24 @@ def __init__( extra_headers=extra_headers, ) - host: Optional[str] - port: Optional[int] - if kwargs.get("sock") is None: - host, port = wsuri.host, wsuri.port + if path is None: + host: Optional[str] + port: Optional[int] + if kwargs.get("sock") is None: + host, port = wsuri.host, wsuri.port + else: + # If sock is given, host and port shouldn't be specified. + host, port = None, None + create_connection = functools.partial( + loop.create_connection, factory, host, port, **kwargs + ) else: - # If sock is given, host and port shouldn't be specified. - host, port = None, None + create_connection = functools.partial( + loop.create_unix_connection, factory, path, **kwargs + ) # This is a coroutine function. - self._create_connection = functools.partial( - loop.create_connection, factory, host, port, **kwargs - ) - + self._create_connection = create_connection self._wsuri = wsuri self._origin = origin @@ -547,3 +553,20 @@ async def __await_impl__(self) -> WebSocketClientProtocol: connect = Connect + + +def unix_connect(path: str, uri: str = "ws://localhost/", **kwargs: Any) -> Connect: + """ + Similar to :func:`connect`, but for connecting to a Unix socket. + + ``path`` is the path to the Unix socket. ``uri`` is the WebSocket URI. + + This function calls the event loop's + :meth:`~asyncio.AbstractEventLoop.create_unix_connection` method. + + It is only available on Unix. + + It's mainly useful for debugging servers listening on Unix sockets. 
+ + """ + return connect(uri=uri, path=path, **kwargs) diff --git a/tests/test_client_server.py b/tests/test_client_server.py index a88002364..738d92ff0 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -151,9 +151,6 @@ def get_server_uri(server, secure=False, resource_name="/", user_info=None): host = f"[{host}]" elif server_socket.family == socket.AF_INET: host, port = server_socket.getsockname() - elif server_socket.family == socket.AF_UNIX: - # The host and port are ignored when connecting to a Unix socket. - host, port = "localhost", 0 else: # pragma: no cover raise ValueError("expected an IPv6, IPv4, or Unix socket") @@ -489,18 +486,17 @@ def test_unix_socket(self): # Like self.start_server() but with unix_serve(). unix_server = unix_serve(handler, path) self.server = self.loop.run_until_complete(unix_server) - - client_socket = socket.socket(socket.AF_UNIX) - client_socket.connect(path) - try: - with self.temp_client(sock=client_socket): + # Like self.start_client() but with unix_connect() + unix_client = unix_connect(path) + self.client = self.loop.run_until_complete(unix_client) + try: self.loop.run_until_complete(self.client.send("Hello!")) reply = self.loop.run_until_complete(self.client.recv()) self.assertEqual(reply, "Hello!") - + finally: + self.stop_client() finally: - client_socket.close() self.stop_server() async def process_request_OK(path, request_headers): From 752f4145cd06d303f7ac2ddc8c1fdffe9c492bff Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 29 Jun 2019 12:52:11 +0200 Subject: [PATCH 109/281] Support overriding host and port in connect(). Fix #540. Thanks @Kirill888 for the report and initial patch. --- docs/changelog.rst | 2 ++ src/websockets/client.py | 41 +++++++++++++++++++++++-------------- tests/test_client_server.py | 23 +++++++++++++++++++-- 3 files changed, 49 insertions(+), 17 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 761c8b8fc..7a02ec0e7 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -56,6 +56,8 @@ Also: * :func:`~client.connect` handles redirects from the server during the handshake. +* :func:`~client.connect` supports overriding ``host`` and ``port``. + * Added :func:`~client.unix_connect` for connecting to Unix sockets. * Improved support for sending fragmented messages by accepting asynchronous diff --git a/src/websockets/client.py b/src/websockets/client.py index 4da8c3b50..abcf9dc62 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -334,6 +334,11 @@ class Connect: a ``wss://`` URI, if this argument isn't provided explicitly, it's set to ``True``, which means Python's default :class:`~ssl.SSLContext` is used. + You can connect to a different host and port from those found in ``uri`` + by setting ``host`` and ``port`` keyword arguments. This only changes the + destination of the TCP connection; the hostname from ``uri`` is still used + in the TLS handshake for secure connections and in the ``Host`` header. + The behavior of the ``ping_interval``, ``ping_timeout``, ``close_timeout``, ``max_size``, ``max_queue``, ``read_limit``, and ``write_limit`` optional arguments is described in the documentation of @@ -463,6 +468,9 @@ def __init__( else: # If sock is given, host and port shouldn't be specified. host, port = None, None + # If host and port are given, override values from the URI. 
+ host = kwargs.pop("host", host) + port = kwargs.pop("port", port) create_connection = functools.partial( loop.create_connection, factory, host, port, **kwargs ) @@ -478,25 +486,28 @@ def __init__( def _redirect(self, uri: str) -> None: old_wsuri = self._wsuri - factory, old_host, old_port = self._create_connection.args - new_wsuri = parse_uri(uri) - new_host, new_port = new_wsuri.host, new_wsuri.port + if old_wsuri.secure and not new_wsuri.secure: raise SecurityError("redirect from WSS to WS") - # Replace the host and port argument passed to the protocol factory. - factory = self._create_connection.args[0] - factory_keywords = dict(factory.keywords, host=new_host, port=new_port) - factory = functools.partial(factory.func, *factory.args, **factory_keywords) - - # Replace the host and port argument passed to create_connection. - create_connection_args = (factory, new_host, new_port) - self._create_connection = functools.partial( - self._create_connection.func, - *create_connection_args, - **self._create_connection.keywords, - ) + # Only rewrite the host and port arguments is they change in the URI. + # This preserves connection overrides with the host, port, or sock + # arguments if the redirect points to the same host and port. + if old_wsuri.host != new_wsuri.host or old_wsuri.port != new_wsuri.port: + # Replace the host and port argument passed to the protocol factory. + factory = self._create_connection.args[0] + factory = functools.partial( + factory.func, + *factory.args, + **dict(factory.keywords, host=new_wsuri.host, port=new_wsuri.port), + ) + # Replace the host and port argument passed to create_connection. + self._create_connection = functools.partial( + self._create_connection.func, + *(factory, new_wsuri.host, new_wsuri.port), + **self._create_connection.keywords, + ) self._wsuri = new_wsuri diff --git a/tests/test_client_server.py b/tests/test_client_server.py index 738d92ff0..7281ec6bd 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -31,6 +31,7 @@ from websockets.http import USER_AGENT, Headers, read_response from websockets.protocol import State from websockets.server import * +from websockets.uri import parse_uri from .test_protocol import MS from .utils import AsyncioTestCase @@ -284,8 +285,11 @@ def start_client( # Disable pings by default in tests. kwargs.setdefault("ping_interval", None) secure = kwargs.get("ssl") is not None - server = self.redirecting_server if self.redirecting_server else self.server - server_uri = get_server_uri(server, secure, resource_name, user_info) + try: + server_uri = kwargs.pop("uri") + except KeyError: + server = self.redirecting_server if self.redirecting_server else self.server + server_uri = get_server_uri(server, secure, resource_name, user_info) with warnings.catch_warnings(record=True) as recorded_warnings: start_client = connect(server_uri, **kwargs) @@ -437,6 +441,21 @@ def test_explicit_event_loop(self): reply = self.loop.run_until_complete(self.client.recv()) self.assertEqual(reply, "Hello!") + @with_server() + def test_explicit_host_port(self): + uri = get_server_uri(self.server, self.secure) + wsuri = parse_uri(uri) + + # Change host and port to invalid values. 
+ changed_uri = uri.replace(wsuri.host, "example.com").replace( + str(wsuri.port), str(65535 - wsuri.port) + ) + + with self.temp_client(uri=changed_uri, host=wsuri.host, port=wsuri.port): + self.loop.run_until_complete(self.client.send("Hello!")) + reply = self.loop.run_until_complete(self.client.recv()) + self.assertEqual(reply, "Hello!") + @with_server() def test_explicit_socket(self): class TrackedSocket(socket.socket): From f967833ee3c8215e49edd4033d1efb3985a895ad Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 29 Jun 2019 17:47:20 +0200 Subject: [PATCH 110/281] Minor code changes for readability. --- src/websockets/client.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/src/websockets/client.py b/src/websockets/client.py index abcf9dc62..10435c1ff 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -484,17 +484,23 @@ def __init__( self._wsuri = wsuri self._origin = origin - def _redirect(self, uri: str) -> None: + def handle_redirect(self, uri: str) -> None: + # Update the state of this instance to connect to a new URI. old_wsuri = self._wsuri new_wsuri = parse_uri(uri) + # Forbid TLS downgrade. if old_wsuri.secure and not new_wsuri.secure: raise SecurityError("redirect from WSS to WS") - # Only rewrite the host and port arguments is they change in the URI. - # This preserves connection overrides with the host, port, or sock + same_origin = ( + old_wsuri.host == new_wsuri.host and old_wsuri.port == new_wsuri.port + ) + + # Rewrite the host and port arguments for cross-origin redirects. + # This preserves connection overrides with the host and port # arguments if the redirect points to the same host and port. - if old_wsuri.host != new_wsuri.host or old_wsuri.port != new_wsuri.port: + if not same_origin: # Replace the host and port argument passed to the protocol factory. factory = self._create_connection.args[0] factory = functools.partial( @@ -509,6 +515,7 @@ def _redirect(self, uri: str) -> None: **self._create_connection.keywords, ) + # Set the new WebSocket URI. This suffices for same-origin redirects. self._wsuri = new_wsuri # async with connect(...) @@ -554,7 +561,7 @@ async def __await_impl__(self) -> WebSocketClientProtocol: self.ws_client = protocol return protocol except RedirectHandshake as exc: - self._redirect(exc.uri) + self.handle_redirect(exc.uri) else: raise SecurityError("too many redirects") From dd653dbe551a88dec4491fd5d83c9eefa236213a Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 30 Jun 2019 14:32:22 +0200 Subject: [PATCH 111/281] Add WebSocketServer to server.__all__. Fix #562. Thanks @lgrahl for the suggestion. --- src/websockets/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/websockets/server.py b/src/websockets/server.py index c02b67e03..8e1db9b7c 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -50,7 +50,7 @@ from .typing import Origin, Subprotocol -__all__ = ["serve", "unix_serve", "WebSocketServerProtocol"] +__all__ = ["serve", "unix_serve", "WebSocketServerProtocol", "WebSocketServer"] logger = logging.getLogger(__name__) From 6386867594a685c026099fb307ae0efd36ca6095 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 30 Jun 2019 21:48:44 +0200 Subject: [PATCH 112/281] Handle import loops consistently. 
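The pattern adopted here can be sketched with a made-up module rather than the library's actual code: instead of re-importing the exception inside every function that raises it, the module is imported once, at the bottom of the file, and the exception is looked up through the module at call time, so the circular dependency between the two modules stays harmless::

    # sketch_http.py -- illustrative only; names and logic are simplified.

    MAX_HEADERS = 256

    def read_headers(lines):
        """Toy parser that fails the same way the real one does."""
        if len(lines) > MAX_HEADERS:
            # The attribute is resolved at call time, after both modules
            # have finished initializing, so the import cycle is harmless.
            raise websockets.exceptions.SecurityError("too many HTTP headers")
        return dict(line.split(":", 1) for line in lines)

    # Deliberately placed at the bottom, mirroring the diff below: by the
    # time this import runs, everything defined above already exists.
    import websockets.exceptions  # isort:skip # noqa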
--- src/websockets/http.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/websockets/http.py b/src/websockets/http.py index 04424c6c5..46b09c2e6 100644 --- a/src/websockets/http.py +++ b/src/websockets/http.py @@ -204,9 +204,7 @@ async def read_headers(stream: asyncio.StreamReader) -> "Headers": headers[name] = value else: - from .exceptions import SecurityError # avoid circular import - - raise SecurityError("too many HTTP headers") + raise websockets.exceptions.SecurityError("too many HTTP headers") return headers @@ -224,9 +222,7 @@ async def read_line(stream: asyncio.StreamReader) -> bytes: line = await stream.readline() # Security: this guarantees header values are small (hard-coded = 4 KiB) if len(line) > MAX_LINE: - from .exceptions import SecurityError # avoid circular import - - raise SecurityError("line too long") + raise websockets.exceptions.SecurityError("line too long") # Not mandatory but safe - https://tools.ietf.org/html/rfc7230#section-3.5 if not line.endswith(b"\r\n"): raise EOFError("line without CRLF") @@ -364,3 +360,7 @@ def raw_items(self) -> Iterator[Tuple[str, str]]: HeadersLike = Union[Headers, Mapping[str, str], Iterable[Tuple[str, str]]] + + +# at the bottom to allow circular import, because AbortHandshake depends on HeadersLike +import websockets.exceptions # isort:skip # noqa From e832c565b6ac85b5c1a80c7e6eab15eaead31440 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Mon, 1 Jul 2019 21:37:09 +0200 Subject: [PATCH 113/281] Fix InvalidStateError when failing the connection. This exception occurred when: - the incoming queue was full - the connection terminated with an error - recv() was called at the wrong time Fix #634. --- src/websockets/protocol.py | 2 +- tests/test_protocol.py | 24 ++++++++++++++++++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 43dcbd4ff..5161017b2 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -725,7 +725,7 @@ async def transfer_data(self) -> None: while len(self.messages) >= self.max_queue: self._put_message_waiter = self.loop.create_future() try: - await self._put_message_waiter + await asyncio.shield(self._put_message_waiter) finally: self._put_message_waiter = None diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 0d3185d42..321d20f63 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -459,12 +459,15 @@ def test_recv_queue_full(self): self.assertEqual(list(self.protocol.messages), ["café", b"tea"]) self.loop.run_until_complete(self.protocol.recv()) + self.run_loop_once() self.assertEqual(list(self.protocol.messages), [b"tea", b"milk"]) self.loop.run_until_complete(self.protocol.recv()) + self.run_loop_once() self.assertEqual(list(self.protocol.messages), [b"milk"]) self.loop.run_until_complete(self.protocol.recv()) + self.run_loop_once() self.assertEqual(list(self.protocol.messages), []) def test_recv_queue_no_limit(self): @@ -519,6 +522,27 @@ def test_recv_canceled_race_condition(self): # If we're getting "tea" there, it means "café" was swallowed (ha, ha). self.assertEqual(data, "café") + def test_recv_when_transfer_data_cancelled(self): + # Clog incoming queue. + self.protocol.max_queue = 1 + self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8"))) + self.receive_frame(Frame(True, OP_BINARY, b"tea")) + self.run_loop_once() + + # Flow control kicks in (check with an implementation detail). 
+ self.assertFalse(self.protocol._put_message_waiter.done()) + + # Schedule recv(). + recv = self.loop.create_task(self.protocol.recv()) + + # Cancel transfer_data_task (again, implementation detail). + self.protocol.fail_connection() + self.run_loop_once() + self.assertTrue(self.protocol.transfer_data_task.cancelled()) + + # recv() completes properly. + self.assertEqual(self.loop.run_until_complete(recv), "café") + def test_recv_prevents_concurrent_calls(self): recv = self.loop.create_task(self.protocol.recv()) From d601f68b7edfed92fbb7566511bea927f324b3c2 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Mon, 1 Jul 2019 21:54:46 +0200 Subject: [PATCH 114/281] Serialize sending fragmented messages. While sending a fragmented message, no other data frame can be sent. Fix #542. --- src/websockets/protocol.py | 23 ++++++++++++++++++++++- tests/test_protocol.py | 32 +++++++++++++++++++++++++++++++- 2 files changed, 53 insertions(+), 2 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 5161017b2..8eab48651 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -258,6 +258,9 @@ def __init__( self._pop_message_waiter: Optional[asyncio.Future[None]] = None self._put_message_waiter: Optional[asyncio.Future[None]] = None + # Flag that protects sending fragmented messages. + self.sending_fragmented_message = False + # Mapping of ping IDs to waiters, in chronological order. self.pings: collections.OrderedDict[ bytes, asyncio.Future[None] @@ -418,7 +421,7 @@ async def recv(self) -> Data: """ if self._pop_message_waiter is not None: raise RuntimeError( - "cannot call recv() while another coroutine " + "cannot call recv while another coroutine " "is already waiting for the next message" ) @@ -487,6 +490,13 @@ async def send( """ await self.ensure_open() + # Prevent sending other messages until all fragments are sent. + if self.sending_fragmented_message: + raise RuntimeError( + "cannot call send while another coroutine " + "is sending a fragmented message" + ) + # Unfragmented message -- this case must be handled first because # strings and bytes-like objects are iterable. @@ -503,6 +513,8 @@ async def send( iter_message = iter(message) + self.sending_fragmented_message = True + # First fragment. try: message_chunk = next(iter_message) @@ -521,6 +533,9 @@ async def send( raise TypeError("data contains inconsistent types") await self.write_frame(False, OP_CONT, data) + # write_frame() will write to the buffer before yielding control. + self.sending_fragmented_message = False + # Final fragment. await self.write_frame(True, OP_CONT, b"") @@ -530,6 +545,9 @@ async def send( # aiter_message = aiter(message) without aiter aiter_message = type(message).__aiter__(message) + # Prevent sending other messages until all fragments are sent. + self.sending_fragmented_message = True + # First fragment. try: # message_chunk = anext(aiter_message) without anext @@ -549,6 +567,9 @@ async def send( raise TypeError("data contains inconsistent types") await self.write_frame(False, OP_CONT, data) + # write_frame() will write to the buffer before yielding control. + self.sending_fragmented_message = False + # Final fragment. 
await self.write_frame(True, OP_CONT, b"") diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 321d20f63..342b3255e 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -546,7 +546,11 @@ def test_recv_when_transfer_data_cancelled(self): def test_recv_prevents_concurrent_calls(self): recv = self.loop.create_task(self.protocol.recv()) - with self.assertRaises(RuntimeError): + with self.assertRaisesRegex( + RuntimeError, + "cannot call recv while another coroutine " + "is already waiting for the next message", + ): self.loop.run_until_complete(self.protocol.recv()) recv.cancel() @@ -633,6 +637,19 @@ def test_send_iterable_mixed_type_error(self): (True, OP_CLOSE, serialize_close(1011, "")), ) + def test_send_iterable_prevents_concurrent_send(self): + self.make_drain_slow() + send = self.loop.create_task(self.protocol.send(["ca", "fé"])) + + with self.assertRaisesRegex( + RuntimeError, + "cannot call send while another coroutine " + "is sending a fragmented message", + ): + self.loop.run_until_complete(self.protocol.send("tea")) + + send.cancel() + def test_send_async_iterable_text(self): self.loop.run_until_complete(self.protocol.send(async_iterable(["ca", "fé"]))) self.assertFramesSent( @@ -692,6 +709,19 @@ def test_send_async_iterable_mixed_type_error(self): (True, OP_CLOSE, serialize_close(1011, "")), ) + def test_send_async_iterable_prevents_concurrent_send(self): + self.make_drain_slow() + send = self.loop.create_task(self.protocol.send(async_iterable(["ca", "fé"]))) + + with self.assertRaisesRegex( + RuntimeError, + "cannot call send while another coroutine " + "is sending a fragmented message", + ): + self.loop.run_until_complete(self.protocol.send("tea")) + + send.cancel() + def test_send_on_closing_connection_local(self): close_task = self.half_close_connection_local() From 7ef66541192be89373a904c27908a411400f5d68 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Mon, 1 Jul 2019 22:06:32 +0200 Subject: [PATCH 115/281] Serialize sending fragmented messages. When sending a fragmented message, wait until it's finished to send other messages. Fix #542 (more elegantly). --- src/websockets/protocol.py | 101 +++++++++++++++++++------------------ tests/test_protocol.py | 43 ++++++++-------- 2 files changed, 74 insertions(+), 70 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 8eab48651..6f2399283 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -258,8 +258,8 @@ def __init__( self._pop_message_waiter: Optional[asyncio.Future[None]] = None self._put_message_waiter: Optional[asyncio.Future[None]] = None - # Flag that protects sending fragmented messages. - self.sending_fragmented_message = False + # Protect sending fragmented messages. + self._fragmented_message_waiter: Optional[asyncio.Future[None]] = None # Mapping of ping IDs to waiters, in chronological order. self.pings: collections.OrderedDict[ @@ -490,12 +490,10 @@ async def send( """ await self.ensure_open() - # Prevent sending other messages until all fragments are sent. - if self.sending_fragmented_message: - raise RuntimeError( - "cannot call send while another coroutine " - "is sending a fragmented message" - ) + # While sending a fragmented message, prevent sending other messages + # until all fragments are sent. + while self._fragmented_message_waiter is not None: + await asyncio.shield(self._fragmented_message_waiter) # Unfragmented message -- this case must be handled first because # strings and bytes-like objects are iterable. 
@@ -512,66 +510,73 @@ async def send( message = cast(Iterable[Data], message) iter_message = iter(message) - - self.sending_fragmented_message = True - - # First fragment. try: message_chunk = next(iter_message) except StopIteration: return opcode, data = prepare_data(message_chunk) - await self.write_frame(False, opcode, data) - # Other fragments. - for message_chunk in iter_message: - confirm_opcode, data = prepare_data(message_chunk) - if confirm_opcode != opcode: - # We're half-way through a fragmented message and we can't - # complete it. This makes the connection unusable. - self.fail_connection(1011) - raise TypeError("data contains inconsistent types") - await self.write_frame(False, OP_CONT, data) + self._fragmented_message_waiter = asyncio.Future() + try: + # First fragment. + await self.write_frame(False, opcode, data) - # write_frame() will write to the buffer before yielding control. - self.sending_fragmented_message = False + # Other fragments. + for message_chunk in iter_message: + confirm_opcode, data = prepare_data(message_chunk) + if confirm_opcode != opcode: + raise TypeError("data contains inconsistent types") + await self.write_frame(False, OP_CONT, data) - # Final fragment. - await self.write_frame(True, OP_CONT, b"") + # Final fragment. + await self.write_frame(True, OP_CONT, b"") + + except Exception: + # We're half-way through a fragmented message and we can't + # complete it. This makes the connection unusable. + self.fail_connection(1011) + raise + + finally: + self._fragmented_message_waiter.set_result(None) + self._fragmented_message_waiter = None # Fragmented message -- asynchronous iterator elif isinstance(message, AsyncIterable): # aiter_message = aiter(message) without aiter aiter_message = type(message).__aiter__(message) - - # Prevent sending other messages until all fragments are sent. - self.sending_fragmented_message = True - - # First fragment. try: # message_chunk = anext(aiter_message) without anext message_chunk = await type(aiter_message).__anext__(aiter_message) except StopAsyncIteration: return opcode, data = prepare_data(message_chunk) - await self.write_frame(False, opcode, data) - - # Other fragments. - async for message_chunk in aiter_message: - confirm_opcode, data = prepare_data(message_chunk) - if confirm_opcode != opcode: - # We're half-way through a fragmented message and we can't - # complete it. This makes the connection unusable. - self.fail_connection(1011) - raise TypeError("data contains inconsistent types") - await self.write_frame(False, OP_CONT, data) - - # write_frame() will write to the buffer before yielding control. - self.sending_fragmented_message = False - - # Final fragment. - await self.write_frame(True, OP_CONT, b"") + + self._fragmented_message_waiter = asyncio.Future() + try: + # First fragment. + await self.write_frame(False, opcode, data) + + # Other fragments. + async for message_chunk in aiter_message: + confirm_opcode, data = prepare_data(message_chunk) + if confirm_opcode != opcode: + raise TypeError("data contains inconsistent types") + await self.write_frame(False, OP_CONT, data) + + # Final fragment. + await self.write_frame(True, OP_CONT, b"") + + except Exception: + # We're half-way through a fragmented message and we can't + # complete it. This makes the connection unusable. 
+ self.fail_connection(1011) + raise + + finally: + self._fragmented_message_waiter.set_result(None) + self._fragmented_message_waiter = None else: raise TypeError("data must be bytes, str, or iterable") diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 342b3255e..d0156fd74 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -638,17 +638,15 @@ def test_send_iterable_mixed_type_error(self): ) def test_send_iterable_prevents_concurrent_send(self): - self.make_drain_slow() - send = self.loop.create_task(self.protocol.send(["ca", "fé"])) - - with self.assertRaisesRegex( - RuntimeError, - "cannot call send while another coroutine " - "is sending a fragmented message", - ): - self.loop.run_until_complete(self.protocol.send("tea")) - - send.cancel() + self.loop.run_until_complete( + asyncio.gather(self.protocol.send(["ca", "fé"]), self.protocol.send(b"tea")) + ) + self.assertFramesSent( + (False, OP_TEXT, "ca".encode("utf-8")), + (False, OP_CONT, "fé".encode("utf-8")), + (True, OP_CONT, "".encode("utf-8")), + (True, OP_BINARY, b"tea"), + ) def test_send_async_iterable_text(self): self.loop.run_until_complete(self.protocol.send(async_iterable(["ca", "fé"]))) @@ -710,17 +708,18 @@ def test_send_async_iterable_mixed_type_error(self): ) def test_send_async_iterable_prevents_concurrent_send(self): - self.make_drain_slow() - send = self.loop.create_task(self.protocol.send(async_iterable(["ca", "fé"]))) - - with self.assertRaisesRegex( - RuntimeError, - "cannot call send while another coroutine " - "is sending a fragmented message", - ): - self.loop.run_until_complete(self.protocol.send("tea")) - - send.cancel() + self.loop.run_until_complete( + asyncio.gather( + self.protocol.send(async_iterable(["ca", "fé"])), + self.protocol.send(b"tea"), + ) + ) + self.assertFramesSent( + (False, OP_TEXT, "ca".encode("utf-8")), + (False, OP_CONT, "fé".encode("utf-8")), + (True, OP_CONT, "".encode("utf-8")), + (True, OP_BINARY, b"tea"), + ) def test_send_on_closing_connection_local(self): close_task = self.half_close_connection_local() From e3452230eaf67ec5c4c253682eecb26aebee2223 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 30 Jun 2019 14:22:58 +0200 Subject: [PATCH 116/281] Discourage cancellation of APIs that write frames. If writing is stuck (and closing the connection counts as a write), then cancelling won't achieve anything. There's only one way out: closing the connection, waiting until all timeouts elapse, and eventually websockets gives up and aborts the TCP connection. Fix #278. --- src/websockets/protocol.py | 41 ++++++++++++++++++++++++++++++-------- 1 file changed, 33 insertions(+), 8 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 6f2399283..d7e16dc4a 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -479,14 +479,25 @@ async def send( object (:class:`bytes`, :class:`bytearray`, or :class:`memoryview`) as a binary frame. - It also accepts an iterable or an asynchronous iterator of strings or - bytes-like objects. Each item is treated as a message fragment and - sent in its own frame. All items must be of the same type, or else - :meth:`send` will raise a :exc:`TypeError` and the connection will be - closed. + It also accepts an iterable or an asynchronous iterable of strings or + bytes-like objects. In that case the message is fragmented. Each item + is treated as a message fragment and sent in its own frame. 
All items + must be of the same type, or else :meth:`send` will raise a + :exc:`TypeError` and the connection will be closed. It raises a :exc:`TypeError` for other inputs. + Canceling :meth:`send` is discouraged. Instead, you should close the + connection with :meth:`close`. Indeed, there only two situations where + :meth:`send` yields control to the event loop: + + 1. The write buffer is full. If you don't want to wait until enough + data is sent, your only alternative is to close the connection. + :meth:`close` will likely time out then abort the TCP connection. + 2. ``message`` is an asynchronous iterator. Stopping in the middle of + a fragmented message will cause a protocol error. Closing the + connection has the same effect. + """ await self.ensure_open() @@ -589,13 +600,17 @@ async def close(self, code: int = 1000, reason: str = "") -> None: connection to terminate. As a consequence, there's no need to await :meth:`wait_closed`; :meth:`close` already does it. + ``code`` must be an :class:`int` and ``reason`` a :class:`str`. + :meth:`close` is idempotent: it doesn't do anything once the connection is closed. - It's safe to wrap this coroutine in :func:`~asyncio.create_task` since - errors during connection termination aren't particularly useful. + Wrapping :func:`close` in :func:`~asyncio.create_task` is safe, given + that errors during connection termination aren't particularly useful. - ``code`` must be an :class:`int` and ``reason`` a :class:`str`. + Canceling :meth:`close` is discouraged. If it takes too long, you can + set a shorter ``close_timeout``. If you don't want to wait, let the + Python process exit, then the OS will close the TCP connection. """ try: @@ -648,6 +663,13 @@ async def ping(self, data: Optional[bytes] = None) -> Awaitable[None]: overridden with the optional ``data`` argument which must be a string (which will be encoded to UTF-8) or a bytes-like object. + Canceling :meth:`ping` is discouraged. If :meth:`ping` doesn't return + immediately, it means the write buffer is full. If you don't want to + wait, you should close the connection. + + Canceling the :class:`~asyncio.Future` returned by :meth:`ping` has no + effect. + """ await self.ensure_open() @@ -678,6 +700,9 @@ async def pong(self, data: bytes = b"") -> None: which must be a string (which will be encoded to UTF-8) or a bytes-like object. + Canceling :meth:`pong` is discouraged for the same reason as + :meth:`ping`. + """ await self.ensure_open() From 5a1b0bb890cb3d6c0ba2dcf05996f7fed8d3b751 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Wed, 3 Jul 2019 20:22:57 +0200 Subject: [PATCH 117/281] Change status code for invalid credentials to 401. 403 means the credentials are valid but don't provide permissions. 
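A 401 response also tells the client how to authenticate: RFC 7235 requires it to carry a ``WWW-Authenticate`` challenge, which is why the handler below now returns that header along with the status code. For context, here is a sketch of the client side; the URI and credentials are placeholders, and passing the header through ``extra_headers`` is just one way to supply them::

    import asyncio
    import base64
    import websockets

    def basic_auth(username, password):
        # RFC 7617: credentials are "username:password", Base64-encoded.
        token = base64.b64encode(f"{username}:{password}".encode()).decode()
        return ("Authorization", f"Basic {token}")

    async def hello(uri):
        async with websockets.connect(
            uri, extra_headers=[basic_auth("hello", "iloveyou")]
        ) as websocket:
            await websocket.send("Hello world!")
            print(await websocket.recv())

    asyncio.get_event_loop().run_until_complete(hello("ws://localhost:8765"))

With wrong credentials, the handshake now fails with status code 401 instead of 403, as the updated tests below check.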
--- src/websockets/auth.py | 6 +++++- tests/test_auth.py | 9 ++++++--- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/src/websockets/auth.py b/src/websockets/auth.py index 60f63e9aa..9cb673132 100644 --- a/src/websockets/auth.py +++ b/src/websockets/auth.py @@ -75,7 +75,11 @@ async def process_request( ) if not await self.check_credentials(username, password): - return (http.HTTPStatus.FORBIDDEN, [], b"Invalid credentials\n") + return ( + http.HTTPStatus.UNAUTHORIZED, + [("WWW-Authenticate", build_www_authenticate_basic(self.realm))], + b"Invalid credentials\n", + ) self.username = username diff --git a/tests/test_auth.py b/tests/test_auth.py index bcd340844..07341df56 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -122,7 +122,7 @@ def test_basic_auth_unsupported_credentials_details(self): def test_basic_auth_invalid_credentials(self): with self.assertRaises(InvalidStatusCode) as raised: self.start_client(user_info=("hello", "ihateyou")) - self.assertEqual(raised.exception.status_code, 403) + self.assertEqual(raised.exception.status_code, 401) @with_server(create_protocol=create_protocol) def test_basic_auth_invalid_credentials_details(self): @@ -131,6 +131,9 @@ def test_basic_auth_invalid_credentials_details(self): self.loop.run_until_complete( self.make_http_request(headers={"Authorization": authorization}) ) - self.assertEqual(raised.exception.code, 403) - self.assertNotIn("WWW-Authenticate", raised.exception.headers) + self.assertEqual(raised.exception.code, 401) + self.assertEqual( + raised.exception.headers["WWW-Authenticate"], + 'Basic realm="auth-tests", charset="UTF-8"', + ) self.assertEqual(raised.exception.read().decode(), "Invalid credentials\n") From be04e2fe397ba0dd4b7f7a7f33b84cd4c2c2efd2 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Wed, 3 Jul 2019 20:39:58 +0200 Subject: [PATCH 118/281] Try to make tests less flaky. Fix #639. 
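The updated tests slow down ``drain()`` and stagger the second ``send()`` with a short sleep, so the concurrent call reliably happens while the fragmented message is still in flight. From a user's point of view, the guarantee they exercise looks roughly like this (a sketch with a placeholder URI, not part of the test suite)::

    import asyncio
    import websockets

    async def main(uri):
        async with websockets.connect(uri) as websocket:

            async def fragmented():
                # Goes out as a TEXT frame, a CONT frame, and an empty
                # final CONT frame.
                await websocket.send(["ca", "fé"])

            async def whole():
                # Even if this runs while the fragments are in flight,
                # websockets never interleaves it with them.
                await websocket.send(b"tea")

            await asyncio.gather(fragmented(), whole())

    asyncio.get_event_loop().run_until_complete(main("ws://localhost:8765"))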
--- tests/test_protocol.py | 27 ++++++++++++++++++++------- 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/tests/test_protocol.py b/tests/test_protocol.py index d0156fd74..7cb593702 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -638,9 +638,16 @@ def test_send_iterable_mixed_type_error(self): ) def test_send_iterable_prevents_concurrent_send(self): - self.loop.run_until_complete( - asyncio.gather(self.protocol.send(["ca", "fé"]), self.protocol.send(b"tea")) - ) + self.make_drain_slow(2 * MS) + + async def send_iterable(): + await self.protocol.send(["ca", "fé"]) + + async def send_concurrent(): + await asyncio.sleep(MS) + await self.protocol.send(b"tea") + + self.loop.run_until_complete(asyncio.gather(send_iterable(), send_concurrent())) self.assertFramesSent( (False, OP_TEXT, "ca".encode("utf-8")), (False, OP_CONT, "fé".encode("utf-8")), @@ -708,11 +715,17 @@ def test_send_async_iterable_mixed_type_error(self): ) def test_send_async_iterable_prevents_concurrent_send(self): + self.make_drain_slow(2 * MS) + + async def send_async_iterable(): + await self.protocol.send(async_iterable(["ca", "fé"])) + + async def send_concurrent(): + await asyncio.sleep(MS) + await self.protocol.send(b"tea") + self.loop.run_until_complete( - asyncio.gather( - self.protocol.send(async_iterable(["ca", "fé"])), - self.protocol.send(b"tea"), - ) + asyncio.gather(send_async_iterable(), send_concurrent()) ) self.assertFramesSent( (False, OP_TEXT, "ca".encode("utf-8")), From 3718311049eb32a46a0e6b40c1132eeef85fd369 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Tue, 2 Jul 2019 22:44:15 +0200 Subject: [PATCH 119/281] Avoid logging ping exceptions that aren't retreived. This is a bit of a hack: it relies on the implementation of asyncio. Fix #637. --- src/websockets/protocol.py | 11 ++++++++--- tests/test_protocol.py | 10 ++++++++++ 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index d7e16dc4a..fdadb9398 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -1053,7 +1053,7 @@ async def keepalive_ping(self) -> None: # when close_connection() cancels self.keepalive_ping_task. # ping() raises ConnectionClosed if the connection is lost, - # when connection_lost() calls abort_keepalive_pings(). + # when connection_lost() calls abort_pings(). ping_waiter = await self.ping() @@ -1223,7 +1223,7 @@ def fail_connection(self, code: int = 1006, reason: str = "") -> None: if not hasattr(self, "close_connection_task"): self.close_connection_task = self.loop.create_task(self.close_connection()) - def abort_keepalive_pings(self) -> None: + def abort_pings(self) -> None: """ Raise ConnectionClosed in pending keepalive pings. @@ -1235,6 +1235,11 @@ def abort_keepalive_pings(self) -> None: for ping in self.pings.values(): ping.set_exception(exc) + # If the exception is never retrieved, it will be logged when ping + # is garbage-collected. This is confusing for users. + # Given that ping is done (with an exception), canceling it does + # nothing, but it prevents logging the exception. + ping.cancel() if self.pings: pings_hex = ", ".join( @@ -1312,7 +1317,7 @@ def connection_lost(self, exc: Optional[Exception]) -> None: self.close_code, self.close_reason or "[no reason]", ) - self.abort_keepalive_pings() + self.abort_pings() # If self.connection_lost_waiter isn't pending, that's a bug, because: # - it's set only here in connection_lost() which is called only once; # - it must never be canceled. 
diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 7cb593702..a6c420181 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -910,6 +910,16 @@ def test_abort_ping(self): self.assertTrue(ping.done()) self.assertIsInstance(ping.exception(), ConnectionClosed) + def test_abort_ping_does_not_log_exception_if_not_retreived(self): + self.loop.run_until_complete(self.protocol.ping()) + # Get the internal Future, which isn't directly returned by ping(). + ping, = self.protocol.pings.values() + # Remove the frame from the buffer, else close_connection() complains. + self.last_sent_frame() + self.close_connection() + # Check a private attribute, for lack of a better solution. + self.assertFalse(ping._log_traceback) + def test_acknowledge_previous_pings(self): pings = [ (self.loop.run_until_complete(self.protocol.ping()), self.last_sent_frame()) From 45e9a86e5dfdb772ce40a64863083b2664d2e44b Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Wed, 3 Jul 2019 23:34:11 +0200 Subject: [PATCH 120/281] Fix references in changelog. --- docs/changelog.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 7a02ec0e7..2556d70cc 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -98,7 +98,7 @@ Also: .. warning:: **Version 7.0 changes how a server terminates connections when it's - closed with** :meth:`~websockets.server.WebSocketServer.close` **.** + closed with** :meth:`~server.WebSocketServer.close` **.** Previously, connections handlers were canceled. Now, connections are closed with close code 1001 (going away). From the perspective of the @@ -223,7 +223,7 @@ Also: **Version 5.0 adds a** ``user_info`` **field to the return value of** :func:`~uri.parse_uri` **and** :class:`~uri.WebSocketURI` **.** - If you're unpacking :class:`~websockets.WebSocketURI` into four variables, + If you're unpacking :class:`~exceptions.WebSocketURI` into four variables, adjust your code to account for that fifth field. Also: From b0f6a3ec70fdfe87be8fbb877ca9bc46dc877c39 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Wed, 3 Jul 2019 23:33:22 +0200 Subject: [PATCH 121/281] Close connections properly in WebSocketServer.close. Thanks @lburg for the first iteration of this patch. Fix #541. --- docs/changelog.rst | 3 +++ src/websockets/protocol.py | 1 - src/websockets/server.py | 30 +++++++++++++----------------- tests/test_client_server.py | 4 +++- 4 files changed, 19 insertions(+), 19 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 2556d70cc..b99d3d058 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -68,6 +68,9 @@ Also: If you were using ``ping_timeout=None`` as a workaround, you can remove it. +* Changed :meth:`~server.WebSocketServer.close` to perform a proper closing + handshake instead of failing the connection. + * Avoided a crash of a ``extra_headers`` callable returns ``None``. * Improved error messages when HTTP parsing fails. diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index fdadb9398..acc45e87b 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -635,7 +635,6 @@ async def close(self, code: int = 1000, reason: str = "") -> None: try: # If close() is canceled during the wait, self.transfer_data_task # is canceled before the timeout elapses. - # This helps closing connections when shutting down a server. 
await asyncio.wait_for( self.transfer_data_task, self.close_timeout, loop=self.loop ) diff --git a/src/websockets/server.py b/src/websockets/server.py index 8e1db9b7c..42487480a 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -46,7 +46,7 @@ parse_subprotocol, ) from .http import USER_AGENT, Headers, HeadersLike, MultipleValuesError, read_request -from .protocol import State, WebSocketCommonProtocol +from .protocol import WebSocketCommonProtocol from .typing import Origin, Subprotocol @@ -692,26 +692,22 @@ async def _close(self) -> None: # register(). See https://bugs.python.org/issue34852 for details. await asyncio.sleep(0) - # Close open connections. fail_connection() will cancel the transfer - # data task, which is expected to cause the handler task to terminate. - for websocket in self.websockets: - if websocket.state is State.OPEN: - websocket.fail_connection(1001) + # Close OPEN connections with status code 1001. Since the server was + # closed, handshake() closes OPENING conections with a HTTP 503 error. + # Wait until all connections are closed. + + # asyncio.wait doesn't accept an empty first argument + if self.websockets: + await asyncio.wait( + [websocket.close(1001) for websocket in self.websockets], loop=self.loop + ) + + # Wait until all connection handlers are complete. # asyncio.wait doesn't accept an empty first argument. if self.websockets: - # The connection handler can terminate before or after the - # connection closes. Wait until both are done to avoid leaking - # running tasks. - # TODO: it would be nicer to wait only for the connection handler - # and let the handler wait for the connection to close. await asyncio.wait( - [websocket.handler_task for websocket in self.websockets] - + [ - websocket.close_connection_task - for websocket in self.websockets - if websocket.state is State.OPEN - ], + [websocket.handler_task for websocket in self.websockets], loop=self.loop, ) diff --git a/tests/test_client_server.py b/tests/test_client_server.py index 7281ec6bd..aa4bebdc2 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -1172,12 +1172,14 @@ def test_server_shuts_down_during_opening_handshake(self): @with_server() def test_server_shuts_down_during_connection_handling(self): with self.temp_client(): + server_ws = next(iter(self.server.websockets)) self.server.close() with self.assertRaises(ConnectionClosed): self.loop.run_until_complete(self.client.recv()) - # Websocket connection terminates with 1001 Going Away. + # Websocket connection closes properly with 1001 Going Away. self.assertEqual(self.client.close_code, 1001) + self.assertEqual(server_ws.close_code, 1001) @with_server() @unittest.mock.patch("websockets.server.WebSocketServerProtocol.close") From fc245f269e7108392a4437b1d1b02ed5b99dd9fa Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 30 Jun 2019 22:30:59 +0200 Subject: [PATCH 122/281] Improve API docs. * Add info fields for parameters and exceptions. * Rewrite significant parts for clarity. * Make minor consistency fixes. Refs #567. 
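Concretely, "info fields" means Sphinx's ``:param:`` / ``:raises:`` style. On a made-up helper, not one of the library's functions, the convention the docstrings move to looks like this::

    def read_exactly(stream, n: int) -> bytes:
        """
        Read exactly ``n`` bytes from ``stream``.

        :param stream: binary stream supporting ``read()``
        :param n: number of bytes to read
        :raises EOFError: if the stream ends before ``n`` bytes are read
        """
        data = stream.read(n)
        if len(data) < n:
            raise EOFError(f"expected {n} bytes, got {len(data)}")
        return data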
--- docs/api.rst | 7 +- docs/changelog.rst | 2 + docs/spelling_wordlist.txt | 4 + src/websockets/auth.py | 33 ++- src/websockets/client.py | 88 +++--- src/websockets/extensions/base.py | 39 ++- .../extensions/permessage_deflate.py | 50 ++-- src/websockets/framing.py | 105 ++++--- src/websockets/handshake.py | 51 ++-- src/websockets/headers.py | 91 ++++--- src/websockets/http.py | 74 +++-- src/websockets/protocol.py | 156 ++++++----- src/websockets/server.py | 256 ++++++++++-------- src/websockets/typing.py | 19 +- src/websockets/uri.py | 30 +- src/websockets/utils.py | 5 +- tests/test_auth.py | 6 +- 17 files changed, 532 insertions(+), 484 deletions(-) diff --git a/docs/api.rst b/docs/api.rst index 56372eb11..28f41cc40 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -13,12 +13,11 @@ of low-level APIs reflecting the two phases of the WebSocket protocol: 2. Data transfer, as framed messages, ending with a closing handshake. The first phase is designed to integrate with existing HTTP software. -``websockets`` provides functions to build and validate the request and -response headers. +``websockets`` provides a minimal implementation to build, parse and validate +HTTP requests and responses. The second phase is the core of the WebSocket protocol. ``websockets`` -provides a standalone implementation on top of ``asyncio`` with a very simple -API. +provides a complete implementation on top of ``asyncio`` with a simple API. For convenience, public APIs can be imported directly from the :mod:`websockets` package, unless noted otherwise. Anything that isn't listed diff --git a/docs/changelog.rst b/docs/changelog.rst index b99d3d058..aa4a76259 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -85,6 +85,8 @@ Also: * Documented how to optimize memory usage. +* Improved API documentation. + 7.0 ... diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index c2988ead5..1eacc491d 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -9,6 +9,8 @@ Bitcoin bufferbloat Bufferbloat bugfix +bytestring +bytestrings changelog cryptocurrency daemonize @@ -22,6 +24,7 @@ MiB nginx permessage pong +pongs Pythonic serializers subclassing @@ -30,6 +33,7 @@ subprotocols TLS Unparse uple +username websocket WebSocket websockets diff --git a/src/websockets/auth.py b/src/websockets/auth.py index 9cb673132..ae204b8d9 100644 --- a/src/websockets/auth.py +++ b/src/websockets/auth.py @@ -1,6 +1,6 @@ """ -The :mod:`websockets.auth` module implements HTTP Basic Authentication as -specified in :rfc:`7235` and :rfc:`7617`. +:mod:`websockets.auth` provides HTTP Basic Authentication according to +:rfc:`7235` and :rfc:`7617`. """ @@ -108,27 +108,32 @@ def basic_auth_protocol_factory( ) ) - ``realm`` indicates the scope of protection. It should be an ASCII-only - :class:`str` because the encoding of non-ASCII characters is undefined. + ``realm`` indicates the scope of protection. It should contain only ASCII + characters because the encoding of non-ASCII characters is undefined. Refer to section 2.2 of :rfc:`7235` for details. - One of ``credentials`` or ``check_credentials`` must be provided but not - both. - - ``credentials`` defines hardcoded authorized credentials. It can be a + ``credentials`` defines hard coded authorized credentials. It can be a ``(username, password)`` pair or a list of such pairs. ``check_credentials`` defines a coroutine that checks whether credentials are authorized. This coroutine receives ``username`` and ``password`` arguments and returns a :class:`bool`. 
- By default, ``basic_auth_protocol_factory`` creates instances of - :class:`BasicAuthWebSocketServerProtocol`. You can override this with the - ``create_protocol`` parameter. + One of ``credentials`` or ``check_credentials`` must be provided but not + both. + + By default, ``basic_auth_protocol_factory`` creates a factory for building + :class:`BasicAuthWebSocketServerProtocol` instances. You can override this + with the ``create_protocol`` parameter. + + :param realm: scope of protection + :param credentials: hard coded credentials + :param check_credentials: coroutine that verifies credentials + :raises TypeError: if the credentials argument has the wrong type """ if (credentials is None) == (check_credentials is None): - raise ValueError("provide either credentials or check_credentials") + raise TypeError("provide either credentials or check_credentials") if credentials is not None: if is_credentials(credentials): @@ -145,10 +150,10 @@ async def check_credentials(username: str, password: str) -> bool: return credentials_dict.get(username) == password else: - raise ValueError(f"invalid credentials argument: {credentials}") + raise TypeError(f"invalid credentials argument: {credentials}") else: - raise ValueError(f"invalid credentials argument: {credentials}") + raise TypeError(f"invalid credentials argument: {credentials}") return functools.partial( create_protocol, realm=realm, check_credentials=check_credentials diff --git a/src/websockets/client.py b/src/websockets/client.py index 10435c1ff..89a624511 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -1,5 +1,5 @@ """ -The :mod:`websockets.client` module defines a simple WebSocket client API. +:mod:`websockets.client` defines the WebSocket client APIs. """ @@ -44,7 +44,7 @@ class WebSocketClientProtocol(WebSocketCommonProtocol): """ - Complete WebSocket client implementation as an :class:`asyncio.Protocol`. + :class:`~asyncio.Protocol` subclass implementing a WebSocket client. This class inherits most of its methods from :class:`~websockets.protocol.WebSocketCommonProtocol`. @@ -91,12 +91,11 @@ async def read_http_response(self) -> Tuple[int, Headers]: """ Read status line and headers from the HTTP response. - Raise :exc:`~websockets.exceptions.InvalidMessage` if the HTTP message - is malformed or isn't an HTTP/1.1 GET request. + If the response contains a body, it may be read from ``self.reader`` + after this coroutine returns. - Don't attempt to read the response body because WebSocket handshake - responses don't have one. If the response contains a body, it may be - read from ``self.reader`` after this coroutine returns. + :raises ~websockets.exceptions.InvalidMessage: if the HTTP message is + malformed or isn't an HTTP/1.1 GET response """ try: @@ -234,21 +233,17 @@ async def handshake( """ Perform the client side of the opening handshake. - If provided, ``origin`` sets the Origin HTTP header. - - If provided, ``available_extensions`` is a list of supported - extensions in the order in which they should be used. - - If provided, ``available_subprotocols`` is a list of supported - subprotocols in order of decreasing preference. - - If provided, ``extra_headers`` sets additional HTTP request headers. - It must be a :class:`~websockets.http.Headers` instance, a - :class:`~collections.abc.Mapping`, or an iterable of ``(name, value)`` - pairs. - - Raise :exc:`~websockets.exceptions.InvalidHandshake` if the handshake - fails. 
+ :param origin: sets the Origin HTTP header + :param available_extensions: list of supported extensions in the order + in which they should be used + :param available_subprotocols: list of supported subprotocols in order + of decreasing preference + :param extra_headers: sets additional HTTP request headers; it must be + a :class:`~websockets.http.Headers` instance, a + :class:`~collections.abc.Mapping`, or an iterable of ``(name, + value)`` pairs + :raises ~websockets.exceptions.InvalidHandshake: if the handshake + fails """ request_headers = Headers() @@ -318,16 +313,15 @@ class Connect: """ Connect to the WebSocket server at the given ``uri``. - :func:`connect` returns an awaitable. Awaiting it yields an instance of - :class:`WebSocketClientProtocol` which can then be used to send and - receive messages. + Awaiting :func:`connect` yields a :class:`WebSocketClientProtocol` which + can then be used to send and receive messages. :func:`connect` can also be used as a asynchronous context manager. In that case, the connection is closed when exiting the context. :func:`connect` is a wrapper around the event loop's - :meth:`~asyncio.BaseEventLoop.create_connection` method. Unknown keyword - arguments are passed to :meth:`~asyncio.BaseEventLoop.create_connection`. + :meth:`~asyncio.loop.create_connection` method. Unknown keyword arguments + are passed to :meth:`~asyncio.loop.create_connection`. For example, you can set the ``ssl`` keyword argument to a :class:`~ssl.SSLContext` to enforce some TLS settings. When connecting to @@ -336,20 +330,21 @@ class Connect: You can connect to a different host and port from those found in ``uri`` by setting ``host`` and ``port`` keyword arguments. This only changes the - destination of the TCP connection; the hostname from ``uri`` is still used - in the TLS handshake for secure connections and in the ``Host`` header. - - The behavior of the ``ping_interval``, ``ping_timeout``, ``close_timeout``, - ``max_size``, ``max_queue``, ``read_limit``, and ``write_limit`` optional - arguments is described in the documentation of - :class:`~websockets.protocol.WebSocketCommonProtocol`. - - The ``create_protocol`` parameter allows customizing the asyncio protocol - that manages the connection. It should be a callable or class accepting - the same arguments as :class:`WebSocketClientProtocol` and returning a - :class:`WebSocketClientProtocol` instance. It defaults to + destination of the TCP connection. The host name from ``uri`` is still + used in the TLS handshake for secure connections and in the ``Host`` HTTP + header. + + The ``create_protocol`` parameter allows customizing the + :class:`~asyncio.Protocol` that manages the connection. It should be a + callable or class accepting the same arguments as + :class:`WebSocketClientProtocol` and returning an instance of + :class:`WebSocketClientProtocol` or a subclass. It defaults to :class:`WebSocketClientProtocol`. + The behavior of ``ping_interval``, ``ping_timeout``, ``close_timeout``, + ``max_size``, ``max_queue``, ``read_limit``, and ``write_limit`` is + described in :class:`~websockets.protocol.WebSocketCommonProtocol`. 
+ :func:`connect` also accepts the following optional arguments: * ``compression`` is a shortcut to configure compression extensions; @@ -360,14 +355,14 @@ class Connect: decreasing preference * ``subprotocols`` is a list of supported subprotocols in order of decreasing preference - * ``extra_headers`` sets additional HTTP request headers – it can be a + * ``extra_headers`` sets additional HTTP request headers; it can be a :class:`~websockets.http.Headers` instance, a :class:`~collections.abc.Mapping`, or an iterable of ``(name, value)`` pairs - :func:`connect` raises :exc:`~websockets.uri.InvalidURI` if ``uri`` is - invalid and :exc:`~websockets.handshake.InvalidHandshake` if the opening - handshake fails. + :raises ~websockets.uri.InvalidURI: if ``uri`` is invalid + :raises ~websockets.handshake.InvalidHandshake: if the opening handshake + fails """ @@ -577,14 +572,15 @@ def unix_connect(path: str, uri: str = "ws://localhost/", **kwargs: Any) -> Conn """ Similar to :func:`connect`, but for connecting to a Unix socket. - ``path`` is the path to the Unix socket. ``uri`` is the WebSocket URI. - This function calls the event loop's - :meth:`~asyncio.AbstractEventLoop.create_unix_connection` method. + :meth:`~asyncio.loop.create_unix_connection` method. It is only available on Unix. It's mainly useful for debugging servers listening on Unix sockets. + :param path: file system path to the Unix socket + :param uri: WebSocket URI + """ return connect(uri=uri, path=path, **kwargs) diff --git a/src/websockets/extensions/base.py b/src/websockets/extensions/base.py index ed847c6bc..7d46687c6 100644 --- a/src/websockets/extensions/base.py +++ b/src/websockets/extensions/base.py @@ -1,6 +1,8 @@ """ -The :mod:`websockets.extensions.base` module defines abstract classes for -implementing extensions as specified in `section 9 of RFC 6455`_. +:mod:`websockets.extensions.base` defines abstract classes for implementing +extensions. + +See `section 9 of RFC 6455`_. .. _section 9 of RFC 6455: http://tools.ietf.org/html/rfc6455#section-9 @@ -32,8 +34,8 @@ def decode(self, frame: Frame, *, max_size: Optional[int] = None) -> Frame: """ Decode an incoming frame. - The ``frame`` parameter and the return value are - :class:`~websockets.framing.Frame` instances. + :param frame: incoming frame + :param max_size: maximum payload size in bytes """ @@ -41,8 +43,7 @@ def encode(self, frame: Frame) -> Frame: """ Encode an outgoing frame. - The ``frame`` parameter and the return value are - :class:`~websockets.framing.Frame` instances. + :param frame: outgoing frame """ @@ -64,7 +65,7 @@ def get_request_params(self) -> List[ExtensionParameter]: """ Build request parameters. - Return a list of (name, value) pairs. + Return a list of ``(name, value)`` pairs. """ @@ -76,14 +77,10 @@ def process_response_params( """ Process response parameters received from the server. - ``params`` is a list of (name, value) pairs. - - ``accepted_extensions`` is a list of previously accepted extensions. - - If parameters are acceptable, return an extension: an instance of a - subclass of :class:`Extension`. - - If they aren't, raise :exc:`~websockets.exceptions.NegotiationError`. + :param params: list of ``(name, value)`` pairs. + :param accepted_extensions: list of previously accepted extensions. + :raises ~websockets.exceptions.NegotiationError: if parameters aren't + acceptable """ @@ -109,16 +106,14 @@ def process_request_params( """ Process request parameters received from the client. - ``params`` is a list of (name, value) pairs. 
- - ``accepted_extensions`` is a list of previously accepted extensions. - To accept the offer, return a 2-uple containing: - - response parameters: a list of (name, value) pairs + - response parameters: a list of ``(name, value)`` pairs - an extension: an instance of a subclass of :class:`Extension` - To reject the offer, raise - :exc:`~websockets.exceptions.NegotiationError`. + :param params: list of ``(name, value)`` pairs. + :param accepted_extensions: list of previously accepted extensions. + :raises ~websockets.exceptions.NegotiationError: to reject the offer, + if parameters aren't acceptable """ diff --git a/src/websockets/extensions/permessage_deflate.py b/src/websockets/extensions/permessage_deflate.py index bd4b3fa53..a41fd56ca 100644 --- a/src/websockets/extensions/permessage_deflate.py +++ b/src/websockets/extensions/permessage_deflate.py @@ -1,6 +1,6 @@ """ -The :mod:`websockets.extensions.permessage_deflate` module implements the -Compression Extensions for WebSocket as specified in :rfc:`7692`. +:mod:`websockets.extensions.permessage_deflate` implements the Compression +Extensions for WebSocket as specified in :rfc:`7692`. """ @@ -257,22 +257,20 @@ def _extract_parameters( class ClientPerMessageDeflateFactory(ClientExtensionFactory): """ - Client-side extension factory for Per-Message Deflate extension. + Client-side extension factory for the Per-Message Deflate extension. - These parameters behave as described in `section 7.1 of RFC 7692`_: - - - ``server_no_context_takeover`` - - ``client_no_context_takeover`` - - ``server_max_window_bits`` - - ``client_max_window_bits`` - - Set them to ``True`` to include them in the negotiation offer without a - value or to an integer value to include them with this value. + Parameters behave as described in `section 7.1 of RFC 7692`_. Set them to + ``True`` to include them in the negotiation offer without a value or to an + integer value to include them with this value. .. _section 7.1 of RFC 7692: https://tools.ietf.org/html/rfc7692#section-7.1 - ``compress_settings`` is an optional :class:`dict` of keyword arguments - for :func:`zlib.compressobj`, excluding ``wbits``. + :param server_no_context_takeover: defaults to ``False`` + :param client_no_context_takeover: defaults to ``False`` + :param server_max_window_bits: optional, defaults to ``None`` + :param client_max_window_bits: optional, defaults to ``None`` + :param compress_settings: optional, keyword arguments for + :func:`zlib.compressobj`, excluding ``wbits`` """ @@ -284,7 +282,7 @@ def __init__( client_no_context_takeover: bool = False, server_max_window_bits: Optional[int] = None, client_max_window_bits: Optional[Union[int, bool]] = None, - compress_settings: Optional[Dict[Any, Any]] = None, + compress_settings: Optional[Dict[str, Any]] = None, ) -> None: """ Configure the Per-Message Deflate extension factory. @@ -432,20 +430,18 @@ class ServerPerMessageDeflateFactory(ServerExtensionFactory): """ Server-side extension factory for the Per-Message Deflate extension. - These parameters behave as described in `section 7.1 of RFC 7692`_: - - - ``server_no_context_takeover`` - - ``client_no_context_takeover`` - - ``server_max_window_bits`` - - ``client_max_window_bits`` - - Set them to ``True`` to include them in the negotiation offer without a - value or to an integer value to include them with this value. + Parameters behave as described in `section 7.1 of RFC 7692`_. 
Set them to + ``True`` to include them in the negotiation offer without a value or to an + integer value to include them with this value. .. _section 7.1 of RFC 7692: https://tools.ietf.org/html/rfc7692#section-7.1 - ``compress_settings`` is an optional :class:`dict` of keyword arguments - for :func:`zlib.compressobj`, excluding ``wbits``. + :param server_no_context_takeover: defaults to ``False`` + :param client_no_context_takeover: defaults to ``False`` + :param server_max_window_bits: optional, defaults to ``None`` + :param client_max_window_bits: optional, defaults to ``None`` + :param compress_settings: optional, keyword arguments for + :func:`zlib.compressobj`, excluding ``wbits`` """ @@ -457,7 +453,7 @@ def __init__( client_no_context_takeover: bool = False, server_max_window_bits: Optional[int] = None, client_max_window_bits: Optional[int] = None, - compress_settings: Optional[Dict[Any, Any]] = None, + compress_settings: Optional[Dict[str, Any]] = None, ) -> None: """ Configure the Per-Message Deflate extension factory. diff --git a/src/websockets/framing.py b/src/websockets/framing.py index d668e0c52..ec87665ef 100644 --- a/src/websockets/framing.py +++ b/src/websockets/framing.py @@ -1,10 +1,11 @@ """ -The :mod:`websockets.framing` module implements data framing as specified in -`section 5 of RFC 6455`_. +:mod:`websockets.framing` reads and writes WebSocket frames. It deals with a single frame at a time. Anything that depends on the sequence of frames is implemented in :mod:`websockets.protocol`. +See `section 5 of RFC 6455`_. + .. _section 5 of RFC 6455: http://tools.ietf.org/html/rfc6455#section-5 """ @@ -67,16 +68,15 @@ class Frame(FrameData): """ WebSocket frame. - * ``fin`` is the FIN bit - * ``rsv1`` is the RSV1 bit - * ``rsv2`` is the RSV2 bit - * ``rsv3`` is the RSV3 bit - * ``opcode`` is the opcode - * ``data`` is the payload data + :param bool fin: FIN bit + :param bool rsv1: RSV1 bit + :param bool rsv2: RSV2 bit + :param bool rsv3: RSV3 bit + :param int opcode: opcode + :param bytes data: payload data - Only these fields are needed by higher level code. The MASK bit, payload - length and masking-key are handled on the fly by :meth:`read` and - :meth:`write`. + Only these fields are needed. The MASK bit, payload length and masking-key + are handled on the fly by :meth:`read` and :meth:`write`. """ @@ -101,24 +101,20 @@ async def read( extensions: Optional[Sequence["websockets.extensions.base.Extension"]] = None, ) -> "Frame": """ - Read a WebSocket frame and return a :class:`Frame` object. - - ``reader`` is a coroutine taking an integer argument and reading - exactly this number of bytes, unless the end of file is reached. - - ``mask`` is a :class:`bool` telling whether the frame should be masked - i.e. whether the read happens on the server side. - - If ``max_size`` is set and the payload exceeds this size in bytes, - :exc:`~websockets.exceptions.PayloadTooBig` is raised. - - If ``extensions`` is provided, it's a list of classes with an - ``decode()`` method that transform the frame and return a new frame. - They are applied in reverse order. - - This function validates the frame before returning it and raises - :exc:`~websockets.exceptions.WebSocketProtocolError` if it contains - incorrect values. + Read a WebSocket frame. + + :param reader: coroutine that reads exactly the requested number of + bytes, unless the end of file is reached + :param mask: whether the frame should be masked i.e. 
whether the read + happens on the server side + :param max_size: maximum payload size in bytes + :param extensions: list of classes with a ``decode()`` method that + transforms the frame and return a new frame; extensions are applied + in reverse order + :raises ~websockets.exceptions.PayloadTooBig: if the frame exceeds + ``max_size`` + :raises ~websockets.exceptions.WebSocketProtocolError: if the frame + contains incorrect values """ # Read the header. @@ -175,20 +171,15 @@ def write( """ Write a WebSocket frame. - ``frame`` is the :class:`Frame` object to write. - - ``writer`` is a function accepting bytes. - - ``mask`` is a :class:`bool` telling whether the frame should be masked - i.e. whether the write happens on the client side. - - If ``extensions`` is provided, it's a list of classes with an - ``encode()`` method that transform the frame and return a new frame. - They are applied in order. - - This function validates the frame before sending it and raises - :exc:`~websockets.exceptions.WebSocketProtocolError` if it contains - incorrect values. + :param frame: frame to write + :param writer: function that writes bytes + :param mask: whether the frame should be masked i.e. whether the write + happens on the client side + :param extensions: list of classes with an ``encode()`` method that + transform the frame and return a new frame; extensions are applied + in order + :raises ~websockets.exceptions.WebSocketProtocolError: if the frame + contains incorrect values """ # The first parameter is called `frame` rather than `self`, @@ -242,10 +233,10 @@ def write( def check(frame) -> None: """ - Check that this frame contains acceptable values. + Check that reserved bits and opcode have acceptable values. - Raise :exc:`~websockets.exceptions.WebSocketProtocolError` if this - frame contains incorrect values. + :raises ~websockets.exceptions.WebSocketProtocolError: if a reserved + bit or the opcode is invalid """ # The first parameter is called `frame` rather than `self`, @@ -277,7 +268,7 @@ def prepare_data(data: Data) -> Tuple[int, bytes]: If ``data`` is a bytes-like object, return ``OP_BINARY`` and a bytes-like object. - Raise :exc:`TypeError` for other inputs. + :raises TypeError: if ``data`` doesn't have a supported type """ if isinstance(data, str): @@ -297,14 +288,14 @@ def encode_data(data: Data) -> bytes: """ Convert a string or byte-like object to bytes. - This function is designed for ping and pon g frames. + This function is designed for ping and pong frames. If ``data`` is a :class:`str`, return a :class:`bytes` object encoding ``data`` in UTF-8. If ``data`` is a bytes-like object, return a :class:`bytes` object. - Raise :exc:`TypeError` for other inputs. + :raises TypeError: if ``data`` doesn't have a supported type """ if isinstance(data, str): @@ -319,13 +310,12 @@ def encode_data(data: Data) -> bytes: def parse_close(data: bytes) -> Tuple[int, str]: """ - Parse the data in a close frame. + Parse the payload from a close frame. - Return ``(code, reason)`` when ``code`` is an :class:`int` and ``reason`` - a :class:`str`. + Return ``(code, reason)``. - Raise :exc:`~websockets.exceptions.WebSocketProtocolError` or - :exc:`UnicodeDecodeError` if the data is invalid. 
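
As a quick illustration of the close-frame helpers introduced here (``parse_close`` above and ``serialize_close``, defined just below), the following sketch round-trips an arbitrary code and reason:

.. code:: python

    from websockets.framing import parse_close, serialize_close

    # serialize_close packs the code as a 16-bit integer followed by the
    # UTF-8 encoded reason; parse_close reverses the operation.
    payload = serialize_close(1000, "goodbye")
    assert parse_close(payload) == (1000, "goodbye")
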
+ :raises ~websockets.exceptions.WebSocketProtocolError: if data is ill-formed + :raises UnicodeDecodeError: if the reason isn't valid UTF-8 """ length = len(data) @@ -343,7 +333,7 @@ def parse_close(data: bytes) -> Tuple[int, str]: def serialize_close(code: int, reason: str) -> bytes: """ - Serialize the data for a close frame. + Serialize the payload for a close frame. This is the reverse of :func:`parse_close`. @@ -354,7 +344,10 @@ def serialize_close(code: int, reason: str) -> bytes: def check_close(code: int) -> None: """ - Check the close code for a close frame. + Check that the close code has an acceptable value for a close frame. + + :raises ~websockets.exceptions.WebSocketProtocolError: if the close code + is invalid """ if not (code in EXTERNAL_CLOSE_CODES or 3000 <= code < 5000): diff --git a/src/websockets/handshake.py b/src/websockets/handshake.py index f04d81d59..17332d155 100644 --- a/src/websockets/handshake.py +++ b/src/websockets/handshake.py @@ -1,15 +1,9 @@ """ -The :mod:`websockets.handshake` module deals with the WebSocket opening -handshake according to `section 4 of RFC 6455`_. +:mod:`websockets.handshake` provides helpers for the WebSocket handshake. -.. _section 4 of RFC 6455: http://tools.ietf.org/html/rfc6455#section-4 - -Functions defined in this module manipulate HTTP headers. The ``headers`` -argument must implement ``get`` and ``__setitem__`` and ``get`` — a small -subset of the :class:`~collections.abc.MutableMapping` abstract base class. +See `section 4 of RFC 6455`_. -Headers names and values are :class:`str` objects containing only ASCII -characters. +.. _section 4 of RFC 6455: http://tools.ietf.org/html/rfc6455#section-4 Some checks cannot be performed because they depend too much on the context; instead, they're documented below. @@ -50,7 +44,10 @@ def build_request(headers: Headers) -> str: """ Build a handshake request to send to the server. - Return the ``key`` which must be passed to :func:`check_response`. + Update request headers passed in argument. + + :param headers: request headers + :returns: ``key`` which must be passed to :func:`check_response` """ raw_key = bytes(random.getrandbits(8) for _ in range(16)) @@ -66,16 +63,15 @@ def check_request(headers: Headers) -> str: """ Check a handshake request received from the client. - If the handshake is valid, this function returns the ``key`` which must be - passed to :func:`build_response`. - - Otherwise it raises an :exc:`~websockets.exceptions.InvalidHandshake` - exception and the server must return an error like 400 Bad Request. - This function doesn't verify that the request is an HTTP/1.1 or higher GET - request and doesn't perform Host and Origin checks. These controls are - usually performed earlier in the HTTP request handling code. They're the - responsibility of the caller. + request and doesn't perform ``Host`` and ``Origin`` checks. These controls + are usually performed earlier in the HTTP request handling code. They're + the responsibility of the caller. + + :param headers: request headers + :returns: ``key`` which must be passed to :func:`build_response` + :raises ~websockets.exceptions.InvalidHandshake: if the handshake request + is invalid; then the server must return 400 Bad Request error """ connection = sum( @@ -127,7 +123,10 @@ def build_response(headers: Headers, key: str) -> None: """ Build a handshake response to send to the client. - ``key`` comes from :func:`check_request`. + Update response headers passed in argument. 
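
The sketch below shows how these handshake helpers fit together across the two sides; it only manipulates headers, with no network I/O involved:

.. code:: python

    from websockets.handshake import (
        build_request, check_request, build_response, check_response,
    )
    from websockets.http import Headers

    # Client side: build the request headers and remember the key.
    request_headers = Headers()
    key = build_request(request_headers)

    # Server side: validate the request, then build the response.
    assert check_request(request_headers) == key
    response_headers = Headers()
    build_response(response_headers, key)

    # Client side: validate the response; raises InvalidHandshake on failure.
    check_response(response_headers, key)
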
+ + :param headers: response headers + :param key: comes from :func:`check_request` """ headers["Upgrade"] = "websocket" @@ -139,17 +138,15 @@ def check_response(headers: Headers, key: str) -> None: """ Check a handshake response received from the server. - ``key`` comes from :func:`build_request`. - - If the handshake is valid, this function returns ``None``. - - Otherwise it raises an :exc:`~websockets.exceptions.InvalidHandshake` - exception. - This function doesn't verify that the response is an HTTP/1.1 or higher response with a 101 status code. These controls are the responsibility of the caller. + :param headers: response headers + :param key: comes from :func:`build_request` + :raises ~websockets.exceptions.InvalidHandshake: if the handshake response + is invalid + """ connection = sum( [parse_connection(value) for value in headers.get_all("Connection")], [] diff --git a/src/websockets/headers.py b/src/websockets/headers.py index 536cab592..ac850654e 100644 --- a/src/websockets/headers.py +++ b/src/websockets/headers.py @@ -1,13 +1,12 @@ """ -The :mod:`websockets.headers` module provides parsers and serializers for HTTP -headers used in WebSocket handshake messages. +:mod:`websockets.headers` provides parsers and serializers for HTTP headers +used in WebSocket handshake messages. -Its functions cannot be imported from :mod:`websockets`. They must be imported +These APIs cannot be imported from :mod:`websockets`. They must be imported from :mod:`websockets.headers`. """ - import base64 import binascii import re @@ -80,7 +79,7 @@ def parse_token(header: str, pos: int, header_name: str) -> Tuple[str, int]: Return the token value and the new position. - Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. + :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs. """ match = _token_re.match(header, pos) @@ -103,7 +102,7 @@ def parse_quoted_string(header: str, pos: int, header_name: str) -> Tuple[str, i Return the unquoted value and the new position. - Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. + :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs. """ match = _quoted_string_re.match(header, pos) @@ -153,7 +152,7 @@ def parse_list( Return a list of items. - Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. + :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs. """ # Per https://tools.ietf.org/html/rfc7230#section-7, "a recipient MUST @@ -204,7 +203,7 @@ def parse_connection_option( Return the protocol value and the new position. - Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. + :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs. """ item, pos = parse_token(header, pos, header_name) @@ -215,9 +214,10 @@ def parse_connection(header: str) -> List[ConnectionOption]: """ Parse a ``Connection`` header. - Return a list of connection options. + Return a list of HTTP connection options. - Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. + :param header: value of the ``Connection`` header + :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs. """ return parse_list(parse_connection_option, header, 0, "Connection") @@ -236,7 +236,7 @@ def parse_upgrade_protocol( Return the protocol value and the new position. - Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. + :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs. 
""" match = _protocol_re.match(header, pos) @@ -249,9 +249,10 @@ def parse_upgrade(header: str) -> List[UpgradeProtocol]: """ Parse an ``Upgrade`` header. - Return a list of protocols. + Return a list of HTTP protocols. - Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. + :param header: value of the ``Upgrade`` header + :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs. """ return parse_list(parse_upgrade_protocol, header, 0, "Upgrade") @@ -265,7 +266,7 @@ def parse_extension_item_param( Return a ``(name, value)`` pair and the new position. - Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. + :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs. """ # Extract parameter name. @@ -300,7 +301,7 @@ def parse_extension_item( Return an ``(extension name, parameters)`` pair, where ``parameters`` is a list of ``(name, value)`` pairs, and the new position. - Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. + :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs. """ # Extract extension name. @@ -319,7 +320,7 @@ def parse_extension(header: str) -> List[ExtensionHeader]: """ Parse a ``Sec-WebSocket-Extensions`` header. - Return a value with the following format:: + Return a list of WebSocket extensions and their parameters in this format:: [ ( @@ -334,13 +335,13 @@ def parse_extension(header: str) -> List[ExtensionHeader]: Parameter values are ``None`` when no value is provided. - Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. + :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs. """ return parse_list(parse_extension_item, header, 0, "Sec-WebSocket-Extensions") -parse_extension_list = parse_extension # alias for backwards-compatibility +parse_extension_list = parse_extension # alias for backwards compatibility def build_extension_item(name: str, parameters: List[ExtensionParameter]) -> str: @@ -362,7 +363,7 @@ def build_extension_item(name: str, parameters: List[ExtensionParameter]) -> str def build_extension(extensions: Sequence[ExtensionHeader]) -> str: """ - Unparse a ``Sec-WebSocket-Extensions`` header. + Build a ``Sec-WebSocket-Extensions`` header. This is the reverse of :func:`parse_extension`. @@ -372,7 +373,7 @@ def build_extension(extensions: Sequence[ExtensionHeader]) -> str: ) -build_extension_list = build_extension # alias for backwards-compatibility +build_extension_list = build_extension # alias for backwards compatibility def parse_subprotocol_item( @@ -383,7 +384,7 @@ def parse_subprotocol_item( Return the subprotocol value and the new position. - Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. + :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs. """ item, pos = parse_token(header, pos, header_name) @@ -394,18 +395,20 @@ def parse_subprotocol(header: str) -> List[Subprotocol]: """ Parse a ``Sec-WebSocket-Protocol`` header. - Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. + Return a list of WebSocket subprotocols. + + :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs. 
""" return parse_list(parse_subprotocol_item, header, 0, "Sec-WebSocket-Protocol") -parse_subprotocol_list = parse_subprotocol # alias for backwards-compatibility +parse_subprotocol_list = parse_subprotocol # alias for backwards compatibility def build_subprotocol(protocols: Sequence[Subprotocol]) -> str: """ - Unparse a ``Sec-WebSocket-Protocol`` header. + Build a ``Sec-WebSocket-Protocol`` header. This is the reverse of :func:`parse_subprotocol`. @@ -413,12 +416,14 @@ def build_subprotocol(protocols: Sequence[Subprotocol]) -> str: return ", ".join(protocols) -build_subprotocol_list = build_subprotocol # alias for backwards-compatibility +build_subprotocol_list = build_subprotocol # alias for backwards compatibility def build_www_authenticate_basic(realm: str) -> str: """ - Build an WWW-Authenticate header for HTTP Basic Auth. + Build a ``WWW-Authenticate`` header for HTTP Basic Auth. + + :param realm: authentication realm """ # https://tools.ietf.org/html/rfc7617#section-2 @@ -427,18 +432,6 @@ def build_www_authenticate_basic(realm: str) -> str: return f"Basic realm={realm}, charset={charset}" -def build_authorization_basic(username: str, password: str) -> str: - """ - Build an Authorization header for HTTP Basic Auth. - - """ - # https://tools.ietf.org/html/rfc7617#section-2 - assert ":" not in username - user_pass = f"{username}:{password}" - basic_credentials = base64.b64encode(user_pass.encode()).decode() - return "Basic " + basic_credentials - - _token68_re = re.compile(r"[A-Za-z0-9-._~+/]+=*") @@ -448,7 +441,7 @@ def parse_token68(header: str, pos: int, header_name: str) -> Tuple[str, int]: Return the token value and the new position. - Raise :exc:`~websockets.exceptions.InvalidHeaderFormat` on invalid inputs. + :raises ~websockets.exceptions.InvalidHeaderFormat: on invalid inputs. """ match = _token68_re.match(header, pos) @@ -468,10 +461,14 @@ def parse_end(header: str, pos: int, header_name: str) -> None: def parse_authorization_basic(header: str) -> Tuple[str, str]: """ - Parse an Authorization header for HTTP Basic Auth. + Parse an ``Authorization`` header for HTTP Basic Auth. Return a ``(username, password)`` tuple. + :param header: value of the ``Authorization`` header + :raises InvalidHeaderFormat: on invalid inputs + :raises InvalidHeaderValue: on unsupported inputs + """ # https://tools.ietf.org/html/rfc7235#section-2.1 # https://tools.ietf.org/html/rfc7617#section-2 @@ -500,3 +497,17 @@ def parse_authorization_basic(header: str) -> Tuple[str, str]: ) from None return username, password + + +def build_authorization_basic(username: str, password: str) -> str: + """ + Build an ``Authorization`` header for HTTP Basic Auth. + + This is the reverse of :func:`parse_authorization_basic`. + + """ + # https://tools.ietf.org/html/rfc7617#section-2 + assert ":" not in username + user_pass = f"{username}:{password}" + basic_credentials = base64.b64encode(user_pass.encode()).decode() + return "Basic " + basic_credentials diff --git a/src/websockets/http.py b/src/websockets/http.py index 46b09c2e6..e78a149ed 100644 --- a/src/websockets/http.py +++ b/src/websockets/http.py @@ -1,8 +1,8 @@ """ -The :mod:`websockets.http` module provides basic HTTP parsing and -serialization. It is merely adequate for WebSocket handshake messages. +:mod:`websockets.http` module provides basic HTTP/1.1 support. It is merely +:adequate for WebSocket handshake messages. -Its functions cannot be imported from :mod:`websockets`. They must be imported +These APIs cannot be imported from :mod:`websockets`. 
They must be imported from :mod:`websockets.http`. """ @@ -26,10 +26,10 @@ __all__ = [ - "Headers", - "MultipleValuesError", "read_request", "read_response", + "Headers", + "MultipleValuesError", "USER_AGENT", ] @@ -69,22 +69,21 @@ def d(value: bytes) -> str: async def read_request(stream: asyncio.StreamReader) -> Tuple[str, "Headers"]: """ - Read an HTTP/1.1 GET request from ``stream``. - - ``stream`` is an :class:`~asyncio.StreamReader`. - - Return ``(path, headers)`` where ``path`` is a :class:`str` and - ``headers`` is a :class:`Headers` instance. + Read an HTTP/1.1 GET request and returns ``(path, headers)``. ``path`` isn't URL-decoded or validated in any way. - Non-ASCII characters are represented with surrogate escapes. + ``path`` and ``headers`` are expected to contain only ASCII characters. + Other characters are represented with surrogate escapes. - Raise an exception if the request isn't well formatted. + :func:`read_request` doesn't attempt to read the request body because + WebSocket handshake requests don't have one. If the request contains a + body, it may be read from ``stream`` after this coroutine returns. - Don't attempt to read the request body because WebSocket handshake - requests don't have one. If the request contains a body, it may be - read from ``stream`` after this coroutine returns. + :param stream: input to read the request from + :raises EOFError: if the connection is closed without a full HTTP request + :raises SecurityError: if the request exceeds a security limit + :raises ValueError: if the request isn't well formatted """ # https://tools.ietf.org/html/rfc7230#section-3.1.1 @@ -116,21 +115,19 @@ async def read_request(stream: asyncio.StreamReader) -> Tuple[str, "Headers"]: async def read_response(stream: asyncio.StreamReader) -> Tuple[int, str, "Headers"]: """ - Read an HTTP/1.1 response from ``stream``. + Read an HTTP/1.1 response and returns ``(status_code, reason, headers)``. - ``stream`` is an :class:`~asyncio.StreamReader`. + ``reason`` and ``headers`` are expected to contain only ASCII characters. + Other characters are represented with surrogate escapes. - Return ``(status_code, reason, headers)`` where ``status_code`` is an - :class:`int`, ``reason`` is a :class:`str`, and ``headers`` is a - :class:`Headers` instance. + :func:`read_request` doesn't attempt to read the response body because + WebSocket handshake responses don't have one. If the response contains a + body, it may be read from ``stream`` after this coroutine returns. - Non-ASCII characters are represented with surrogate escapes. - - Raise an exception if the response isn't well formatted. - - Don't attempt to read the response body, because WebSocket handshake - responses don't have one. If the response contains a body, it may be - read from ``stream`` after this coroutine returns. + :param stream: input to read the response from + :raises EOFError: if the connection is closed without a full HTTP response + :raises SecurityError: if the response exceeds a security limit + :raises ValueError: if the response isn't well formatted """ # https://tools.ietf.org/html/rfc7230#section-3.1.2 @@ -169,10 +166,6 @@ async def read_headers(stream: asyncio.StreamReader) -> "Headers": """ Read HTTP headers from ``stream``. - ``stream`` is an :class:`~asyncio.StreamReader`. - - Return a :class:`Headers` instance - Non-ASCII characters are represented with surrogate escapes. """ @@ -213,9 +206,7 @@ async def read_line(stream: asyncio.StreamReader) -> bytes: """ Read a single line from ``stream``. 
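
As a self-contained sketch of ``read_response``, a canned response can be fed into an :class:`~asyncio.StreamReader`; the response bytes are illustrative:

.. code:: python

    import asyncio
    from websockets.http import read_response

    async def demo():
        stream = asyncio.StreamReader()
        stream.feed_data(
            b"HTTP/1.1 101 Switching Protocols\r\n"
            b"Upgrade: websocket\r\n"
            b"Connection: Upgrade\r\n"
            b"\r\n"
        )
        stream.feed_eof()
        status_code, reason, headers = await read_response(stream)
        assert status_code == 101 and reason == "Switching Protocols"

    asyncio.get_event_loop().run_until_complete(demo())
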
- ``stream`` is an :class:`~asyncio.StreamReader`. - - Return :class:`bytes` without CRLF. + CRLF is stripped from the return value. """ # Security: this is bounded by the StreamReader's limit (default = 32 KiB). @@ -244,7 +235,7 @@ def __str__(self) -> str: class Headers(MutableMapping[str, str]): """ - Data structure for working with HTTP headers efficiently. + Efficient data structure for manipulating HTTP headers. A :class:`list` of ``(name, values)`` is inefficient for lookups. @@ -273,9 +264,10 @@ class Headers(MutableMapping[str, str]): As long as no header occurs multiple times, :class:`Headers` behaves like :class:`dict`, except keys are lower-cased to provide case-insensitivity. - :meth:`get_all()` returns a list of all values for a header and - :meth:`raw_items()` returns an iterator of ``(name, values)`` pairs, - similar to :meth:`http.client.HTTPMessage`. + Two methods support support manipulating multiple values explicitly: + + - :meth:`get_all` returns a list of all values for a header; + - :meth:`raw_items` returns an iterator of ``(name, values)`` pairs. """ @@ -348,12 +340,14 @@ def get_all(self, key: str) -> List[str]: """ Return the (possibly empty) list of all values for a header. + :param key: header name + """ return self._dict.get(key.lower(), []) def raw_items(self) -> Iterator[Tuple[str, str]]: """ - Return an iterator of (header name, header value). + Return an iterator of all values as ``(name, value)`` pairs. """ return iter(self._list) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index acc45e87b..fa369450b 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -1,6 +1,7 @@ """ -The :mod:`websockets.protocol` module handles WebSocket control and data -frames as specified in `sections 4 to 8 of RFC 6455`_. +:mod:`websockets.protocol` handles WebSocket control and data frames. + +See `sections 4 to 8 of RFC 6455`_. .. _sections 4 to 8 of RFC 6455: http://tools.ietf.org/html/rfc6455#section-4 @@ -62,16 +63,24 @@ class State(enum.IntEnum): class WebSocketCommonProtocol(asyncio.StreamReaderProtocol): """ - This class implements common parts of the WebSocket protocol. + :class:`~asyncio.Protocol` subclass implementing the data transfer phase. + + Once the WebSocket connection is established, during the data transfer + phase, the protocol is almost symmetrical between the server side and the + client side. :class:`WebSocketCommonProtocol` implements logic that's + shared between servers and clients.. + + Subclasses such as :class:`~websockets.server.WebSocketServerProtocol` and + :class:`~websockets.client.WebSocketClientProtocol` implement the opening + handshake, which is different between servers and clients. - It assumes that the WebSocket connection is established. The handshake is - managed in subclasses such as - :class:`~websockets.server.WebSocketServerProtocol` and - :class:`~websockets.client.WebSocketClientProtocol`. + :class:`WebSocketCommonProtocol` performs four functions: - It runs a task that stores incoming data frames in a queue and deals with - control frames automatically. It sends outgoing data frames and performs - the closing handshake. + * It runs a task that stores incoming data frames in a queue and makes + them available with the :meth:`recv` coroutine. + * It sends outgoing data frames with the :meth:`send` coroutine. + * It deals with control frames automatically. + * It performs the closing handshake. 
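
Returning to the :class:`Headers` data structure documented above, a short sketch of the multi-value behavior; header names and values are arbitrary:

.. code:: python

    from websockets.http import Headers, MultipleValuesError

    headers = Headers()
    headers["Sec-WebSocket-Extensions"] = "permessage-deflate"
    headers["Sec-WebSocket-Extensions"] = "x-custom-extension"  # appends, doesn't overwrite

    headers.get_all("Sec-WebSocket-Extensions")
    # ['permessage-deflate', 'x-custom-extension']

    try:
        headers["Sec-WebSocket-Extensions"]
    except MultipleValuesError:
        pass  # __getitem__ refuses to guess which value was meant
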
:class:`WebSocketCommonProtocol` supports asynchronous iteration:: @@ -81,20 +90,23 @@ class WebSocketCommonProtocol(asyncio.StreamReaderProtocol): The iterator yields incoming messages. It exits normally when the connection is closed with the close code 1000 (OK) or 1001 (going away). It raises a :exc:`~websockets.exceptions.ConnectionClosedError` exception - when the connection is closed with any other status code. + when the connection is closed with any other code. - The ``host``, ``port`` and ``secure`` parameters are simply stored as - attributes for handlers that need them. + When initializing a :class:`WebSocketCommonProtocol`, the ``host``, + ``port``, and ``secure`` parameters are stored as attributes for backwards + compatibility. Consider using :attr:`local_address` on the server side and + :attr:`remote_address` on the client side instead. Once the connection is open, a `Ping frame`_ is sent every ``ping_interval`` seconds. This serves as a keepalive. It helps keeping the connection open, especially in the presence of proxies with short - timeouts. Set ``ping_interval`` to ``None`` to disable this behavior. + timeouts on inactive connections. Set ``ping_interval`` to ``None`` to + disable this behavior. .. _Ping frame: https://tools.ietf.org/html/rfc6455#section-5.5.2 If the corresponding `Pong frame`_ isn't received within ``ping_timeout`` - seconds, the connection is considered unusable and is closed with status + seconds, the connection is considered unusable and is closed with code 1011. This ensures that the remote endpoint remains responsive. Set ``ping_timeout`` to ``None`` to disable this behavior. @@ -102,11 +114,11 @@ class WebSocketCommonProtocol(asyncio.StreamReaderProtocol): The ``close_timeout`` parameter defines a maximum wait time in seconds for completing the closing handshake and terminating the TCP connection. - :meth:`close()` completes in at most ``4 * close_timeout`` on the server + :meth:`close` completes in at most ``4 * close_timeout`` on the server side and ``5 * close_timeout`` on the client side. ``close_timeout`` needs to be a parameter of the protocol because - websockets usually calls :meth:`close()` implicitly: + websockets usually calls :meth:`close` implicitly: - on the server side, when the connection handler terminates, - on the client side, when exiting the context manager for the connection. @@ -115,26 +127,26 @@ class WebSocketCommonProtocol(asyncio.StreamReaderProtocol): The ``max_size`` parameter enforces the maximum size for incoming messages in bytes. The default value is 1 MiB. ``None`` disables the limit. If a - message larger than the maximum size is received, :meth:`recv()` will + message larger than the maximum size is received, :meth:`recv` will raise :exc:`~websockets.exceptions.ConnectionClosedError` and the - connection will be closed with status code 1009. + connection will be closed with code 1009. The ``max_queue`` parameter sets the maximum length of the queue that holds incoming messages. The default value is ``32``. ``None`` disables the limit. Messages are added to an in-memory queue when they're received; - then :meth:`recv()` pops from that queue. In order to prevent excessive + then :meth:`recv` pops from that queue. In order to prevent excessive memory consumption when messages are received faster than they can be processed, the queue must be bounded. If the queue fills up, the protocol - stops processing incoming data until :meth:`recv()` is called. 
In this + stops processing incoming data until :meth:`recv` is called. In this situation, various receive buffers (at least in ``asyncio`` and in the OS) will fill up, then the TCP receive window will shrink, slowing down transmission to avoid packet loss. Since Python can use up to 4 bytes of memory to represent a single - character, each websocket connection may use up to ``4 * max_size * - max_queue`` bytes of memory to store incoming messages. By default, - this is 128 MiB. You may want to lower the limits, depending on your - application's requirements. + character, each connection may use up to ``4 * max_size * max_queue`` + bytes of memory to store incoming messages. By default, this is 128 MiB. + You may want to lower the limits, depending on your application's + requirements. The ``read_limit`` argument sets the high-water limit of the buffer for incoming bytes. The low-water limit is half the high-water limit. The @@ -154,14 +166,14 @@ class WebSocketCommonProtocol(asyncio.StreamReaderProtocol): :attr:`request_headers` and :attr:`response_headers` attributes, which are :class:`~websockets.http.Headers` instances. - These attributes must be treated as immutable. - If a subprotocol was negotiated, it's available in the :attr:`subprotocol` attribute. - Once the connection is closed, the status code is available in the + Once the connection is closed, the code is available in the :attr:`close_code` attribute and the reason in :attr:`close_reason`. + All these attributes must be treated as read-only. + """ # There are only two differences between the client-side and server-side @@ -187,7 +199,7 @@ def __init__( legacy_recv: bool = False, timeout: Optional[float] = None, ) -> None: - # Backwards-compatibility: close_timeout used to be called timeout. + # Backwards compatibility: close_timeout used to be called timeout. if timeout is None: timeout = 10 else: @@ -229,7 +241,7 @@ def __init__( # This class implements the data transfer and closing handshake, which # are shared between the client-side and the server-side. # Subclasses implement the opening handshake and, on success, execute - # :meth:`connection_open()` to change the state to OPEN. + # :meth:`connection_open` to change the state to OPEN. self.state = State.CONNECTING logger.debug("%s - state = CONNECTING", self.side) @@ -248,7 +260,7 @@ def __init__( self.close_reason: str # Completed when the connection state becomes CLOSED. Translates the - # :meth:`connection_lost()` callback to a :class:`~asyncio.Future` + # :meth:`connection_lost` callback to a :class:`~asyncio.Future` # that can be awaited. (Other :class:`~asyncio.Protocol` callbacks are # translated by ``self.stream_reader``). self.connection_lost_waiter: asyncio.Future[None] = loop.create_future() @@ -341,11 +353,13 @@ def remote_address(self) -> Any: @property def open(self) -> bool: """ - This property is ``True`` when the connection is usable. + ``True`` when the connection is usable. - It may be used to detect disconnections but this is discouraged per - the EAFP_ principle. When ``open`` is ``False``, using the connection - raises a :exc:`~websockets.exceptions.ConnectionClosed` exception. + It may be used to detect disconnections. However, this approach is + discouraged per the EAFP_ principle. + + When ``open`` is ``False``, using the connection raises a + :exc:`~websockets.exceptions.ConnectionClosed` exception. .. 
_EAFP: https://docs.python.org/3/glossary.html#term-eafp @@ -355,7 +369,7 @@ def open(self) -> bool: @property def closed(self) -> bool: """ - This property is ``True`` once the connection is closed. + ``True`` once the connection is closed. Be aware that both :attr:`open` and :attr:`closed` are ``False`` during the opening and closing sequences. @@ -392,16 +406,16 @@ async def __aiter__(self) -> AsyncIterator[Data]: async def recv(self) -> Data: """ - This coroutine receives the next message. + Receive the next message. - It returns a :class:`str` for a text frame and :class:`bytes` for a - binary frame. + Return a :class:`str` for a text frame and :class:`bytes` for a binary + frame. When the end of the message stream is reached, :meth:`recv` raises :exc:`~websockets.exceptions.ConnectionClosed`. Specifically, it raises :exc:`~websockets.exceptions.ConnectionClosedOK` after a normal connection closure and - :exc:`~websockets.exceptions.ConnectionClosedError`after a protocol + :exc:`~websockets.exceptions.ConnectionClosedError` after a protocol error or a network failure. .. versionchanged:: 3.0 @@ -414,9 +428,9 @@ async def recv(self) -> Data: makes it possible to enforce a timeout by wrapping :meth:`recv` in :func:`~asyncio.wait_for`. - .. versionchanged:: 7.0 - - Calling :meth:`recv` concurrently raises :exc:`RuntimeError`. + :raises ~websockets.exceptions.ConnectionClosed: when the + connection is closed + :raises RuntimeError: if two coroutines call :meth:`recv` concurrently """ if self._pop_message_waiter is not None: @@ -473,19 +487,21 @@ async def send( self, message: Union[Data, Iterable[Data], AsyncIterable[Data]] ) -> None: """ - This coroutine sends a message. + Send a message. - It sends a string (:class:`str`) as a text frame and a bytes-like - object (:class:`bytes`, :class:`bytearray`, or :class:`memoryview`) - as a binary frame. + A string (:class:`str`) is sent as a `Text frame`_. A bytestring or + bytes-like object (:class:`bytes`, :class:`bytearray`, or + :class:`memoryview`) is sent as a `Binary frame`_. - It also accepts an iterable or an asynchronous iterable of strings or - bytes-like objects. In that case the message is fragmented. Each item - is treated as a message fragment and sent in its own frame. All items - must be of the same type, or else :meth:`send` will raise a - :exc:`TypeError` and the connection will be closed. + .. _Text frame: https://tools.ietf.org/html/rfc6455#section-5.6 + .. _Binary frame: https://tools.ietf.org/html/rfc6455#section-5.6 - It raises a :exc:`TypeError` for other inputs. + :meth:`send` also accepts an iterable or an asynchronous iterable of + strings, bytestrings, or bytes-like objects. In that case the message + is fragmented. Each item is treated as a message fragment and sent in + its own frame. All items must be of the same type, or else + :meth:`send` will raise a :exc:`TypeError` and the connection will be + closed. Canceling :meth:`send` is discouraged. Instead, you should close the connection with :meth:`close`. Indeed, there only two situations where @@ -498,6 +514,8 @@ async def send( a fragmented message will cause a protocol error. Closing the connection has the same effect. + :raises TypeError: for unsupported inputs + """ await self.ensure_open() @@ -594,13 +612,11 @@ async def send( async def close(self, code: int = 1000, reason: str = "") -> None: """ - This coroutine performs the closing handshake. + Perform the closing handshake. 
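
A small connection-handler sketch tying asynchronous iteration (built on :meth:`recv`) and :meth:`send` together:

.. code:: python

    async def echo(websocket, path):
        # Each message is a str for text frames and bytes for binary frames.
        async for message in websocket:
            await websocket.send(message)

    # send() also accepts an iterable; each item becomes a fragment of a
    # single fragmented message, e.g.:
    #     await websocket.send(["Hello ", "world!"])
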
- It waits for the other end to complete the handshake and for the TCP - connection to terminate. As a consequence, there's no need to await - :meth:`wait_closed`; :meth:`close` already does it. - - ``code`` must be an :class:`int` and ``reason`` a :class:`str`. + :meth:`close` waits for the other end to complete the handshake and + for the TCP connection to terminate. As a consequence, there's no need + to await :meth:`wait_closed`; :meth:`close` already does it. :meth:`close` is idempotent: it doesn't do anything once the connection is closed. @@ -612,6 +628,9 @@ async def close(self, code: int = 1000, reason: str = "") -> None: set a shorter ``close_timeout``. If you don't want to wait, let the Python process exit, then the OS will close the TCP connection. + :param code: WebSocket close code + :param reason: WebSocket close reason + """ try: await asyncio.wait_for( @@ -644,11 +663,11 @@ async def close(self, code: int = 1000, reason: str = "") -> None: # Wait for the close connection task to close the TCP connection. await asyncio.shield(self.close_connection_task) - async def ping(self, data: Optional[bytes] = None) -> Awaitable[None]: + async def ping(self, data: Optional[Data] = None) -> Awaitable[None]: """ - This coroutine sends a ping. + Send a ping. - It returns a :class:`~asyncio.Future` which will be completed when the + Return a :class:`~asyncio.Future` which will be completed when the corresponding pong is received and which you may ignore if you don't want to wait. @@ -658,7 +677,7 @@ async def ping(self, data: Optional[bytes] = None) -> Awaitable[None]: pong_waiter = await ws.ping() await pong_waiter # only if you want to wait for the pong - By default, the ping contains four random bytes. The content may be + By default, the ping contains four random bytes. This payload may be overridden with the optional ``data`` argument which must be a string (which will be encoded to UTF-8) or a bytes-like object. @@ -689,15 +708,14 @@ async def ping(self, data: Optional[bytes] = None) -> Awaitable[None]: return asyncio.shield(self.pings[data]) - async def pong(self, data: bytes = b"") -> None: + async def pong(self, data: Data = b"") -> None: """ - This coroutine sends a pong. + Send a pong. An unsolicited pong may serve as a unidirectional heartbeat. - The content may be overridden with the optional ``data`` argument - which must be a string (which will be encoded to UTF-8) or a - bytes-like object. + The payload may be set with the optional ``data`` argument which must + be a string (which will be encoded to UTF-8) or a bytes-like object. Canceling :meth:`pong` is discouraged for the same reason as :meth:`ping`. @@ -744,7 +762,7 @@ async def ensure_open(self) -> None: if self.state is State.CLOSING: # If we started the closing handshake, wait for its completion to - # get the proper close code and status. self.close_connection_task + # get the proper close code and reason. self.close_connection_task # will complete within 4 or 5 * close_timeout after close(). The # CLOSING state also occurs when failing the connection. In that # case self.close_connection_task will complete even faster. diff --git a/src/websockets/server.py b/src/websockets/server.py index 42487480a..446f1db7f 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -1,5 +1,5 @@ """ -The :mod:`websockets.server` module defines a simple WebSocket server API. +:mod:`websockets.server` defines the WebSocket server APIs. 
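
For example, a client may combine :meth:`ping`, :meth:`pong`, and :meth:`close` as in this sketch; the URI is an assumption:

.. code:: python

    import asyncio
    import websockets

    async def keepalive_demo():
        async with websockets.connect('ws://localhost:8765') as websocket:
            pong_waiter = await websocket.ping()   # returns a Future
            await pong_waiter                      # wait for the matching pong
            await websocket.pong(b"heartbeat")     # unsolicited pong
            await websocket.close(code=1000, reason="done")

    asyncio.get_event_loop().run_until_complete(keepalive_demo())
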
""" @@ -62,7 +62,7 @@ class WebSocketServerProtocol(WebSocketCommonProtocol): """ - Complete WebSocket server implementation as an :class:`asyncio.Protocol`. + :class:`~asyncio.Protocol` subclass implementing a WebSocket server. This class inherits most of its methods from :class:`~websockets.protocol.WebSocketCommonProtocol`. @@ -92,7 +92,7 @@ def __init__( ] = None, **kwargs: Any, ) -> None: - # For backwards-compatibility with 6.0 or earlier. + # For backwards compatibility with 6.0 or earlier. if origins is not None and "" in origins: warnings.warn("use None instead of '' in origins", DeprecationWarning) origins = [None if origin == "" else origin for origin in origins] @@ -226,12 +226,11 @@ async def read_http_request(self) -> Tuple[str, Headers]: """ Read request line and headers from the HTTP request. - Raise :exc:`~websockets.exceptions.InvalidMessage` if the HTTP message - is malformed or isn't an HTTP/1.1 GET request. + If the request contains a body, it may be read from ``self.reader`` + after this coroutine returns. - Don't attempt to read the request body because WebSocket handshake - requests don't have one. If the request contains a body, it may be - read from ``self.reader`` after this coroutine returns. + :raises ~websockets.exceptions.InvalidMessage: if the HTTP message is + malformed or isn't an HTTP/1.1 GET request """ try: @@ -269,7 +268,7 @@ def write_http_response( self.writer.write(response.encode()) if body is not None: - logger.debug("%s > Body (%d bytes)", self.side, len(body)) + logger.debug("%s > body (%d bytes)", self.side, len(body)) self.writer.write(body) async def process_request( @@ -278,12 +277,10 @@ async def process_request( """ Intercept the HTTP request and return an HTTP response if appropriate. - ``path`` is a :class:`str` and ``request_headers`` is a - :class:`~websockets.http.Headers` instance. - If ``process_request`` returns ``None``, the WebSocket handshake - continues. If it returns a status code, headers and a response body, - that HTTP response is sent and the connection is closed. In that case: + continues. If it returns 3-uple containing a status code, response + headers and a response body, that HTTP response is sent and the + connection is closed. In that case: * The HTTP status must be a :class:`~http.HTTPStatus`. * HTTP headers must be a :class:`~websockets.http.Headers` instance, a @@ -294,30 +291,32 @@ async def process_request( This coroutine may be overridden in a :class:`WebSocketServerProtocol` subclass, for example: - * to return a HTTP 200 :attr:`~http.HTTPStatus.OK` response on a given - path; then a load balancer can use this path for a health check; - * to authenticate the request and return a HTTP 401 - :attr:`~http.HTTPStatus.UNAUTHORIZED` or a HTTP 403 - :attr:`~http.HTTPStatus.FORBIDDEN` when authentication fails. + * to return a HTTP 200 OK response on a given path; then a load + balancer can use this path for a health check; + * to authenticate the request and return a HTTP 401 Unauthorized or a + HTTP 403 Forbidden when authentication fails. - Instead of subclassing, it is possible to pass a ``process_request`` - argument to the :class:`WebSocketServerProtocol` constructor or the - :func:`serve` function. This is equivalent, except the - ``process_request`` corountine doesn't have access to the protocol - instance, so it can't store information for later use. 
+ Instead of subclassing, it is possible to override this method by + passing a ``process_request`` argument to the :func:`serve` function + or the :class:`WebSocketServerProtocol` constructor. This is + equivalent, except ``process_request`` won't have access to the + protocol instance, so it can't store information for later use. ``process_request`` is expected to complete quickly. If it may run for a long time, then it should await :meth:`wait_closed` and exit if :meth:`wait_closed` completes, or else it could prevent the server from shutting down. + :param path: request path, including optional query string + :param request_headers: request headers + """ if self._process_request is not None: response = self._process_request(path, request_headers) if isinstance(response, Awaitable): return await response else: - # For backwards-compatibility with 7.0. + # For backwards compatibility with 7.0. warnings.warn( "declare process_request as a coroutine", DeprecationWarning ) @@ -331,8 +330,10 @@ def process_origin( """ Handle the Origin HTTP request header. - Raise :exc:`~websockets.exceptions.InvalidOrigin` if the origin isn't - acceptable. + :param headers: request headers + :param origins: optional list of acceptable origins + :raises ~websockets.exceptions.InvalidOrigin: if the origin isn't + acceptable """ # "The user agent MUST NOT include more than one Origin header field" @@ -360,10 +361,6 @@ def process_extensions( Return the Sec-WebSocket-Extensions HTTP response header and the list of accepted extensions. - Raise :exc:`~websockets.exceptions.InvalidHandshake` to abort the - handshake with an HTTP 400 error code. (The default implementation - never does this.) - :rfc:`6455` leaves the rules up to the specification of each :extension. @@ -382,6 +379,11 @@ def process_extensions( Other requirements, for example related to mandatory extensions or the order of extensions, may be implemented by overriding this method. + :param headers: request headers + :param extensions: optional list of supported extensions + :raises ~websockets.exceptions.InvalidHandshake: to abort the + handshake with an HTTP 400 error code + """ response_header_value: Optional[str] = None @@ -438,6 +440,11 @@ def process_subprotocol( Return Sec-WebSocket-Protocol HTTP response header, which is the same as the selected subprotocol. + :param headers: request headers + :param available_subprotocols: optional list of supported subprotocols + :raises ~websockets.exceptions.InvalidHandshake: to abort the + handshake with an HTTP 400 error code + """ subprotocol: Optional[Subprotocol] = None @@ -467,16 +474,19 @@ def select_subprotocol( the default implementation selects the preferred subprotocols by giving equal value to the priorities of the client and the server. - If no subprotocols are supported by the client and the server, it + If no subprotocol is supported by the client and the server, it proceeds without a subprotocol. This is unlikely to be the most useful implementation in practice, as many servers providing a subprotocol will require that the client uses that subprotocol. Such rules can be implemented in a subclass. - This method may be overridden by passing a ``select_subprotocol`` - argument to the :class:`WebSocketServerProtocol` constructor or the - :func:`serve` function. 
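
A typical use of ``process_request`` is the health check mentioned above; in this sketch the ``/healthz`` path and the handler are arbitrary choices:

.. code:: python

    import asyncio
    import http
    import websockets

    async def health_check(path, request_headers):
        # Returning a (status, headers, body) 3-uple short-circuits the
        # WebSocket handshake; returning None lets it continue.
        if path == "/healthz":
            return http.HTTPStatus.OK, [], b"OK\n"

    async def handler(websocket, path):
        await websocket.send("Hello!")

    start_server = websockets.serve(
        handler, 'localhost', 8765, process_request=health_check)

    asyncio.get_event_loop().run_until_complete(start_server)
    asyncio.get_event_loop().run_forever()
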
+ Instead of subclassing, it is possible to override this method by + passing a ``select_subprotocol`` argument to the :func:`serve` + function or the :class:`WebSocketServerProtocol` constructor + + :param client_subprotocols: list of subprotocols offered by the client + :param server_subprotocols: list of subprotocols available on the server """ if self._select_subprotocol is not None: @@ -500,27 +510,22 @@ async def handshake( """ Perform the server side of the opening handshake. - If provided, ``origins`` is a list of acceptable HTTP Origin values. - Include ``None`` if the lack of an origin is acceptable. - - If provided, ``available_extensions`` is a list of supported - extensions in the order in which they should be used. - - If provided, ``available_subprotocols`` is a list of supported - subprotocols in order of decreasing preference. - - If provided, ``extra_headers`` sets additional HTTP response headers - when the handshake succeeds. It can be a - :class:`~websockets.http.Headers` instance, a - :class:`~collections.abc.Mapping`, an iterable of ``(name, value)`` - pairs, or a callable taking the request path and headers in arguments - and returning one of the above. - - Raise :exc:`~websockets.exceptions.InvalidHandshake` if the handshake - fails. - Return the path of the URI of the request. + :param origins: list of acceptable values of the Origin HTTP header; + include ``None`` if the lack of an origin is acceptable + :param available_extensions: list of supported extensions in the order + in which they should be used + :param available_subprotocols: list of supported subprotocols in order + of decreasing preference + :param extra_headers: sets additional HTTP response headers when the + handshake succeeds; it can be a :class:`~websockets.http.Headers` + instance, a :class:`~collections.abc.Mapping`, an iterable of + ``(name, value)`` pairs, or a callable taking the request path and + headers in arguments and returning one of the above. + :raises ~websockets.exceptions.InvalidHandshake: if the handshake + fails + """ path, request_headers = await self.read_http_request() @@ -530,7 +535,7 @@ async def handshake( if isinstance(early_response_awaitable, Awaitable): early_response = await early_response_awaitable else: - # For backwards-compatibility with 7.0. + # For backwards compatibility with 7.0. warnings.warn("declare process_request as a coroutine", DeprecationWarning) early_response = early_response_awaitable # type: ignore @@ -589,21 +594,21 @@ async def handshake( class WebSocketServer: """ - Wrapper for :class:`~asyncio.Server` that closes connections on exit. + WebSocket server returned by :func:`~websockets.server.serve`. - This class provides the return type of :func:`~websockets.server.serve`. + This class provides the same interface as + :class:`~asyncio.AbstractServer`, namely the + :meth:`~asyncio.AbstractServer.close` and + :meth:`~asyncio.AbstractServer.wait_closed` methods. - It mimics the interface of :class:`~asyncio.AbstractServer`, namely its - :meth:`~asyncio.AbstractServer.close()` and - :meth:`~asyncio.AbstractServer.wait_closed()` methods, to close WebSocket - connections properly on exit, in addition to closing the underlying - :class:`~asyncio.Server`. + It keeps track of WebSocket connections in order to close them properly + when shutting down. 
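
As a sketch of customizing subprotocol selection, the policy below prefers the client's order; it is a hypothetical example, not the library's default:

.. code:: python

    import websockets

    def prefer_client_order(client_subprotocols, server_subprotocols):
        for subprotocol in client_subprotocols:
            if subprotocol in server_subprotocols:
                return subprotocol
        return None  # proceed without a subprotocol

    async def handler(websocket, path):
        await websocket.send(f"negotiated: {websocket.subprotocol}")

    start_server = websockets.serve(
        handler, 'localhost', 8765,
        subprotocols=['chat', 'superchat'],
        select_subprotocol=prefer_client_order,
    )
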
Instances of this class store a reference to the :class:`~asyncio.Server` - object returned by :meth:`~asyncio.AbstractEventLoop.create_server` rather - than inherit from :class:`~asyncio.Server` in part because - :meth:`~asyncio.AbstractEventLoop.create_server` doesn't support passing a - custom :class:`~asyncio.Server` class. + object returned by :meth:`~asyncio.loop.create_server` rather than inherit + from :class:`~asyncio.Server` in part because + :meth:`~asyncio.loop.create_server` doesn't support passing a custom + :class:`~asyncio.Server` class. """ @@ -624,14 +629,13 @@ def wrap(self, server: asyncio.AbstractServer) -> None: """ Attach to a given :class:`~asyncio.Server`. - Since :meth:`~asyncio.AbstractEventLoop.create_server` doesn't support - injecting a custom ``Server`` class, the easiest solution that doesn't - rely on private :mod:`asyncio` APIs is to: + Since :meth:`~asyncio.loop.create_server` doesn't support injecting a + custom ``Server`` class, the easiest solution that doesn't rely on + private :mod:`asyncio` APIs is to: - instantiate a :class:`WebSocketServer` - give the protocol factory a reference to that instance - - call :meth:`~asyncio.AbstractEventLoop.create_server` with the - factory + - call :meth:`~asyncio.loop.create_server` with the factory - attach the resulting :class:`~asyncio.Server` with this method """ @@ -665,9 +669,18 @@ def is_serving(self) -> bool: def close(self) -> None: """ - Close the server and terminate connections with close code 1001. + Close the server. + + This method: + + * closes the underlying :class:`~asyncio.Server`; + * rejects new WebSocket connections with an HTTP 503 (service + unavailable) error; this happens when the server accepted the TCP + connection but didn't complete the WebSocket opening handshake prior + to closing; + * closes open WebSocket connections with close code 1001 (going away). - This method is idempotent. + :meth:`close` is idempotent. """ if self.close_task is None: @@ -716,10 +729,10 @@ async def _close(self) -> None: async def wait_closed(self) -> None: """ - Wait until the server is closed and all connections are terminated. + Wait until the server is closed. - When :meth:`wait_closed()` returns, all TCP connections are closed and - there are no pending tasks left. + When :meth:`wait_closed` returns, all TCP connections are closed and + all connection handlers have returned. """ await asyncio.shield(self.closed_waiter) @@ -737,77 +750,80 @@ def sockets(self) -> Optional[List[socket.socket]]: class Serve: """ - Create, start, and return a :class:`WebSocketServer`. - :func:`serve` returns an awaitable. Awaiting it yields an instance of - :class:`WebSocketServer` which provides - :meth:`~websockets.server.WebSocketServer.close` and + Create, start, and return a WebSocket server on ``host`` and ``port``. + + Whenever a client connects, the server accepts the connection, creates a + :class:`WebSocketServerProtocol`, performs the opening handshake, and + delegates to the connection handler defined by ``ws_handler``. Once the + handler completes, either normally or with an exception, the server + performs the closing handshake and closes the connection. + + Awaiting :func:`serve` yields a :class:`WebSocketServer`. This instance + provides :meth:`~websockets.server.WebSocketServer.close` and :meth:`~websockets.server.WebSocketServer.wait_closed` methods for terminating the server and cleaning up its resources. 
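
Putting :meth:`close` and :meth:`wait_closed` together, a graceful shutdown might look like this sketch; the one-hour lifetime is arbitrary:

.. code:: python

    import asyncio
    import websockets

    async def echo(websocket, path):
        async for message in websocket:
            await websocket.send(message)

    async def main():
        server = await websockets.serve(echo, 'localhost', 8765)
        try:
            await asyncio.sleep(3600)   # serve for an hour
        finally:
            server.close()              # close code 1001 on open connections
            await server.wait_closed()  # wait until handlers have returned

    asyncio.get_event_loop().run_until_complete(main())
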
+ When a server is closed with :meth:`~WebSocketServer.close`, it closes all + connections with close code 1001 (going away). Connections handlers, which + are running the ``ws_handler`` coroutine, will receive a + :exc:`~websockets.exceptions.ConnectionClosedOK` exception on their + current or next interaction with the WebSocket connection. + :func:`serve` can also be used as an asynchronous context manager. In this case, the server is shut down when exiting the context. :func:`serve` is a wrapper around the event loop's - :meth:`~asyncio.AbstractEventLoop.create_server` method. Internally, it - creates and starts a :class:`~asyncio.Server` object by calling - :meth:`~asyncio.AbstractEventLoop.create_server`. The - :class:`WebSocketServer` it returns keeps a reference to this object. + :meth:`~asyncio.loop.create_server` method. It creates and starts a + :class:`~asyncio.Server` with :meth:`~asyncio.loop.create_server`. Then it + wraps the :class:`~asyncio.Server` in a :class:`WebSocketServer` and + returns the :class:`WebSocketServer`. The ``ws_handler`` argument is the WebSocket handler. It must be a coroutine accepting two arguments: a :class:`WebSocketServerProtocol` and the request URI. The ``host`` and ``port`` arguments, as well as unrecognized keyword - arguments, are passed along to - :meth:`~asyncio.AbstractEventLoop.create_server`. For example, you can set - the ``ssl`` keyword argument to a :class:`~ssl.SSLContext` to enable TLS. - - The ``create_protocol`` parameter allows customizing the asyncio protocol - that manages the connection. It should be a callable or class accepting - the same arguments as :class:`WebSocketServerProtocol` and returning a - :class:`WebSocketServerProtocol` instance. It defaults to + arguments, are passed along to :meth:`~asyncio.loop.create_server`. + + For example, you can set the ``ssl`` keyword argument to a + :class:`~ssl.SSLContext` to enable TLS. + + The ``create_protocol`` parameter allows customizing the + :class:`~asyncio.Protocol` that manages the connection. It should be a + callable or class accepting the same arguments as + :class:`WebSocketServerProtocol` and returning an instance of + :class:`WebSocketServerProtocol` or a subclass. It defaults to :class:`WebSocketServerProtocol`. - The behavior of the ``ping_interval``, ``ping_timeout``, ``close_timeout``, - ``max_size``, ``max_queue``, ``read_limit``, and ``write_limit`` optional - arguments is described in the documentation of - :class:`~websockets.protocol.WebSocketCommonProtocol`. + The behavior of ``ping_interval``, ``ping_timeout``, ``close_timeout``, + ``max_size``, ``max_queue``, ``read_limit``, and ``write_limit`` is + described in :class:`~websockets.protocol.WebSocketCommonProtocol`. 
:func:`serve` also accepts the following optional arguments: * ``compression`` is a shortcut to configure compression extensions; by default it enables the "permessage-deflate" extension; set it to ``None`` to disable compression - * ``origins`` defines acceptable Origin HTTP headers — include ``None`` if + * ``origins`` defines acceptable Origin HTTP headers; include ``None`` if the lack of an origin is acceptable * ``extensions`` is a list of supported extensions in order of decreasing preference * ``subprotocols`` is a list of supported subprotocols in order of decreasing preference * ``extra_headers`` sets additional HTTP response headers when the - handshake succeeds — it can be a :class:`~websockets.http.Headers` + handshake succeeds; it can be a :class:`~websockets.http.Headers` instance, a :class:`~collections.abc.Mapping`, an iterable of ``(name, value)`` pairs, or a callable taking the request path and headers in arguments and returning one of the above - * ``process_request`` is a coroutine taking the request path and headers - in argument, see :meth:`~WebSocketServerProtocol.process_request` for - details - * ``select_subprotocol`` is a callable taking the subprotocols offered by - the client and available on the server in argument, see + * ``process_request`` allows intercepting the HTTP request; it must be a + coroutine taking the request path and headers in argument; see + :meth:`~WebSocketServerProtocol.process_request` for details + * ``select_subprotocol`` allows customizing the logic for selecting a + subprotocol; it must be a callable taking the subprotocols offered by + the client and available on the server in argument; see :meth:`~WebSocketServerProtocol.select_subprotocol` for details - Whenever a client connects, the server accepts the connection, creates a - :class:`WebSocketServerProtocol`, performs the opening handshake, and - delegates to the WebSocket handler. Once the handler completes, the server - performs the closing handshake and closes the connection. - - When a server is closed with :meth:`~WebSocketServer.close`, it closes all - connections with close code 1001 (going away). WebSocket handlers — which - are running the coroutine passed in the ``ws_handler`` — will receive a - :exc:`~websockets.exceptions.ConnectionClosedOK` exception on their - current or next interaction with the WebSocket connection. - Since there's no useful way to propagate exceptions triggered in handlers, they're sent to the ``'websockets.server'`` logger instead. Debugging is much easier if you configure logging to print them:: @@ -851,7 +867,7 @@ def __init__( ] = None, **kwargs: Any, ) -> None: - # Backwards-compatibility: close_timeout used to be called timeout. + # Backwards compatibility: close_timeout used to be called timeout. if timeout is None: timeout = 10 else: @@ -860,7 +876,7 @@ def __init__( if close_timeout is None: close_timeout = timeout - # Backwards-compatibility: create_protocol used to be called klass. + # Backwards compatibility: create_protocol used to be called klass. if klass is None: klass = WebSocketServerProtocol else: @@ -968,16 +984,16 @@ def unix_serve( **kwargs: Any, ) -> Serve: """ - Similar to :func:`serve()`, but for listening on Unix sockets. - - ``path`` is the path to the Unix socket. + Similar to :func:`serve`, but for listening on Unix sockets. This function calls the event loop's - :meth:`~asyncio.AbstractEventLoop.create_unix_server` method. + :meth:`~asyncio.loop.create_unix_server` method. It is only available on Unix. 
It's useful for deploying a server behind a reverse proxy such as nginx. + :param path: file system path to the Unix socket + """ return serve(ws_handler, path=path, **kwargs) diff --git a/src/websockets/typing.py b/src/websockets/typing.py index 651b40bbe..3847701b2 100644 --- a/src/websockets/typing.py +++ b/src/websockets/typing.py @@ -12,7 +12,7 @@ - :class:`bytes` for binary messages """ - +# Remove try / except when dropping support for Python < 3.7 try: Data.__doc__ = Data__doc__ # type: ignore except AttributeError: # pragma: no cover @@ -20,9 +20,26 @@ Origin = NewType("Origin", str) +Origin.__doc__ = """Value of a Origin header""" + ExtensionParameter = Tuple[str, Optional[str]] +ExtensionParameter__doc__ = """Parameter of a WebSocket extension""" +try: + ExtensionParameter.__doc__ = ExtensionParameter__doc__ # type: ignore +except AttributeError: # pragma: no cover + pass + + ExtensionHeader = Tuple[str, List[ExtensionParameter]] +ExtensionHeader__doc__ = """Item parsed in a Sec-WebSocket-Extensions header""" +try: + ExtensionHeader.__doc__ = ExtensionHeader__doc__ # type: ignore +except AttributeError: # pragma: no cover + pass + + Subprotocol = NewType("Subprotocol", str) +Subprotocol.__doc__ = """Items parsed in a Sec-WebSocket-Protocol header""" diff --git a/src/websockets/uri.py b/src/websockets/uri.py index 16d3d6761..cbb56524b 100644 --- a/src/websockets/uri.py +++ b/src/websockets/uri.py @@ -1,6 +1,7 @@ """ -The :mod:`websockets.uri` module implements parsing of WebSocket URIs -according to `section 3 of RFC 6455`_. +:mod:`websockets.uri` parses WebSocket URIs. + +See `section 3 of RFC 6455`_. .. _section 3 of RFC 6455: http://tools.ietf.org/html/rfc6455#section-3 @@ -31,25 +32,30 @@ class WebSocketURI(NamedTuple): WebSocketURI.__doc__ = """ WebSocket URI. -* ``secure`` is the secure flag -* ``host`` is the lower-case host -* ``port`` if the integer port, it's always provided even if it's the default -* ``resource_name`` is the resource name, that is, the path and optional query -* ``user_info`` is an ``(username, password)`` tuple when the URI contains +:param bool secure: secure flag +:param str host: lower-case host +:param int port: port, always set even if it's the default +:param str resource_name: path and optional query +:param str user_info: ``(username, password)`` tuple when the URI contains `User Information`_, else ``None``. .. _User Information: https://tools.ietf.org/html/rfc3986#section-3.2.1 - """ +# Work around https://bugs.python.org/issue19931 + +WebSocketURI.secure.__doc__ = "" +WebSocketURI.host.__doc__ = "" +WebSocketURI.port.__doc__ = "" +WebSocketURI.resource_name.__doc__ = "" +WebSocketURI.user_info.__doc__ = "" + def parse_uri(uri: str) -> WebSocketURI: """ - This function parses and validates a WebSocket URI. - - If the URI is valid, it returns a :class:`WebSocketURI`. + Parse and validate a WebSocket URI. - Otherwise it raises an :exc:`~websockets.exceptions.InvalidURI` exception. + :raises ValueError: if ``uri`` isn't a valid WebSocket URI. """ parsed = urllib.parse.urlparse(uri) diff --git a/src/websockets/utils.py b/src/websockets/utils.py index e289e6980..40ac8559f 100644 --- a/src/websockets/utils.py +++ b/src/websockets/utils.py @@ -8,9 +8,8 @@ def apply_mask(data: bytes, mask: bytes) -> bytes: """ Apply masking to the data of a WebSocket message. - ``data`` and ``mask`` are bytes-like objects. - - Return :class:`bytes`. 
+ :param data: Data to mask + :param mask: 4-bytes mask """ if len(mask) != 4: diff --git a/tests/test_auth.py b/tests/test_auth.py index 07341df56..97a4485a0 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -37,14 +37,14 @@ def test_basic_auth(self): self.loop.run_until_complete(self.client.recv()) def test_basic_auth_server_no_credentials(self): - with self.assertRaises(ValueError) as raised: + with self.assertRaises(TypeError) as raised: basic_auth_protocol_factory(realm="auth-tests", credentials=None) self.assertEqual( str(raised.exception), "provide either credentials or check_credentials" ) def test_basic_auth_server_bad_credentials(self): - with self.assertRaises(ValueError) as raised: + with self.assertRaises(TypeError) as raised: basic_auth_protocol_factory(realm="auth-tests", credentials=42) self.assertEqual(str(raised.exception), "invalid credentials argument: 42") @@ -60,7 +60,7 @@ def test_basic_auth_server_multiple_credentials(self): self.loop.run_until_complete(self.client.recv()) def test_basic_auth_bad_multiple_credentials(self): - with self.assertRaises(ValueError) as raised: + with self.assertRaises(TypeError) as raised: basic_auth_protocol_factory( realm="auth-tests", credentials=[("hello", "iloveyou"), 42] ) From 2f10791b875746ba9a8f59ea6e1f3129ffa37740 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 7 Jul 2019 10:31:19 +0200 Subject: [PATCH 123/281] Use monospace font consistently for the project name. --- compliance/README.rst | 4 ++-- docs/changelog.rst | 6 +++--- docs/contributing.rst | 4 ++-- src/websockets/protocol.py | 8 ++++---- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/compliance/README.rst b/compliance/README.rst index cbb4ca2c7..8570f9176 100644 --- a/compliance/README.rst +++ b/compliance/README.rst @@ -30,8 +30,8 @@ Then kill the first one with Ctrl-C. The test client or server shouldn't display any exceptions. The results are stored in reports/clients/index.html. -Note that the Autobahn software only supports Python 2, while websockets only -supports Python 3; you need two different environments. +Note that the Autobahn software only supports Python 2, while ``websockets`` +only supports Python 3; you need two different environments. Conformance notes ----------------- diff --git a/docs/changelog.rst b/docs/changelog.rst index aa4a76259..c79f0f0dd 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -138,8 +138,8 @@ Also: Also: -* websockets sends Ping frames at regular intervals and closes the connection - if it doesn't receive a matching Pong frame. See +* ``websockets`` sends Ping frames at regular intervals and closes the + connection if it doesn't receive a matching Pong frame. See :class:`~protocol.WebSocketCommonProtocol` for details. * Added ``process_request`` and ``select_subprotocol`` arguments to @@ -217,7 +217,7 @@ Also: **Version 5.0 fixes a security issue introduced in version 4.0.** - websockets 4.0 was vulnerable to denial of service by memory exhaustion + Version 4.0 was vulnerable to denial of service by memory exhaustion because it didn't enforce ``max_size`` when decompressing compressed messages (`CVE-2018-1000518`_). diff --git a/docs/contributing.rst b/docs/contributing.rst index 00a529243..40f1dbb54 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -55,7 +55,7 @@ cryptocurrency trackers. I'm strongly opposed to Bitcoin's carbon footprint. Please stop heating the planet where my children are supposed to live, thanks. 
-Since websockets is released under an open-source license, you can use it for -any purpose you like. However, I won't spend any of my time to help. +Since ``websockets`` is released under an open-source license, you can use it +for any purpose you like. However, I won't spend any of my time to help. I will summarily close issues related to Bitcoin or cryptocurrency in any way. diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index fa369450b..7d1560927 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -118,7 +118,7 @@ class WebSocketCommonProtocol(asyncio.StreamReaderProtocol): side and ``5 * close_timeout`` on the client side. ``close_timeout`` needs to be a parameter of the protocol because - websockets usually calls :meth:`close` implicitly: + ``websockets`` usually calls :meth:`close` implicitly: - on the server side, when the connection handler terminates, - on the client side, when exiting the context manager for the connection. @@ -1298,15 +1298,15 @@ def eof_received(self) -> bool: See http://bugs.python.org/issue24539 for more information. - This is inappropriate for websockets for at least three reasons: + This is inappropriate for ``websockets`` for at least three reasons: 1. The use case is to read data until EOF with self.reader.read(-1). - Since websockets is a TLV protocol, this never happens. + Since WebSocket is a TLV protocol, this never happens. 2. It doesn't work on TLS connections. A falsy value must be returned to have the same behavior on TLS and plain connections. - 3. The websockets protocol has its own closing handshake. Endpoints + 3. The WebSocket protocol has its own closing handshake. Endpoints close the TCP connection after sending a close frame. As a consequence we revert to the previous, more useful behavior. From c7d795ee91804fac5f869b31b41001695f265a36 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 7 Jul 2019 10:53:22 +0200 Subject: [PATCH 124/281] Improve pings dict in protocol diagram. It's more like a queue than like a coroutine. 
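As an editorial sketch of why the diagram treats ``pings`` as a queue rather than a coroutine (simplified names, not code from this patch): ping IDs map to their waiters in chronological order, and a solicited pong acknowledges every pending ping sent up to and including the matching one::

    pings = {}  # ping ID -> waiter (asyncio.Future), in chronological order

    def acknowledge_pong(ping_id):
        # A solicited pong resolves every pending ping sent up to and
        # including the matching one, in order.
        if ping_id in pings:
            for sent_id in list(pings):
                waiter = pings.pop(sent_id)
                if not waiter.done():
                    waiter.set_result(None)
                if sent_id == ping_id:
                    break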
--- docs/protocol.graffle | Bin 4664 -> 4740 bytes docs/protocol.svg | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/protocol.graffle b/docs/protocol.graffle index 13fdb307ef5907b16265b4fcb669180e509c71cc..df76f49607e5743b809e6f41787262932b6b0743 100644 GIT binary patch literal 4740 zcmV-~5_|0*iwFP!000030PS6CbK5r7{@nZuz4^FX)3_7prrB|vbdzqHcw;Boc0ALe zC0OQ$B2|)#<7V>T-vdgPcmXX+m9K)^iEQxz1VNnVTygN^pI=5%@3T&`Fo}PBj6L+Y zr{jJy2;bcdN8dg9q%WTC_4DwvejQxs^i@32U!LrmZ{cDX=X$8qr|8M< z<*%1b;ND?B$f3&*)1K#zf;0`xFW)`M@)REVDU8rdMsYYylgZe73%~s<4NgxZEuQSo zNJ`XLD34GWKJkSRq(t&$_p2XMRbS=gVG z&7OTmW+<}DYx7}z{ffTtp>N;!zG8~m_dOVv{PwLfVAEnh3bJP7!z2n9cda#f^?X5p zIjf{R!Beus{36l|%F8iNFG82er^VIM<>~e3X>h(6)?#39-j;_0GdiD&%ik8iOy@Au zza`12!;07s;?F_$HVud2Vl7-wW-*uLSGj0$82+gn^rbm(v`V7MDBg?0VcZxMZjP#; zJ{$*q=weQv>L7Ybxu=RB!W1W5D)nTyxNzAN{xIwNd5|{-ND%gSklI14hw%>#|3F3W z?+AVxqcxo*X}$g4#Ym@NzqdCT!umB6R&Dk?%*Ii0ao7){#=U`o;iuOh^?vNlf3J7& zQyA%^i*ciI08CS-Ef(tYq(2$yIInZ_d}>K!Zf6TTGk{~^{_2XYGRk~Bufted7-jYl zNu0lmPm}g_ZYV6v{H2aQ>m2Z@(&vn?O6$#hSG;obbhCF~=86WxF6Kk{6_%Y+9whKp z`6=hKQ34jqkcYX?d`T(cSaJG&k4oYR-^YSt!jNQKeX9-qvNa>z!%9fzE6fE`3@>lJ z08T5mG-fqB3kJz~d6E_cc6@0AnDSiOk*nGFOC1dM(zG&w1+v0h{WQB;zR#j*g=b1{ z*yE*FJfnZDhe7|MLVOxTSs_E_L`OjwwT=hbhVd~Rthkn%ASiZUb0B=-r%h`^h8?b^1o19~}O2)R~LX%3LfgT)c!kPQxg=Cf>MJzU$og zm5aQ_XVx`7<176oZ=dCgfG@u?RDEMJ;0*?i)gUY5Xx?$bLC0x+ijRXh3yXjH;~^%T z9QTuc5|~RIvFRV#1a zgNetLWP~d2OTv8IX4ch>LSMimH4MdWtqGP*n^58hYZF>+uui6(;kB}tq=LVshQGIA z4KD84{$|R|xEMXRijOLGEU4?+!4{$y*`z;*VeBD71y+J%UxB*1ia%#rYv<2-(Ec1E zsRuHRz@kuu5K4`UuO{+@G-i%3JWjzPB7y+G3x&kCn>pCR#RijBg->DEz8fmTd?|o% zN5H>AQ4snZDbi5N-z9kwPpAo;HhsVTyv|s2#@dz`YgrTl@WUjql?}H(Jb#)*$B%Br zRrTPMAaqkEoHb@qQ+tHCVG|CPyN9vFNcs#$1jAT9sCGS&k!Bc%x3=yQ#HqJ-Hry@S za4U$FHB30dx85q_YF(?W85aO08f-1&jo}ChDL_Z&ZQRI(@Du@M8*$FC0yTVjYsZcE zhqX9a#XCYBAIeSfp~>qaCdCh!i~0yt!kD0|@Zqx7jt}pXn-V)lT!|6=O$x;B@d+y< zE@|x;@xjImwu%wW%2;(eDZAR6XJ>A4Q|7mgota=@ZDJ{X#ElV3{2MmEZ3uS zF^NEK7du>h%icW4#o~5CB&jR=~BL z){D5dPo{irr+INz5Lp%^+Be?)3Wt;^rX(lj%X8I)q3Ms9eO97hvz9|kOKhP_t#^>q zjxko#UuZS@TZXi|6~MaYh}7*wk7mqwcSA^j^xI-ZCnj3NX3?T3$@I5nTjJJ-T%!jx zg9>*Oa_hX`SjZ<~c8`Lu4pw>aH^UpkB|4gRi!ODR9;6na<-H<<1!b5^6sR(6?FbZ;m3r|$TWUWCh zOY3_4fVquHP2fQIws$6V0k7@hAGS9CG{VO#@sFi--5D&UW1t7Ov+uz%5M7&rgzx!S za7ujS6U?YsiGeJw>uxjYB#>jEd!8${T8ao+Be9IFy^X?}+0>nbEv=g#4wo0@G{PGd zXl(7>40TVO&z3IeD;oRDdC9TEea!je8v%4DAJ>`AbrP?czfR(HX??D+@tJtMT<@i@ z3X$@BL498W3?NP$8!4_k^SV|pQ4tP*C9M}jLDF!NjY?bTbGM_Q)2I^AgU|r z);r96Pzo^j0k#G9xnEX2l`oj!vd}#l_e}jpYOo)G9~Vk$!%ehB1svHu+*WZ}*s%_X z3e^FzcstFPwto?Ypc|yk_jTVi+GUz*yN-*UM%!95l5MPJr10@NitaIV`Vdol%i8cr zXIU`R+3`&sLeMrWe0)-_XgSb-O!TUCEJ1=?fxKq*ywg!&PJP@1Dw_z~K^{TXAAETr zq5)cqNGiH*!9wCU!0;mospg%l9O}#vyDHO3mQuCJFqfqQb=NqTXQn9 zi*s$1$@w}=zR_5AJz=B0LZ-EK?JNVq9FyOFCZ~?cH^}6eUAIG#`nOp{tohx#{zTs+ zq!g=DSgz$??09HkTUD`o6|dz_Se~C*Twf3*l#j52){1+?=UnQa!}(3rUtq`Q)Wqg|{Fqb*I9+g9hEH@Es2q90YlALnjNa26q<6 zyMsFm++hcI4(=S>b%eXS2z1Q@Jsi+kfDSpJb3o^St|QQ$T;#WibGD6&^|OS1;oO96 zU`JdXQ0FFeZo-GOFEJSWK1;{mpr6o+v(2PdhqG-s>vn`YoOL++z;M?1G&dfSu|aJ; z56@||?MO=<(mJGdNc*sm_9o)q!e^d=PQd|T8K|?$pe&Kpt0F2;N`Pq06wnIuc#Pfw zl>MO7K@bPmhpgBd0czQm9VL^&#h?5H)*4(53f>NsB=Bj=}P3FafNYW_|u zICg20uF~YDAanjMp~DZ_Fruy1SL`ud)L~#gN1SkK&SGnP_ibz8I1Zx{7S9;+zIb}O z9O48@)OnJA+WKQf2)U_TcS^XYBqJQOG$}w1@eUhVt~=Mgo*}0AD!J~qNAktAm@*w*( zD{R)g_3~=-tgj;&Rht)Jv1$&Ypsz=|qR!cK_STS-^}KzCn_HJy;lj1;7q^@p^C0Eq z%b%jqQYr4&7l+Mp+hwYM4`u4TDHRIB1aT?YEE2bZ5T=BhSOw#MrBvulMvZ|@t#0Wo z9iH5hOL}u|<4))4-HGy|WpN4b^i)7h9S)sIN;o+Wvh!%`T1EJo)lb+2(s28dt;7BJ~3l6|%HBAGam>_oB?$xbBS zfy~ysE0W(8qjMs;vq-M(y^zee?#Cz`($(6W^sd%ixb~c4zWG`UcladItMA~FeO(cu;_NuF9cc|E 
z_me)!lKv+!v5me9%I-&fa#(z^pNvLHe2^wN%wcrPNd+}r^6lxIT~s)Xu$0>XOZ3jD zaQNCi2u_7_D%^cS-Bgc~T=#T57@KVNr3&1_?>O5$84k*zGk-DjtB2#D5B<&G&dlca zWYv~kEQ6V|^?ylfZ9ezW){uO<+>`Unv|5ncvzeyUaWKw~k`DBXr!Ssr5)O3s+K8rk zLn|AWs6T{Rc(MSzmfIBRSO?~q)H$IQL#I$X0zyy+;M=r8jvM6v)_Q!D9QK3KDCXJn z(@go`LZ^#6&;83a(P$ognQvrD`6d`y{LIB|dbmk`kXpBOSQF6>!S2vmw_$t;_4(3n z+ZH&CHkst@OMEX5M?tO|3#jHRGFSUy+E0E7UBgk~~jFzXa(pEG>spRBP7XX_N$cm8)mJGhF511!7I_@G?om zKh3T#7PeXz9#)*C(B%l{m_{5ksdBS!ka&!XR6y-fDlf1ds4)!~b?y|cIP z5yvMVe~$XYefWFu_C5aX?BwN#Xb-OMzu-qlJX_#geeyrnqjo$yb4~r-~oM z1SMQZFt}88!<^-&X7#<(pFyB$3rpOG*Z zrs~~Z-^8mrXm>?pqW}bPU$5BqeH{KV(F+Uvil={{=@)SxrdLh-hpvj|$1nyXvK`9z zCus^ZQOsmhPq(P)FJ>mu@6#zR>2V9$JM&ni6?j^tn=(KFz=9&=Ckgz4rL2X+<`zN4 zDYDpDJiq>K5-p^1g8*L9NwDm4)x2E@Tx5BAV7tR8IC-fJf7kRNN^5GCdlOg#Kfa#i9;lPsXVquW)EWO)&FJ&-l0LS8U S908|1ef0mR>Y&W>^8f(A2xIa9 literal 4664 zcmV-8636WyiwFP!000030PS7tbJIE&|9tr?vV7d7uHLp-E<2@#g}Xqzq`+RLGk4rX zO?`3fU^|pvhW~w!oR=SRvoAN=ya~Bal2=h*eS5O4zlDoIl&OJAo}wq)vyZbT zaPOe!XVB$`Nzd~}evq@uq(F^@ zbO?pv6ITd93M5aqXI)Q*FwWwg{-5JCo8LN1#%g3$N^I?4djvjT;k4K&Fm}2&*1H+O(ev}5RTkM8@T5sHq!(egON|RT63;NltlHvqU z$hz5is1}s7F;6Z+m+|Mt)xzb;^}WPDTMTP4u($ig;lPZ}r(*VL@nJHDsrnSh!xk%I z*N?vX>3$Lng2h^xO=doq#aFp#(GC7mHTuGw*ILElco^-3!62%Q3fD)KQ+G#x54xDs zC&~|>Qtrt7hcL+r_oRHXonM$Wg+KKA-t)8C013kG7Lr?tbufO+@M9`E|3L6l8?Ejn zPO9y9&W9=qdYzr|0M@UbuyV7#ARUGNdAH|>wR-~t!%wF-?EKo9f39}$A_&#t`KZ>| z2d1gg<_mQ%?u~~k%BtKvpITCz+vx&NHQ*Syzr12AY*yei#;L;@rHC>3pG)dJ>M##G zo+n*_q>GV|lxa@%xPz#qLUNB{B0Y~j>JZPNOfW7uaa|;(z-2BqvFUUUgl&{2||;3GdugZ#hVXn+YPN4>Zg`}z_` zZ1P7sK8C+NiXMDFI)Rm&9fwCJ=;&k^_f--do%BxqBvl#el-KV+e*k?i4&wwG{PQSL zeZu+CAWS?nll+(P^e;kK{>wx9 z7o|A~l{pDxISGs(OJyh zcY-=6i9b~74+B2}>SdtLgr*n4j9gjx^fN%mspoKy2$vxum?w#_KxYCvW`WKEodvp< zKzDqeT{+G*AkFI_QkRc2FID(e>5;dF7_u_NPy@v5C)sH{h$BCI<|o?V9D+&EE3}?j zoF@8%I4C~Md>K=Q9L!zjdXy55LCbyAseZ?)0o*ISr$JWG)Mrn=g^N#~Go2ns@m0-r z^x~id2qG$hqxQA}(_b$KuVEz- zk2;cZ0*gWtLMVkLL8Wj!Ee$aBTnT@9+DdYT!zmb4L=fg-A(7a0D+wF8SYy(v@F~p7 zdrjk*F9pyI5b&>%6m$lTB&q52@8T@?rd9+_letiRUTf=FTW>>by)+B~_+b**%64B5 zoAH7H5V|f4&=~Tls69elvjqst-ND$y$a5Kt2!^qIP|bQEBlRc_Z*1Kr z22^crZMYk@;Z_hUYnX6^Z@g8;<+@f`J*)sqG}v0&nZpqho&X)0H*q5u!jS}&ZNxdl z64dbJtt~g+9oFJx74HbOeCS;kAL^_Yq7$5exu}aUC5#EW3Lh?OZTavnxhb(_#FZG) zy-b1FKD=RM#3ijQBi`G1!A3EnUKy)SCuJ9V^UTaGuFL#3u`?44tWE5BF5=nEGCiL1rc<}i{e zpMhdn(ikIxJ;Xc&ay!rOcaei!&!rLzK|rLFI}k?NdKQQM_OYZgCbdFiQ$+ZUKZw*) zBtrmPO;`cfwi++u$}WN8wXOQaRY7D~kZ4zX_e&g7pqP*x7cb9M6NV-~V*0xj{hGEM z7+PWjU1Ge0H|ZE-Ir%`#(cdDZ-L3%E6-T6QCVDhwzS|o@`qFEP6&>qn5t~Mf!Z=kw zmTd@HA9A%GP!B5HPROnDer+Ki8wIVyOrmzM0&UWYw#IpvnrX0xbih_RSm|(8(qR>2 z{}zgeyD1qKa$yulgDVsZS`)|8b6B1(D0DiPbg2~!_dzTmD;BI+uwvnUiiO~6#e&}O z&#>cCLOdd<$C>o3f2Ij)nXpi6q1O6m?y-L+RZ)Lsz`b`Nf4*+WpGn~$L8K=MWs)!o za)S#;l*43=K`le;YW#rNjY)OjK=`J2Cba>t&EX%mHviPZ$1CxVp>@?6?0J@f?%mG5 zd&fX@Z3Ysq<6^-nagj?fqhcinGPJI`%{(iCECb!~T(Q+sM93P6Wo+$j6voV^_8e?z zUH5RLdg5HRbU|Ox*j>&`mL2Y5&L6)NK)3R7o#|XB@tV2oBwiQR z=L#F2iNlNaUUI7tNyin`bv=Lq#A$6K#Z_lsSE?;a!eLNEIJ%}32b)#GKesXaxmv3c zljV8a7S}c;{duMRUHB5ypR21f-%VhnYpo%X+vu8&uG#3?J&vv=s`qu1qHAmEp7_dr zGvwNIPa<7MtC^B=h9qZP?+5}>r#Jn9f~Q@q+~v2R=njm}p%l4Da0vruZsWeaYHR&S z%QRQ3du~YmoEzJEhnWjX0p>2iw!kiTi>jw|1ryxMbx+0}UB8iP><8e-xsqCQ6Kzld zM|KCdRm=)IRsm6>Dj?=>r~cA*&%*$8gQWhx?wUrsOjB*vak15C8*4_gORE_vT)d8= zdjy@{$JE}UHat{m>JLrZqYLJF}uiM_S_i!IL_Y^o|&t>U%(35)YHjq3}7gme*>&{}ebxSV?~^1w!h zr)q3%D2FDHbimB;FsF>`uCYbfO>}KiW^r~kvDFSbudx7QHSe#++zgAGOar2TI_`4B zX?P@pP@e4H8tpKq4&XfC9mdF`(pw2>joRquoStB@@*<6k+*AsZsqRa?ET~OEYC>aX zQ;=>;3X%=a*o32>4zZjiL8ku5Pv*MBGAgER6$W>+JyhO+u+$zZZx0%@hsw7+T(IwF z{xzK}xEkCU9PbwH3~+}n+*!D@aMu#DD>cdzk2+;U1xgPf 
z8Z!yBLO&j(w*X~7sHE>l{?#EXHb#INc4de0xPR$evR}aU#ymS}%(H`e?guK$R>#Qs zsab-#h|7w<(+G}jnk1_YV{pE4Ci$in9C6-oa(dKYTy0XSU8S? zuzxxy<{R|pLAO)v&=qw9JQ z28=J6Ht&g`4Dn($>q&}-V5Vx;jBJuI$80rgTg}>5v%ZxwYmDyGgu14 zkNN0qjq5eemjDrrz;+}pMY1b5UL+GMlC4O#BH4=MTaXiadqwiQd~{YMw-(8jT?-y_ zjXN1~S8lngCB3cH3a&gGmtTIBf?Irs=f!uhX|gs=_BQ!I{n01{nAf1%3FAS~yTqLf zCakR2aB0FyLsj4nz)Hqe2TKbZ*9;r=c}6Zhj9z~#nb0cH4geJeXT{)yrGc|3)CNiG&o)WUc+tjM5Y6M z!04RNh@lgx9ReY!1MqE9BgZxJ|0p#&jJrL*Fp7De`6S2OKUc}(&U62AMKtOMU*;Q` zP`>qt20wFtn+~p%A0);N5ylj*F4!F^Z8wY$fjV2-ZPNmW(Z=Jfd5P~t!O+iCZ2?t$ zMfz$lNP6*`AleJktile(W2+XlT?Oo0c3Bx*XgPhD5A%662%`E}`f*kpOr-oQ*1HN` zt7uSw=GhHq4mPl|kUn9HRc@HKUtxgRSv(3_(U+MGhWNROd+N+KjnS+?p91%EPgNnX zGg+egT>n-@>!qe_uTO*Xktund#A>a#8;WU``|@j5c&X z`DN@s^0z{jW#x!r!);2Igy}ZZc_|Wzu2@I(0q_?P8dycRM{DuWF&)<&Cuh;72{RV%zso z@MWwP7WM^C|2|dEqbx`+n)VNE70r)91V&^tl<$s{1ZE - Produced by OmniGraffle 6.6.2 2017-09-24 19:39:13 +0000Canvas 1Layer 1remote endpointwebsocketsWebSocketCommonProtocolapplication logicreaderStreamReaderwriterStreamWriterpingsdicttransfer_data_taskTasknetworkread_frameread_data_frameread_messagebytesframesdataframeswrite_framemessagesdequerecvsendpingpongclosecontrolframesbytesframes + Produced by OmniGraffle 6.6.2 2019-07-07 08:38:24 +0000Canvas 1Layer 1remote endpointwebsocketsWebSocketCommonProtocolapplication logicreaderStreamReaderwriterStreamWriterpingsdicttransfer_data_taskTasknetworkread_frameread_data_frameread_messagebytesframesdataframeswrite_framemessagesdequerecvsendpingpongclosecontrolframesbytesframes From 8afffd60f4fa8993f6d29965767dcedec4bfceb9 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 7 Jul 2019 11:38:03 +0200 Subject: [PATCH 125/281] Update design document. It wasn't updated when fragmentation was implemented. Fix #642. --- docs/design.rst | 57 ++++++++++++++++++++++++++++++++----------------- 1 file changed, 38 insertions(+), 19 deletions(-) diff --git a/docs/design.rst b/docs/design.rst index 19cda16bb..75887d453 100644 --- a/docs/design.rst +++ b/docs/design.rst @@ -9,7 +9,7 @@ with the specification of the WebSocket protocol in :rfc:`6455`. It's primarily intended at maintainers. It may also be useful for users who wish to understand what happens under the hood. -.. warning: +.. warning:: Internals described in this document may change at any time. @@ -43,7 +43,7 @@ Transitions happen in the following places: close frame, this does the right thing regardless of which side started the :ref:`closing handshake `; also in :meth:`~protocol.WebSocketCommonProtocol.fail_connection` which duplicates - a few lines of code from `write_close_frame()` and `write_frame()`; + a few lines of code from ``write_close_frame()`` and ``write_frame()``; - ``* -> CLOSED``: in :meth:`~protocol.WebSocketCommonProtocol.connection_lost` which is always called exactly once when the TCP connection is closed. @@ -231,15 +231,17 @@ happens naturally in many use cases, it cannot be relied upon. Then :meth:`~protocol.WebSocketCommonProtocol.recv` fetches the next message from the :attr:`~protocol.WebSocketCommonProtocol.messages` queue, with some -complexity added for handling termination correctly. +complexity added for handling backpressure and termination correctly. Sending data ............ The right side of the diagram shows how ``websockets`` sends data. -:meth:`~protocol.WebSocketCommonProtocol.send` writes a single data frame -containing the message. Fragmentation isn't supported at this time. 
+:meth:`~protocol.WebSocketCommonProtocol.send` writes one or several data +frames containing the message. While sending a fragmented message, concurrent +calls to :meth:`~protocol.WebSocketCommonProtocol.send` are put on hold until +all fragments are sent. This makes concurrent calls safe. :meth:`~protocol.WebSocketCommonProtocol.ping` writes a ping frame and yields a :class:`~asyncio.Future` which will be completed when a matching pong @@ -420,8 +422,10 @@ words, they must be shielded from cancellation. :meth:`~protocol.WebSocketCommonProtocol.recv` waits for the next message in the queue or for :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` to terminate, whichever comes first. It relies on :func:`~asyncio.wait` for -waiting on two tasks in parallel. As a consequence, even though it's waiting -on the transfer data task, it doesn't propagate cancellation to that task. +waiting on two futures in parallel. As a consequence, even though it's waiting +on a :class:`~asyncio.Future` signalling the next message and on +:attr:`~protocol.WebSocketCommonProtocol.transfer_data_task`, it doesn't +propagate cancellation to them. :meth:`~protocol.WebSocketCommonProtocol.ensure_open` is called by :meth:`~protocol.WebSocketCommonProtocol.send`, @@ -535,18 +539,33 @@ For each connection, the sending side contains these buffers: Concurrency ----------- -Calling any combination of :meth:`~protocol.WebSocketCommonProtocol.recv`, +Awaiting any combination of :meth:`~protocol.WebSocketCommonProtocol.recv`, :meth:`~protocol.WebSocketCommonProtocol.send`, :meth:`~protocol.WebSocketCommonProtocol.close` :meth:`~protocol.WebSocketCommonProtocol.ping`, or -:meth:`~protocol.WebSocketCommonProtocol.pong` concurrently is safe, -including multiple calls to the same method. - -As shown above, receiving frames is independent from sending frames. That -isolates :meth:`~protocol.WebSocketCommonProtocol.recv`, which receives -frames, from the other methods, which send frames. - -Methods that send frames also support concurrent calls. While the connection -is open, each frame is sent with a single write. Combined with the concurrency -model of :mod:`asyncio`, this enforces serialization. After the connection is -closed, sending a frame raises :exc:`~websockets.exceptions.ConnectionClosed`. +:meth:`~protocol.WebSocketCommonProtocol.pong` concurrently is safe, including +multiple calls to the same method, with one exception and one limitation. + +* **Only one coroutine can receive messages at a time.** This constraint + avoids non-deterministic behavior (and simplifies the implementation). If a + coroutine is awaiting :meth:`~protocol.WebSocketCommonProtocol.recv`, + awaiting it again in another coroutine raises :exc:`RuntimeError`. + +* **Sending a fragmented message forces serialization.** Indeed, the WebSocket + protocol doesn't support multiplexing messages. If a coroutine is awaiting + :meth:`~protocol.WebSocketCommonProtocol.send` to send a fragmented message, + awaiting it again in another coroutine waits until the first call completes. + This will be transparent in many cases. It may be a concern if the + fragmented message is generated slowly by an asynchronous iterator. + +Receiving frames is independent from sending frames. This isolates +:meth:`~protocol.WebSocketCommonProtocol.recv`, which receives frames, from +the other methods, which send frames. + +While the connection is open, each frame is sent with a single write. 
Combined +with the concurrency model of :mod:`asyncio`, this enforces serialization. The +only other requirement is to prevent interleaving other data frames in the +middle of a fragmented message. + +After the connection is closed, sending a frame raises +:exc:`~websockets.exceptions.ConnectionClosed`, which is safe. From 1585da2aa7da6f7984ac1c55a05784619cf974c4 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 7 Jul 2019 14:45:20 +0200 Subject: [PATCH 126/281] Improve exception hierarchy. * Group and sort exceptions from most common to least common. * Add a base WebSocketException. * Rename WebSocketProtocolError. * Document exception tree. Fix #270. --- docs/changelog.rst | 7 + docs/design.rst | 2 +- src/websockets/exceptions.py | 314 +++++++++++++++++++++-------------- src/websockets/framing.py | 26 +-- src/websockets/protocol.py | 10 +- tests/test_exceptions.py | 95 ++++++----- tests/test_framing.py | 20 +-- 7 files changed, 278 insertions(+), 196 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index c79f0f0dd..12fc57749 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -33,6 +33,13 @@ Changelog If you were setting ``max_queue=0`` to make the queue of incoming messages unbounded, change it to ``max_queue=None``. +.. note:: + + **Version 8.0 renames the** ``WebSocketProtocolError`` **exception** + :exc:`ProtocolError` **.** + + For backwards compatibility, a ``WebSocketProtocolError`` is provided. + .. note:: **Version 8.0 adds the reason phrase to the return type of the low-level diff --git a/docs/design.rst b/docs/design.rst index 75887d453..74279b87f 100644 --- a/docs/design.rst +++ b/docs/design.rst @@ -423,7 +423,7 @@ words, they must be shielded from cancellation. the queue or for :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` to terminate, whichever comes first. It relies on :func:`~asyncio.wait` for waiting on two futures in parallel. As a consequence, even though it's waiting -on a :class:`~asyncio.Future` signalling the next message and on +on a :class:`~asyncio.Future` signaling the next message and on :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task`, it doesn't propagate cancellation to them. 
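As a rough sketch of the "wait on two futures in parallel" pattern described above (simplified names, not the actual implementation)::

    import asyncio

    async def recv_sketch(pop_message_waiter, transfer_data_task):
        # Wait until a message is available or the transfer task terminates,
        # whichever comes first. asyncio.wait() never cancels its arguments,
        # so cancelling this coroutine doesn't cancel transfer_data_task.
        await asyncio.wait(
            [pop_message_waiter, transfer_data_task],
            return_when=asyncio.FIRST_COMPLETED,
        )
        if pop_message_waiter.done():
            ...  # pop and return the next message from the queue
        else:
            ...  # the connection terminated; raise ConnectionClosed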
diff --git a/src/websockets/exceptions.py b/src/websockets/exceptions.py index ce2c1e64b..f03ab72f2 100644 --- a/src/websockets/exceptions.py +++ b/src/websockets/exceptions.py @@ -1,3 +1,32 @@ +""" +:mod:`websockets.exceptions` defines the following exception hierarchy: + +* :exc:`WebSocketException` + * :exc:`ConnectionClosed` + * :exc:`ConnectionClosedError` + * :exc:`ConnectionClosedOK` + * :exc:`InvalidHandshake` + * :exc:`SecurityError` + * :exc:`InvalidMessage` + * :exc:`InvalidHeader` + * :exc:`InvalidHeaderFormat` + * :exc:`InvalidHeaderValue` + * :exc:`InvalidOrigin` + * :exc:`InvalidUpgrade` + * :exc:`InvalidStatusCode` + * :exc:`NegotiationError` + * :exc:`DuplicateParameter` + * :exc:`InvalidParameterName` + * :exc:`InvalidParameterValue` + * :exc:`AbortHandshake` + * :exc:`RedirectHandshake` + * :exc:`InvalidState` + * :exc:`InvalidURI` + * :exc:`PayloadTooBig` + * :exc:`ProtocolError` + +""" + import http from typing import Optional @@ -5,88 +34,152 @@ __all__ = [ - "AbortHandshake", + "WebSocketException", "ConnectionClosed", "ConnectionClosedError", "ConnectionClosedOK", - "DuplicateParameter", "InvalidHandshake", + "SecurityError", + "InvalidMessage", "InvalidHeader", "InvalidHeaderFormat", "InvalidHeaderValue", - "InvalidMessage", "InvalidOrigin", + "InvalidUpgrade", + "InvalidStatusCode", + "NegotiationError", + "DuplicateParameter", "InvalidParameterName", "InvalidParameterValue", + "AbortHandshake", + "RedirectHandshake", "InvalidState", - "InvalidStatusCode", - "InvalidUpgrade", "InvalidURI", - "NegotiationError", "PayloadTooBig", - "RedirectHandshake", - "SecurityError", - "WebSocketProtocolError", + "ProtocolError", ] -class InvalidHandshake(Exception): +class WebSocketException(Exception): """ - Exception raised when a handshake request or response is invalid. + Base class for all exceptions defined by :mod:`websockets`. """ -class AbortHandshake(InvalidHandshake): +CLOSE_CODES = { + 1000: "OK", + 1001: "going away", + 1002: "protocol error", + 1003: "unsupported type", + # 1004 is reserved + 1005: "no status code [internal]", + 1006: "connection closed abnormally [internal]", + 1007: "invalid data", + 1008: "policy violation", + 1009: "message too big", + 1010: "extension required", + 1011: "unexpected error", + 1015: "TLS failure [internal]", +} + + +def format_close(code: int, reason: str) -> str: """ - Exception raised to abort a handshake and return a HTTP response. + Display a human-readable version of the close code and reason. """ + if 3000 <= code < 4000: + explanation = "registered" + elif 4000 <= code < 5000: + explanation = "private use" + else: + explanation = CLOSE_CODES.get(code, "unknown") + result = f"code = {code} ({explanation}), " - def __init__( - self, status: http.HTTPStatus, headers: HeadersLike, body: bytes = b"" - ) -> None: - self.status = status - self.headers = Headers(headers) - self.body = body - message = f"HTTP {status}, {len(self.headers)} headers, {len(body)} bytes" + if reason: + result += f"reason = {reason}" + else: + result += "no reason" + + return result + + +class ConnectionClosed(WebSocketException): + """ + Raised when trying to interact with a closed connection. + + Provides the connection close code and reason in its ``code`` and + ``reason`` attributes respectively. 
+ + """ + + def __init__(self, code: int, reason: str) -> None: + self.code = code + self.reason = reason + message = "WebSocket connection is closed: " + message += format_close(code, reason) super().__init__(message) -class SecurityError(InvalidHandshake): +class ConnectionClosedError(ConnectionClosed): """ - Exception raised when a HTTP request or response breaks security rules. + Like :exc:`ConnectionClosed`, when the connection terminated with an error. + + This means the close code is different from 1000 (OK) and 1001 (going away). """ + def __init__(self, code: int, reason: str) -> None: + assert code != 1000 and code != 1001 + super().__init__(code, reason) + -class RedirectHandshake(InvalidHandshake): +class ConnectionClosedOK(ConnectionClosed): """ - Exception raised when a handshake gets redirected. + Like :exc:`ConnectionClosed`, when the connection terminated properly. + + This means the close code is 1000 (OK) or 1001 (going away). """ - def __init__(self, uri: str) -> None: - self.uri = uri + def __init__(self, code: int, reason: str) -> None: + assert code == 1000 or code == 1001 + super().__init__(code, reason) - def __str__(self) -> str: - return f"redirect to {self.uri}" + +class InvalidHandshake(WebSocketException): + """ + Raised during the handshake when the WebSocket connection fails. + + """ + + +class SecurityError(InvalidHandshake): + """ + Raised when a handshake request or response breaks a security rule. + + Security limits are hard coded. + + """ class InvalidMessage(InvalidHandshake): """ - Exception raised when the HTTP message in a handshake request is malformed. + Raised when a handshake request or response is malformed. """ class InvalidHeader(InvalidHandshake): """ - Exception raised when a HTTP header doesn't have a valid format or value. + Raised when a HTTP header doesn't have a valid format or value. """ def __init__(self, name: str, value: Optional[str] = None) -> None: + self.name = name + self.value = value if value is None: message = f"missing {name} header" elif value == "": @@ -98,32 +191,30 @@ def __init__(self, name: str, value: Optional[str] = None) -> None: class InvalidHeaderFormat(InvalidHeader): """ - Exception raised when a Sec-WebSocket-* HTTP header cannot be parsed. + Raised when a HTTP header cannot be parsed. + + The format of the header doesn't match the grammar for that header. """ def __init__(self, name: str, error: str, header: str, pos: int) -> None: + self.name = name error = f"{error} at {pos} in {header}" super().__init__(name, error) class InvalidHeaderValue(InvalidHeader): """ - Exception raised when a Sec-WebSocket-* HTTP header has a wrong value. - - """ + Raised when a HTTP header has a wrong value. - -class InvalidUpgrade(InvalidHeader): - """ - Exception raised when a Upgrade or Connection header isn't correct. + The format of the header is correct but a value isn't acceptable. """ class InvalidOrigin(InvalidHeader): """ - Exception raised when the Origin header in a request isn't allowed. + Raised when the Origin header in a request isn't allowed. """ @@ -131,11 +222,18 @@ def __init__(self, origin: Optional[str]) -> None: super().__init__("Origin", origin) +class InvalidUpgrade(InvalidHeader): + """ + Raised when the Upgrade or Connection header isn't correct. + + """ + + class InvalidStatusCode(InvalidHandshake): """ - Exception raised when a handshake response status code is invalid. + Raised when a handshake response status code is invalid. - Provides the integer status code in its ``status_code`` attribute. 
+ The integer status code is available in the ``status_code`` attribute. """ @@ -147,139 +245,102 @@ def __init__(self, status_code: int) -> None: class NegotiationError(InvalidHandshake): """ - Exception raised when negotiating an extension fails. + Raised when negotiating an extension fails. """ -class InvalidParameterName(NegotiationError): +class DuplicateParameter(NegotiationError): """ - Exception raised when a parameter name in an extension header is invalid. + Raised when a parameter name is repeated in an extension header. """ def __init__(self, name: str) -> None: self.name = name - message = f"invalid parameter name: {name}" + message = f"duplicate parameter: {name}" super().__init__(message) -class InvalidParameterValue(NegotiationError): +class InvalidParameterName(NegotiationError): """ - Exception raised when a parameter value in an extension header is invalid. + Raised when a parameter name in an extension header is invalid. """ - def __init__(self, name: str, value: Optional[str]) -> None: + def __init__(self, name: str) -> None: self.name = name - self.value = value - message = f"invalid value for parameter {name}: {value}" + message = f"invalid parameter name: {name}" super().__init__(message) -class DuplicateParameter(NegotiationError): +class InvalidParameterValue(NegotiationError): """ - Exception raised when a parameter name is repeated in an extension header. + Raised when a parameter value in an extension header is invalid. """ - def __init__(self, name: str) -> None: + def __init__(self, name: str, value: Optional[str]) -> None: self.name = name - message = f"duplicate parameter: {name}" + self.value = value + if value is None: + message = f"missing value for parameter {name}" + elif value == "": + message = f"empty value for parameter {name}" + else: + message = f"invalid value for parameter {name}: {value}" super().__init__(message) -class InvalidState(Exception): +class AbortHandshake(InvalidHandshake): """ - Exception raised when an operation is forbidden in the current state. + Raised to abort the handshake on purpose and return a HTTP response. - """ + This exception is an implementation detail. + The public API is :meth:`~server.WebSocketServerProtocol.process_request`. -CLOSE_CODES = { - 1000: "OK", - 1001: "going away", - 1002: "protocol error", - 1003: "unsupported type", - # 1004 is reserved - 1005: "no status code [internal]", - 1006: "connection closed abnormally [internal]", - 1007: "invalid data", - 1008: "policy violation", - 1009: "message too big", - 1010: "extension required", - 1011: "unexpected error", - 1015: "TLS failure [internal]", -} - - -def format_close(code: int, reason: str) -> str: """ - Display a human-readable version of the close code and reason. - """ - if 3000 <= code < 4000: - explanation = "registered" - elif 4000 <= code < 5000: - explanation = "private use" - else: - explanation = CLOSE_CODES.get(code, "unknown") - result = f"code = {code} ({explanation}), " - - if reason: - result += f"reason = {reason}" - else: - result += "no reason" - - return result + def __init__( + self, status: http.HTTPStatus, headers: HeadersLike, body: bytes = b"" + ) -> None: + self.status = status + self.headers = Headers(headers) + self.body = body + message = f"HTTP {status}, {len(self.headers)} headers, {len(body)} bytes" + super().__init__(message) -class ConnectionClosed(InvalidState): +class RedirectHandshake(InvalidHandshake): """ - Exception raised when trying to read or write on a closed connection. 
+ Raised when a handshake gets redirected. - Provides the connection close code and reason in its ``code`` and - ``reason`` attributes respectively. + This exception is an implementation detail. """ - def __init__(self, code: int, reason: str) -> None: - self.code = code - self.reason = reason - message = "WebSocket connection is closed: " - message += format_close(code, reason) - super().__init__(message) - + def __init__(self, uri: str) -> None: + self.uri = uri -class ConnectionClosedError(ConnectionClosed): - """ - Like :exc:`ConnectionClosed`, when the connection terminated with an error. + def __str__(self) -> str: + return f"redirect to {self.uri}" - This means the close code is different from 1000 (OK) and 1001 (going away). +class InvalidState(WebSocketException, AssertionError): """ + Raised when an operation is forbidden in the current state. - def __init__(self, code: int, reason: str) -> None: - assert code != 1000 and code != 1001 - super().__init__(code, reason) - + This exception is an implementation detail. -class ConnectionClosedOK(ConnectionClosed): - """ - Like :exc:`ConnectionClosed`, when the connection terminated properly. - - This means the close code is 1000 (OK) or 1001 (going away). + It should never be raised in normal circumstances. """ - def __init__(self, code: int, reason: str) -> None: - assert code == 1000 or code == 1001 - super().__init__(code, reason) - -class InvalidURI(Exception): +class InvalidURI(WebSocketException): """ - Exception raised when an URI isn't a valid websocket URI. + Raised when connecting to an URI that isn't a valid WebSocket URI. """ @@ -289,15 +350,18 @@ def __init__(self, uri: str) -> None: super().__init__(message) -class PayloadTooBig(Exception): +class PayloadTooBig(WebSocketException): """ - Exception raised when a frame's payload exceeds the maximum size. + Raised when receiving a frame with a payload exceeding the maximum size. """ -class WebSocketProtocolError(Exception): +class ProtocolError(WebSocketException): """ - Internal exception raised when the remote side breaks the protocol. + Raised when the other side breaks the protocol. 
""" + + +WebSocketProtocolError = ProtocolError # for backwards compatibility diff --git a/src/websockets/framing.py b/src/websockets/framing.py index ec87665ef..478a7b05a 100644 --- a/src/websockets/framing.py +++ b/src/websockets/framing.py @@ -15,7 +15,7 @@ import struct from typing import Any, Awaitable, Callable, NamedTuple, Optional, Sequence, Tuple -from .exceptions import PayloadTooBig, WebSocketProtocolError +from .exceptions import PayloadTooBig, ProtocolError from .typing import Data @@ -113,7 +113,7 @@ async def read( in reverse order :raises ~websockets.exceptions.PayloadTooBig: if the frame exceeds ``max_size`` - :raises ~websockets.exceptions.WebSocketProtocolError: if the frame + :raises ~websockets.exceptions.ProtocolError: if the frame contains incorrect values """ @@ -129,7 +129,7 @@ async def read( opcode = head1 & 0b00001111 if (True if head2 & 0b10000000 else False) != mask: - raise WebSocketProtocolError("incorrect masking") + raise ProtocolError("incorrect masking") length = head2 & 0b01111111 if length == 126: @@ -178,7 +178,7 @@ def write( :param extensions: list of classes with an ``encode()`` method that transform the frame and return a new frame; extensions are applied in order - :raises ~websockets.exceptions.WebSocketProtocolError: if the frame + :raises ~websockets.exceptions.ProtocolError: if the frame contains incorrect values """ @@ -235,7 +235,7 @@ def check(frame) -> None: """ Check that reserved bits and opcode have acceptable values. - :raises ~websockets.exceptions.WebSocketProtocolError: if a reserved + :raises ~websockets.exceptions.ProtocolError: if a reserved bit or the opcode is invalid """ @@ -243,17 +243,17 @@ def check(frame) -> None: # but it's the instance of class to which this method is bound. if frame.rsv1 or frame.rsv2 or frame.rsv3: - raise WebSocketProtocolError("reserved bits must be 0") + raise ProtocolError("reserved bits must be 0") if frame.opcode in DATA_OPCODES: return elif frame.opcode in CTRL_OPCODES: if len(frame.data) > 125: - raise WebSocketProtocolError("control frame too long") + raise ProtocolError("control frame too long") if not frame.fin: - raise WebSocketProtocolError("fragmented control frame") + raise ProtocolError("fragmented control frame") else: - raise WebSocketProtocolError(f"invalid opcode: {frame.opcode}") + raise ProtocolError(f"invalid opcode: {frame.opcode}") def prepare_data(data: Data) -> Tuple[int, bytes]: @@ -314,7 +314,7 @@ def parse_close(data: bytes) -> Tuple[int, str]: Return ``(code, reason)``. - :raises ~websockets.exceptions.WebSocketProtocolError: if data is ill-formed + :raises ~websockets.exceptions.ProtocolError: if data is ill-formed :raises UnicodeDecodeError: if the reason isn't valid UTF-8 """ @@ -328,7 +328,7 @@ def parse_close(data: bytes) -> Tuple[int, str]: return 1005, "" else: assert length == 1 - raise WebSocketProtocolError("close frame too short") + raise ProtocolError("close frame too short") def serialize_close(code: int, reason: str) -> bytes: @@ -346,12 +346,12 @@ def check_close(code: int) -> None: """ Check that the close code has an acceptable value for a close frame. 
- :raises ~websockets.exceptions.WebSocketProtocolError: if the close code + :raises ~websockets.exceptions.ProtocolError: if the close code is invalid """ if not (code in EXTERNAL_CLOSE_CODES or 3000 <= code < 5000): - raise WebSocketProtocolError("invalid status code") + raise ProtocolError("invalid status code") # at the bottom to allow circular import, because Extension depends on Frame diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 7d1560927..42ddf0763 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -35,7 +35,7 @@ ConnectionClosedOK, InvalidState, PayloadTooBig, - WebSocketProtocolError, + ProtocolError, ) from .extensions.base import Extension from .framing import * @@ -811,7 +811,7 @@ async def transfer_data(self) -> None: # twice and failing the connection again. raise - except WebSocketProtocolError as exc: + except ProtocolError as exc: self.transfer_data_exc = exc self.fail_connection(1002) @@ -861,7 +861,7 @@ async def read_message(self) -> Optional[Data]: elif frame.opcode == OP_BINARY: text = False else: # frame.opcode == OP_CONT - raise WebSocketProtocolError("unexpected opcode") + raise ProtocolError("unexpected opcode") # Shortcut for the common case - no fragmentation if frame.fin: @@ -906,9 +906,9 @@ def append(frame: Frame) -> None: while not frame.fin: frame = await self.read_data_frame(max_size=max_size) if frame is None: - raise WebSocketProtocolError("incomplete fragmented message") + raise ProtocolError("incomplete fragmented message") if frame.opcode != OP_CONT: - raise WebSocketProtocolError("unexpected opcode") + raise ProtocolError("unexpected opcode") append(frame) # mypy cannot figure out that chunks have the proper type. diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index 2cbd78671..72b1076ab 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -9,21 +9,46 @@ def test_str(self): for exception, exception_str in [ # fmt: off ( - InvalidHandshake("invalid request"), - "invalid request", + WebSocketException("something went wrong"), + "something went wrong", ), ( - AbortHandshake(200, Headers(), b"OK\n"), - "HTTP 200, 0 headers, 3 bytes", + ConnectionClosed(1000, ""), + "WebSocket connection is closed: code = 1000 " + "(OK), no reason", ), ( - SecurityError("redirect from WSS to WS"), - "redirect from WSS to WS", - + ConnectionClosed(1006, None), + "WebSocket connection is closed: code = 1006 " + "(connection closed abnormally [internal]), no reason" ), ( - RedirectHandshake("wss://example.com"), - "redirect to wss://example.com", + ConnectionClosed(3000, None), + "WebSocket connection is closed: code = 3000 " + "(registered), no reason" + ), + ( + ConnectionClosed(4000, None), + "WebSocket connection is closed: code = 4000 " + "(private use), no reason" + ), + ( + ConnectionClosedError(1016, None), + "WebSocket connection is closed: code = 1016 " + "(unknown), no reason" + ), + ( + ConnectionClosedOK(1001, "bye"), + "WebSocket connection is closed: code = 1001 " + "(going away), reason = bye", + ), + ( + InvalidHandshake("invalid request"), + "invalid request", + ), + ( + SecurityError("redirect from WSS to WS"), + "redirect from WSS to WS", ), ( InvalidMessage("malformed HTTP message"), @@ -56,6 +81,10 @@ def test_str(self): InvalidHeaderValue("Sec-WebSocket-Version", "42"), "invalid Sec-WebSocket-Version header: 42", ), + ( + InvalidOrigin("http://bad.origin"), + "invalid Origin header: http://bad.origin", + ), ( InvalidUpgrade("Upgrade"), "missing Upgrade header", 
@@ -64,10 +93,6 @@ def test_str(self): InvalidUpgrade("Connection", "websocket"), "invalid Connection header: websocket", ), - ( - InvalidOrigin("http://bad.origin"), - "invalid Origin header: http://bad.origin", - ), ( InvalidStatusCode(403), "server rejected WebSocket connection: HTTP 403", @@ -76,51 +101,37 @@ def test_str(self): NegotiationError("unsupported subprotocol: spam"), "unsupported subprotocol: spam", ), - ( - InvalidParameterName("|"), - "invalid parameter name: |", - ), - ( - InvalidParameterValue("a", "|"), - "invalid value for parameter a: |", - ), ( DuplicateParameter("a"), "duplicate parameter: a", ), ( - InvalidState("WebSocket connection isn't established yet"), - "WebSocket connection isn't established yet", + InvalidParameterName("|"), + "invalid parameter name: |", ), ( - ConnectionClosed(1000, ""), - "WebSocket connection is closed: code = 1000 " - "(OK), no reason", + InvalidParameterValue("a", None), + "missing value for parameter a", ), ( - ConnectionClosedOK(1001, "bye"), - "WebSocket connection is closed: code = 1001 " - "(going away), reason = bye", + InvalidParameterValue("a", ""), + "empty value for parameter a", ), ( - ConnectionClosed(1006, None), - "WebSocket connection is closed: code = 1006 " - "(connection closed abnormally [internal]), no reason" + InvalidParameterValue("a", "|"), + "invalid value for parameter a: |", ), ( - ConnectionClosedError(1016, None), - "WebSocket connection is closed: code = 1016 " - "(unknown), no reason" + AbortHandshake(200, Headers(), b"OK\n"), + "HTTP 200, 0 headers, 3 bytes", ), ( - ConnectionClosed(3000, None), - "WebSocket connection is closed: code = 3000 " - "(registered), no reason" + RedirectHandshake("wss://example.com"), + "redirect to wss://example.com", ), ( - ConnectionClosed(4000, None), - "WebSocket connection is closed: code = 4000 " - "(private use), no reason" + InvalidState("WebSocket connection isn't established yet"), + "WebSocket connection isn't established yet", ), ( InvalidURI("|"), @@ -131,7 +142,7 @@ def test_str(self): "payload length exceeds limit: 2 > 1 bytes", ), ( - WebSocketProtocolError("invalid opcode: 7"), + ProtocolError("invalid opcode: 7"), "invalid opcode: 7", ), # fmt: on diff --git a/tests/test_framing.py b/tests/test_framing.py index 430faf6e1..9e6f1871d 100644 --- a/tests/test_framing.py +++ b/tests/test_framing.py @@ -3,7 +3,7 @@ import unittest import unittest.mock -from websockets.exceptions import PayloadTooBig, WebSocketProtocolError +from websockets.exceptions import PayloadTooBig, ProtocolError from websockets.framing import * from .utils import AsyncioTestCase @@ -112,7 +112,7 @@ def test_payload_too_big(self): def test_bad_reserved_bits(self): for encoded in [b"\xc0\x00", b"\xa0\x00", b"\x90\x00"]: with self.subTest(encoded=encoded): - with self.assertRaises(WebSocketProtocolError): + with self.assertRaises(ProtocolError): self.decode(encoded) def test_good_opcode(self): @@ -125,26 +125,26 @@ def test_bad_opcode(self): for opcode in list(range(0x03, 0x08)) + list(range(0x0B, 0x10)): encoded = bytes([0x80 | opcode, 0]) with self.subTest(encoded=encoded): - with self.assertRaises(WebSocketProtocolError): + with self.assertRaises(ProtocolError): self.decode(encoded) def test_mask_flag(self): # Mask flag correctly set. self.decode(b"\x80\x80\x00\x00\x00\x00", mask=True) # Mask flag incorrectly unset. - with self.assertRaises(WebSocketProtocolError): + with self.assertRaises(ProtocolError): self.decode(b"\x80\x80\x00\x00\x00\x00") # Mask flag correctly unset. 
self.decode(b"\x80\x00") # Mask flag incorrectly set. - with self.assertRaises(WebSocketProtocolError): + with self.assertRaises(ProtocolError): self.decode(b"\x80\x00", mask=True) def test_control_frame_max_length(self): # At maximum allowed length. self.decode(b"\x88\x7e\x00\x7d" + 125 * b"a") # Above maximum allowed length. - with self.assertRaises(WebSocketProtocolError): + with self.assertRaises(ProtocolError): self.decode(b"\x88\x7e\x00\x7e" + 126 * b"a") def test_prepare_data_str(self): @@ -201,7 +201,7 @@ def test_fragmented_control_frame(self): # Fin bit correctly set. self.decode(b"\x88\x00") # Fin bit incorrectly unset. - with self.assertRaises(WebSocketProtocolError): + with self.assertRaises(ProtocolError): self.decode(b"\x08\x00") def test_parse_close_and_serialize_close(self): @@ -212,15 +212,15 @@ def test_parse_close_empty(self): self.assertEqual(parse_close(b""), (1005, "")) def test_parse_close_errors(self): - with self.assertRaises(WebSocketProtocolError): + with self.assertRaises(ProtocolError): parse_close(b"\x03") - with self.assertRaises(WebSocketProtocolError): + with self.assertRaises(ProtocolError): parse_close(b"\x03\xe7") with self.assertRaises(UnicodeDecodeError): parse_close(b"\x03\xe8\xff\xff") def test_serialize_close_errors(self): - with self.assertRaises(WebSocketProtocolError): + with self.assertRaises(ProtocolError): serialize_close(999, "") def test_extensions(self): From c3681322989aab7c49b3bf94082690764f10c0a2 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 7 Jul 2019 16:56:06 +0200 Subject: [PATCH 127/281] Use a plain dict to store pings. This is possible since Python 3.6 because dict preserves order. Also remove dependency on binascii for converting bytes to hex with bytes.hex() which is available since Python 3.5. Fix #645. --- src/websockets/protocol.py | 42 ++++++++++++++++++++------------------ 1 file changed, 22 insertions(+), 20 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 42ddf0763..ef935caf5 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -8,7 +8,6 @@ """ import asyncio -import binascii import codecs import collections import enum @@ -22,6 +21,7 @@ AsyncIterator, Awaitable, Deque, + Dict, Iterable, List, Optional, @@ -274,9 +274,7 @@ def __init__( self._fragmented_message_waiter: Optional[asyncio.Future[None]] = None # Mapping of ping IDs to waiters, in chronological order. - self.pings: collections.OrderedDict[ - bytes, asyncio.Future[None] - ] = collections.OrderedDict() + self.pings: Dict[bytes, asyncio.Future[None]] = {} # Task running the data transfer. self.transfer_data_task: asyncio.Task[None] @@ -954,23 +952,29 @@ async def read_data_frame(self, max_size: int) -> Optional[Frame]: elif frame.opcode == OP_PONG: # Acknowledge pings on solicited pongs. if frame.data in self.pings: + logger.debug( + "%s - received solicited pong: %s", + self.side, + frame.data.hex() or "[empty]", + ) # Acknowledge all pings up to the one matching this pong. 
ping_id = None ping_ids = [] - while ping_id != frame.data: - ping_id, pong_waiter = self.pings.popitem(last=False) + for ping_id, ping in self.pings.items(): ping_ids.append(ping_id) - if not pong_waiter.done(): - pong_waiter.set_result(None) - pong_hex = binascii.hexlify(frame.data).decode() or "[empty]" - logger.debug( - "%s - received solicited pong: %s", self.side, pong_hex - ) + if not ping.done(): + ping.set_result(None) + if ping_id == frame.data: + break + else: # pragma: no cover + assert False, "ping_id is in self.pings" + # Remove acknowledged pings from self.pings. + for ping_id in ping_ids: + del self.pings[ping_id] ping_ids = ping_ids[:-1] if ping_ids: pings_hex = ", ".join( - binascii.hexlify(ping_id).decode() or "[empty]" - for ping_id in ping_ids + ping_id.hex() or "[empty]" for ping_id in ping_ids ) plural = "s" if len(ping_ids) > 1 else "" logger.debug( @@ -980,9 +984,10 @@ async def read_data_frame(self, max_size: int) -> Optional[Frame]: pings_hex, ) else: - pong_hex = binascii.hexlify(frame.data).decode() or "[empty]" logger.debug( - "%s - received unsolicited pong: %s", self.side, pong_hex + "%s - received unsolicited pong: %s", + self.side, + frame.data.hex() or "[empty]", ) # 5.6. Data Frames @@ -1259,10 +1264,7 @@ def abort_pings(self) -> None: ping.cancel() if self.pings: - pings_hex = ", ".join( - binascii.hexlify(ping_id).decode() or "[empty]" - for ping_id in self.pings - ) + pings_hex = ", ".join(ping_id.hex() or "[empty]" for ping_id in self.pings) plural = "s" if len(self.pings) > 1 else "" logger.debug( "%s - aborted pending ping%s: %s", self.side, plural, pings_hex From 31ba3fad91a6add437a02a369d282683d5333840 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 7 Jul 2019 17:28:53 +0200 Subject: [PATCH 128/281] Add a task to build the C extension. MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Declare all tasks as phony — this is only really necessary for build, but it can't hurt. --- Makefile | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/Makefile b/Makefile index d389623a7..c06de468e 100644 --- a/Makefile +++ b/Makefile @@ -1,3 +1,5 @@ +.PHONY: default style test coverage build clean + export PYTHONASYNCIODEBUG=1 export PYTHONPATH=src @@ -16,6 +18,9 @@ coverage: python -m coverage html python -m coverage report --show-missing --fail-under=100 +build: + python setup.py build_ext --inplace + clean: find . -name '*.pyc' -o -name '*.so' -delete find . -name __pycache__ -delete From c01ae626a30891b2302b5b2df80296b5345f118a Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 7 Jul 2019 17:30:47 +0200 Subject: [PATCH 129/281] Run all quality checks by default with make. --- Makefile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Makefile b/Makefile index c06de468e..d9e16fefe 100644 --- a/Makefile +++ b/Makefile @@ -3,6 +3,8 @@ export PYTHONASYNCIODEBUG=1 export PYTHONPATH=src +default: coverage style + style: isort --recursive src tests black src tests From d8a3a98bddedb1949d8da3c902fecdf7ce020c50 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 7 Jul 2019 17:26:17 +0200 Subject: [PATCH 130/281] Deprecate host, port and secure attrs of protocols. Also factor out logic for testing deprecations. Fix #644. 
--- docs/api.rst | 6 +-- docs/changelog.rst | 9 ++++ src/websockets/client.py | 6 +-- src/websockets/protocol.py | 42 ++++++++++++----- tests/test_client_server.py | 93 +++++++++++++++++++++---------------- tests/utils.py | 11 +++++ 6 files changed, 108 insertions(+), 59 deletions(-) diff --git a/docs/api.rst b/docs/api.rst index 28f41cc40..d265a91c2 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -44,7 +44,7 @@ Server .. automethod:: wait_closed .. autoattribute:: sockets - .. autoclass:: WebSocketServerProtocol(ws_handler, ws_server, *, host=None, port=None, secure=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, origins=None, extensions=None, subprotocols=None, extra_headers=None, process_request=None, select_subprotocol=None) + .. autoclass:: WebSocketServerProtocol(ws_handler, ws_server, *, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, origins=None, extensions=None, subprotocols=None, extra_headers=None, process_request=None, select_subprotocol=None) .. automethod:: handshake .. automethod:: process_request @@ -61,7 +61,7 @@ Client .. autofunction:: unix_connect(path, uri="ws://localhost/", *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origin=None, extensions=None, subprotocols=None, extra_headers=None, **kwds) :async: - .. autoclass:: WebSocketClientProtocol(*, host=None, port=None, secure=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, origin=None, extensions=None, subprotocols=None, extra_headers=None) + .. autoclass:: WebSocketClientProtocol(*, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, origin=None, extensions=None, subprotocols=None, extra_headers=None) .. automethod:: handshake @@ -70,7 +70,7 @@ Shared .. automodule:: websockets.protocol - .. autoclass:: WebSocketCommonProtocol(*, host=None, port=None, secure=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None) + .. autoclass:: WebSocketCommonProtocol(*, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None) .. automethod:: close .. automethod:: wait_closed diff --git a/docs/changelog.rst b/docs/changelog.rst index 12fc57749..cfad4a5b5 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -33,6 +33,15 @@ Changelog If you were setting ``max_queue=0`` to make the queue of incoming messages unbounded, change it to ``max_queue=None``. +.. note:: + + **Version 8.0 deprecates the** ``host`` **,** ``port`` **, and** ``secure`` + **attributes of** :class:`~protocol.WebSocketCommonProtocol`. + + Use :attr:`~protocol.WebSocketCommonProtocol.local_address` in servers and + :attr:`~protocol.WebSocketCommonProtocol.remote_address` in clients + instead of ``host`` and ``port``. + .. 
note:: **Version 8.0 renames the** ``WebSocketProtocolError`` **exception** diff --git a/src/websockets/client.py b/src/websockets/client.py index 89a624511..4d4a04cb8 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -437,9 +437,6 @@ def __init__( factory = functools.partial( create_protocol, - host=wsuri.host, - port=wsuri.port, - secure=wsuri.secure, ping_interval=ping_interval, ping_timeout=ping_timeout, close_timeout=close_timeout, @@ -448,6 +445,9 @@ def __init__( read_limit=read_limit, write_limit=write_limit, loop=loop, + host=wsuri.host, + port=wsuri.port, + secure=wsuri.secure, legacy_recv=legacy_recv, origin=origin, extensions=extensions, diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index ef935caf5..77dad5e1d 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -92,11 +92,6 @@ class WebSocketCommonProtocol(asyncio.StreamReaderProtocol): It raises a :exc:`~websockets.exceptions.ConnectionClosedError` exception when the connection is closed with any other code. - When initializing a :class:`WebSocketCommonProtocol`, the ``host``, - ``port``, and ``secure`` parameters are stored as attributes for backwards - compatibility. Consider using :attr:`local_address` on the server side and - :attr:`remote_address` on the client side instead. - Once the connection is open, a `Ping frame`_ is sent every ``ping_interval`` seconds. This serves as a keepalive. It helps keeping the connection open, especially in the presence of proxies with short @@ -185,9 +180,6 @@ class WebSocketCommonProtocol(asyncio.StreamReaderProtocol): def __init__( self, *, - host: Optional[str] = None, - port: Optional[int] = None, - secure: Optional[bool] = None, ping_interval: float = 20, ping_timeout: float = 20, close_timeout: Optional[float] = None, @@ -196,6 +188,10 @@ def __init__( read_limit: int = 2 ** 16, write_limit: int = 2 ** 16, loop: Optional[asyncio.AbstractEventLoop] = None, + # The following arguments are kept only for backwards compatibility. + host: Optional[str] = None, + port: Optional[int] = None, + secure: Optional[bool] = None, legacy_recv: bool = False, timeout: Optional[float] = None, ) -> None: @@ -208,9 +204,6 @@ def __init__( if close_timeout is None: close_timeout = timeout - self.host = host - self.port = port - self.secure = secure self.ping_interval = ping_interval self.ping_timeout = ping_timeout self.close_timeout = close_timeout @@ -225,6 +218,9 @@ def __init__( loop = asyncio.get_event_loop() self.loop = loop + self._host = host + self._port = port + self._secure = secure self.legacy_recv = legacy_recv # Configure read buffer limits. The high-water limit is defined by @@ -320,6 +316,23 @@ def connection_open(self) -> None: # Start the task that eventually closes the TCP connection. 
self.close_connection_task = self.loop.create_task(self.close_connection()) + @property + def host(self) -> Optional[str]: + alternative = "remote_address" if self.is_client else "local_address" + warnings.warn(f"use {alternative}[0] instead of host", DeprecationWarning) + return self._host + + @property + def port(self) -> Optional[int]: + alternative = "remote_address" if self.is_client else "local_address" + warnings.warn(f"use {alternative}[1] instead of port", DeprecationWarning) + return self._port + + @property + def secure(self) -> Optional[bool]: + warnings.warn(f"don't use secure", DeprecationWarning) + return self._secure + # Public API @property @@ -1144,7 +1157,12 @@ async def close_connection(self) -> None: # If connection_lost() was called, the TCP connection is closed. # However, if TLS is enabled, the transport still needs closing. # Else asyncio complains: ResourceWarning: unclosed transport. - if self.connection_lost_waiter.done() and not self.secure: + try: + writer_is_closing = self.writer.is_closing # type: ignore + except AttributeError: # pragma: no cover + # Python < 3.7 + writer_is_closing = self.writer.transport.is_closing + if self.connection_lost_waiter.done() and writer_is_closing(): return # Close the TCP connection. Buffers are flushed asynchronously. diff --git a/tests/test_client_server.py b/tests/test_client_server.py index aa4bebdc2..e74ec6bf6 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -51,7 +51,8 @@ async def handler(ws, path): - if path == "/attributes": + if path == "/deprecated_attributes": + await ws.recv() # delay that allows catching warnings await ws.send(repr((ws.host, ws.port, ws.secure))) elif path == "/close_timeout": await ws.send(repr(ws.close_timeout)) @@ -238,7 +239,7 @@ def setUp(self): def server_context(self): return None - def start_server(self, expected_warning=None, **kwargs): + def start_server(self, deprecation_warnings=None, **kwargs): # Disable compression by default in tests. kwargs.setdefault("compression", None) # Disable pings by default in tests. @@ -248,13 +249,8 @@ def start_server(self, expected_warning=None, **kwargs): start_server = serve(handler, "localhost", 0, **kwargs) self.server = self.loop.run_until_complete(start_server) - if expected_warning is None: - self.assertEqual(len(recorded_warnings), 0) - else: - self.assertEqual(len(recorded_warnings), 1) - actual_warning = recorded_warnings[0].message - self.assertEqual(str(actual_warning), expected_warning) - self.assertEqual(type(actual_warning), DeprecationWarning) + expected_warnings = [] if deprecation_warnings is None else deprecation_warnings + self.assertDeprecationWarnings(recorded_warnings, expected_warnings) def start_redirecting_server( self, status, include_location=True, force_insecure=False @@ -278,7 +274,7 @@ async def process_request(path, headers): self.redirecting_server = self.loop.run_until_complete(start_server) def start_client( - self, resource_name="/", user_info=None, expected_warning=None, **kwargs + self, resource_name="/", user_info=None, deprecation_warnings=None, **kwargs ): # Disable compression by default in tests. 
kwargs.setdefault("compression", None) @@ -295,13 +291,8 @@ def start_client( start_client = connect(server_uri, **kwargs) self.client = self.loop.run_until_complete(start_client) - if expected_warning is None: - self.assertEqual(len(recorded_warnings), 0) - else: - self.assertEqual(len(recorded_warnings), 1) - actual_warning = recorded_warnings[0].message - self.assertEqual(str(actual_warning), expected_warning) - self.assertEqual(type(actual_warning), DeprecationWarning) + expected_warnings = [] if deprecation_warnings is None else deprecation_warnings + self.assertDeprecationWarnings(recorded_warnings, expected_warnings) def stop_client(self): try: @@ -539,10 +530,9 @@ def test_process_request_argument_backwards_compatibility(self): with contextlib.closing(response): self.assertEqual(response.code, 200) - self.assertEqual(len(recorded_warnings), 1) - warning = recorded_warnings[0].message - self.assertEqual(str(warning), "declare process_request as a coroutine") - self.assertEqual(type(warning), DeprecationWarning) + self.assertDeprecationWarnings( + recorded_warnings, ["declare process_request as a coroutine"] + ) class ProcessRequestOKServerProtocol(WebSocketServerProtocol): async def process_request(self, path, request_headers): @@ -567,10 +557,9 @@ def test_process_request_override_backwards_compatibility(self): with contextlib.closing(response): self.assertEqual(response.code, 200) - self.assertEqual(len(recorded_warnings), 1) - warning = recorded_warnings[0].message - self.assertEqual(str(warning), "declare process_request as a coroutine") - self.assertEqual(type(warning), DeprecationWarning) + self.assertDeprecationWarnings( + recorded_warnings, ["declare process_request as a coroutine"] + ) def select_subprotocol_chat(client_subprotocols, server_subprotocols): return "chat" @@ -599,18 +588,37 @@ def test_select_subprotocol_override(self): self.assertEqual(self.client.subprotocol, "chat") @with_server() - @with_client("/attributes") - def test_protocol_attributes(self): + @with_client("/deprecated_attributes") + def test_protocol_deprecated_attributes(self): # The test could be connecting with IPv6 or IPv4. 
expected_client_attrs = [ server_socket.getsockname()[:2] + (self.secure,) for server_socket in self.server.sockets ] - client_attrs = (self.client.host, self.client.port, self.client.secure) + with warnings.catch_warnings(record=True) as recorded_warnings: + client_attrs = (self.client.host, self.client.port, self.client.secure) + self.assertDeprecationWarnings( + recorded_warnings, + [ + "use remote_address[0] instead of host", + "use remote_address[1] instead of port", + "don't use secure", + ], + ) self.assertIn(client_attrs, expected_client_attrs) expected_server_attrs = ("localhost", 0, self.secure) - server_attrs = self.loop.run_until_complete(self.client.recv()) + with warnings.catch_warnings(record=True) as recorded_warnings: + self.loop.run_until_complete(self.client.send("")) + server_attrs = self.loop.run_until_complete(self.client.recv()) + self.assertDeprecationWarnings( + recorded_warnings, + [ + "use local_address[0] instead of host", + "use local_address[1] instead of port", + "don't use secure", + ], + ) self.assertEqual(server_attrs, repr(expected_server_attrs)) @with_server() @@ -770,7 +778,7 @@ def test_server_create_protocol_function(self): @with_server( klass=UnauthorizedServerProtocol, - expected_warning="rename klass to create_protocol", + deprecation_warnings=["rename klass to create_protocol"], ) def test_server_klass_backwards_compatibility(self): self.assert_client_raises_code(401) @@ -778,7 +786,7 @@ def test_server_klass_backwards_compatibility(self): @with_server( create_protocol=ForbiddenServerProtocol, klass=UnauthorizedServerProtocol, - expected_warning="rename klass to create_protocol", + deprecation_warnings=["rename klass to create_protocol"], ) def test_server_create_protocol_over_klass(self): self.assert_client_raises_code(403) @@ -800,7 +808,7 @@ def test_client_create_protocol_function(self): @with_client( "/path", klass=FooClientProtocol, - expected_warning="rename klass to create_protocol", + deprecation_warnings=["rename klass to create_protocol"], ) def test_client_klass(self): self.assertIsInstance(self.client, FooClientProtocol) @@ -810,7 +818,7 @@ def test_client_klass(self): "/path", create_protocol=BarClientProtocol, klass=FooClientProtocol, - expected_warning="rename klass to create_protocol", + deprecation_warnings=["rename klass to create_protocol"], ) def test_client_create_protocol_over_klass(self): self.assertIsInstance(self.client, BarClientProtocol) @@ -821,14 +829,16 @@ def test_server_close_timeout(self): close_timeout = self.loop.run_until_complete(self.client.recv()) self.assertEqual(eval(close_timeout), 7) - @with_server(timeout=6, expected_warning="rename timeout to close_timeout") + @with_server(timeout=6, deprecation_warnings=["rename timeout to close_timeout"]) @with_client("/close_timeout") def test_server_timeout_backwards_compatibility(self): close_timeout = self.loop.run_until_complete(self.client.recv()) self.assertEqual(eval(close_timeout), 6) @with_server( - close_timeout=7, timeout=6, expected_warning="rename timeout to close_timeout" + close_timeout=7, + timeout=6, + deprecation_warnings=["rename timeout to close_timeout"], ) @with_client("/close_timeout") def test_server_close_timeout_over_timeout(self): @@ -842,7 +852,9 @@ def test_client_close_timeout(self): @with_server() @with_client( - "/close_timeout", timeout=6, expected_warning="rename timeout to close_timeout" + "/close_timeout", + timeout=6, + deprecation_warnings=["rename timeout to close_timeout"], ) def 
test_client_timeout_backwards_compatibility(self): self.assertEqual(self.client.close_timeout, 6) @@ -852,7 +864,7 @@ def test_client_timeout_backwards_compatibility(self): "/close_timeout", close_timeout=7, timeout=6, - expected_warning="rename timeout to close_timeout", + deprecation_warnings=["rename timeout to close_timeout"], ) def test_client_close_timeout_over_timeout(self): self.assertEqual(self.client.close_timeout, 7) @@ -1352,10 +1364,9 @@ def test_checking_lack_of_origin_succeeds_backwards_compatibility(self): ) client = self.loop.run_until_complete(connect(get_server_uri(server))) - self.assertEqual(len(recorded_warnings), 1) - warning = recorded_warnings[0].message - self.assertEqual(str(warning), "use None instead of '' in origins") - self.assertEqual(type(warning), DeprecationWarning) + self.assertDeprecationWarnings( + recorded_warnings, ["use None instead of '' in origins"] + ) self.loop.run_until_complete(client.send("Hello!")) self.assertEqual(self.loop.run_until_complete(client.recv()), "Hello!") diff --git a/tests/utils.py b/tests/utils.py index 2c067f8e6..983a91edf 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -69,6 +69,17 @@ def assertNoLogs(self, logger="websockets", level=logging.ERROR): level_name = logging.getLevelName(level) self.assertEqual(logs.output, [f"{level_name}:{logger}:dummy"]) + def assertDeprecationWarnings(self, recorded_warnings, expected_warnings): + """ + Check recorded deprecation warnings match a list of expected messages. + + """ + self.assertEqual(len(recorded_warnings), len(expected_warnings)) + for recorded, expected in zip(recorded_warnings, expected_warnings): + actual = recorded.message + self.assertEqual(str(actual), expected) + self.assertEqual(type(actual), DeprecationWarning) + # Unit for timeouts. May be increased on slow machines by setting the # WEBSOCKETS_TESTS_TIMEOUT_FACTOR environment variable. From 8d907e029996a5563ceb5b65e02406c442674733 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 7 Jul 2019 19:15:41 +0200 Subject: [PATCH 131/281] Explain close code 1006. Also remove redundant message from ConnectionClosed exception: it's pretty clear that websockets.exceptions.ConnectionClosed[Error|OK] means that a WebSocket connection is closed. This makes the close code more prominent and increases the chances that users will find the explanation in the FAQ. Fix #579. Fix #624. --- docs/cheatsheet.rst | 2 ++ docs/faq.rst | 54 ++++++++++++++++++++++++++++++++++-- src/websockets/exceptions.py | 4 +-- tests/test_exceptions.py | 18 ++++-------- 4 files changed, 61 insertions(+), 17 deletions(-) diff --git a/docs/cheatsheet.rst b/docs/cheatsheet.rst index 15a731084..f897326a6 100644 --- a/docs/cheatsheet.rst +++ b/docs/cheatsheet.rst @@ -60,6 +60,8 @@ Client * If you aren't using :func:`~client.connect` as a context manager, call :meth:`~protocol.WebSocketCommonProtocol.close` to terminate the connection. +.. _debugging: + Debugging --------- diff --git a/docs/faq.rst b/docs/faq.rst index 3dfdb5bcd..cea3f5358 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -132,8 +132,8 @@ Look at the ``ssl`` argument of :meth:`~asyncio.loop.create_connection`. :func:`connect` accepts the same arguments as :meth:`~asyncio.loop.create_connection`. -Architecture ------------- +Both sides +---------- How do I do two things in parallel? How do I integrate with another coroutine? .............................................................................. 
@@ -154,6 +154,56 @@ websockets doesn't have built-in publish / subscribe for these use cases. Depending on the scale of your service, a simple in-memory implementation may do the job or you may need an external publish / subscribe component. +What does ``ConnectionClosedError: code = 1006`` mean? +...................................................... + +If you're seeing this traceback in the logs of a server: + +.. code-block:: pytb + + Error in connection handler + Traceback (most recent call last): + ... + asyncio.streams.IncompleteReadError: 0 bytes read on a total of 2 expected bytes + + The above exception was the direct cause of the following exception: + + Traceback (most recent call last): + ... + websockets.exceptions.ConnectionClosedError: code = 1006 (connection closed abnormally [internal]), no reason + +or if a client crashes with this traceback: + +.. code-block:: pytb + + Traceback (most recent call last): + ... + ConnectionResetError: [Errno 54] Connection reset by peer + + The above exception was the direct cause of the following exception: + + Traceback (most recent call last): + ... + websockets.exceptions.ConnectionClosedError: code = 1006 (connection closed abnormally [internal]), no reason + +it means that the TCP connection was lost. As a consequence, the WebSocket +connection was closed without receiving a close frame, which is abnormal. + +You can catch and handle :exc:`~exceptions.ConnectionClosed` to prevent it +from being logged. + +There are several reasons why long-lived connections may be lost: + +* End-user devices tend to lose network connectivity often and unpredictably + because they can move out of wireless network coverage, get unplugged from + a wired network, enter airplane mode, be put to sleep, etc. +* HTTP load balancers or proxies that aren't configured for long-lived + connections may terminate connections after a short amount of time, usually + 30 seconds. + +If you're facing a reproducible issue, :ref:`enable debug logs ` to +see when and how connections are closed. + Are there ``onopen``, ``onmessage``, ``onerror``, and ``onclose`` callbacks? ............................................................................ 
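To illustrate the advice in this FAQ entry, here is a minimal sketch of a server handler that catches :exc:`~exceptions.ConnectionClosed` so an abnormal disconnection (such as code 1006) isn't logged as an error in the connection handler. The echo body is only an example, not part of the patch.

.. code:: python

    import websockets

    async def handler(websocket, path):
        try:
            async for message in websocket:
                await websocket.send(message)
        except websockets.ConnectionClosed:
            # The client went away, possibly without a close frame (code 1006).
            # Handle or ignore it here instead of letting it reach the logs.
            pass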
diff --git a/src/websockets/exceptions.py b/src/websockets/exceptions.py index f03ab72f2..558bdec24 100644 --- a/src/websockets/exceptions.py +++ b/src/websockets/exceptions.py @@ -117,9 +117,7 @@ class ConnectionClosed(WebSocketException): def __init__(self, code: int, reason: str) -> None: self.code = code self.reason = reason - message = "WebSocket connection is closed: " - message += format_close(code, reason) - super().__init__(message) + super().__init__(format_close(code, reason)) class ConnectionClosedError(ConnectionClosed): diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index 72b1076ab..7ad5ad833 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -14,33 +14,27 @@ def test_str(self): ), ( ConnectionClosed(1000, ""), - "WebSocket connection is closed: code = 1000 " - "(OK), no reason", + "code = 1000 (OK), no reason", ), ( ConnectionClosed(1006, None), - "WebSocket connection is closed: code = 1006 " - "(connection closed abnormally [internal]), no reason" + "code = 1006 (connection closed abnormally [internal]), no reason" ), ( ConnectionClosed(3000, None), - "WebSocket connection is closed: code = 3000 " - "(registered), no reason" + "code = 3000 (registered), no reason" ), ( ConnectionClosed(4000, None), - "WebSocket connection is closed: code = 4000 " - "(private use), no reason" + "code = 4000 (private use), no reason" ), ( ConnectionClosedError(1016, None), - "WebSocket connection is closed: code = 1016 " - "(unknown), no reason" + "code = 1016 (unknown), no reason" ), ( ConnectionClosedOK(1001, "bye"), - "WebSocket connection is closed: code = 1001 " - "(going away), reason = bye", + "code = 1001 (going away), reason = bye", ), ( InvalidHandshake("invalid request"), From a28fed3694f45fbfbc367afa9c51beb2f296a82d Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 7 Jul 2019 19:29:56 +0200 Subject: [PATCH 132/281] Proof-read changelog. --- docs/changelog.rst | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index cfad4a5b5..59914b8ba 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -23,8 +23,8 @@ Changelog :meth:`~protocol.WebSocketServerProtocol.process_request` in a subclass, define it with ``async def`` instead of ``def``. - For backwards compatibility, functions are still supported. However, in - some inheritance scenarios, mixing functions and coroutines won't work. + For backwards compatibility, functions are still mostly supported, but + mixing functions and coroutines won't work in some inheritance scenarios. .. note:: @@ -45,9 +45,9 @@ Changelog .. note:: **Version 8.0 renames the** ``WebSocketProtocolError`` **exception** - :exc:`ProtocolError` **.** + to :exc:`ProtocolError` **.** - For backwards compatibility, a ``WebSocketProtocolError`` is provided. + A ``WebSocketProtocolError`` alias provides backwards compatibility. .. note:: @@ -66,7 +66,7 @@ Also: :exc:`~exceptions.ConnectionClosed` to tell apart normal connection termination from errors. -* Added :func:`~auth.basic_auth_protocol_factory` to provide HTTP Basic Auth +* Added :func:`~auth.basic_auth_protocol_factory` to enforce HTTP Basic Auth on the server side. * :func:`~client.connect` handles redirects from the server during the @@ -80,14 +80,13 @@ Also: iterators in :meth:`~protocol.WebSocketCommonProtocol.send`. * Prevented spurious log messages about :exc:`~exceptions.ConnectionClosed` - exceptions in keepalive ping task. + exceptions in keepalive ping task. 
If you were using ``ping_timeout=None`` + as a workaround, you can remove it. - If you were using ``ping_timeout=None`` as a workaround, you can remove it. +* Changed :meth:`WebSocketServer.close() ` to + perform a proper closing handshake instead of failing the connection. -* Changed :meth:`~server.WebSocketServer.close` to perform a proper closing - handshake instead of failing the connection. - -* Avoided a crash of a ``extra_headers`` callable returns ``None``. +* Avoided a crash when a ``extra_headers`` callable returns ``None``. * Improved error messages when HTTP parsing fails. From faa4c55c17c11fbb0441985613cf519eccb51c2c Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 7 Jul 2019 19:31:04 +0200 Subject: [PATCH 133/281] Bump version number. --- docs/changelog.rst | 5 ++++- docs/conf.py | 4 ++-- src/websockets/version.py | 2 +- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 59914b8ba..e81d80d85 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -3,11 +3,14 @@ Changelog .. currentmodule:: websockets -8.0 +8.1 ... *In development* +8.0 +... + .. warning:: **Version 8.0 drops compatibility with Python 3.4 and 3.5.** diff --git a/docs/conf.py b/docs/conf.py index e5e6ab15f..1241a49fb 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -59,9 +59,9 @@ # built documents. # # The short X.Y version. -version = '7.0' +version = '8.0' # The full version, including alpha/beta/rc tags. -release = '7.0' +release = '8.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/src/websockets/version.py b/src/websockets/version.py index 96b948d8a..1aa0a5ebc 100644 --- a/src/websockets/version.py +++ b/src/websockets/version.py @@ -1 +1 @@ -version = "7.0" +version = "8.0" From 02af45351df41603c2767b004b29ab158337a667 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 7 Jul 2019 20:06:27 +0200 Subject: [PATCH 134/281] PyPI disables the "raw" directive. --- README.rst | 4 +--- setup.py | 9 +++++++++ 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/README.rst b/README.rst index 7395d803a..e2ea6df69 100644 --- a/README.rst +++ b/README.rst @@ -87,9 +87,7 @@ Does that look good?

Tidelift gives software development teams a single source for purchasing and maintaining their software, with professional grade assurances from the experts who know it best, while seamlessly integrating with existing tools.

Get supported websockets with the Tidelift Subscription


- -(If you contribute to ``websockets`` and would like to become an official -support provider, `let me know `_.) +
-
-(If you contribute to ``websockets`` and would like to become an official
-support provider, `let me know `_.)
+

(If you contribute to ``websockets`` and would like to become an official support provider, let me know.)

Why should I use ``websockets``? -------------------------------- diff --git a/setup.py b/setup.py index 3c87b2339..1ea735cb6 100644 --- a/setup.py +++ b/setup.py @@ -1,4 +1,5 @@ import pathlib +import re import sys import setuptools @@ -10,6 +11,14 @@ long_description = (root_dir / 'README.rst').read_text(encoding='utf-8') +# PyPI disables the "raw" directive. +long_description = re.sub( + r"^\.\. raw:: html.*?^(?=\w)", + "", + long_description, + flags=re.DOTALL | re.MULTILINE, +) + exec((root_dir / 'src' / 'websockets' / 'version.py').read_text(encoding='utf-8')) py_version = sys.version_info[:2] From ec50f6b2b965f9ffa48a5760ed72376796728ede Mon Sep 17 00:00:00 2001 From: Manu NALEPA Date: Fri, 12 Jul 2019 17:29:50 +0200 Subject: [PATCH 135/281] __main.py__: Fix typo --- src/websockets/__main__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/websockets/__main__.py b/src/websockets/__main__.py index 57d2a823b..bccb8aa52 100644 --- a/src/websockets/__main__.py +++ b/src/websockets/__main__.py @@ -48,7 +48,7 @@ def exit_from_event_loop_thread( loop.stop() if not stop.done(): # When exiting the thread that runs the event loop, raise - # KeyboardInterrupt in the main thead to exit the program. + # KeyboardInterrupt in the main thread to exit the program. try: ctrl_c = signal.CTRL_C_EVENT # Windows except AttributeError: From c1af276ab1e9fb1c323fe232e6ed768a912b61b8 Mon Sep 17 00:00:00 2001 From: Harmon Date: Mon, 15 Jul 2019 15:13:27 -0500 Subject: [PATCH 136/281] Re-expose WebSocketProtocolError --- src/websockets/exceptions.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/websockets/exceptions.py b/src/websockets/exceptions.py index 558bdec24..9873a1717 100644 --- a/src/websockets/exceptions.py +++ b/src/websockets/exceptions.py @@ -57,6 +57,7 @@ "InvalidURI", "PayloadTooBig", "ProtocolError", + "WebSocketProtocolError", ] From f1f5d7d37927b020dd39c37bc75415c79b0d5b59 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 21 Jul 2019 07:37:12 +0200 Subject: [PATCH 137/281] Add changelog for #649.. --- docs/changelog.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index e81d80d85..8e862c5ec 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -8,6 +8,12 @@ Changelog *In development* +8.0.1 +..... + +* Restored the ability to import ``WebSocketProtocolError`` from + ``websockets``. + 8.0 ... From 5d059da31f0d967ddf300a15f03fd00a92c8712f Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 21 Jul 2019 07:38:10 +0200 Subject: [PATCH 138/281] Bump version number. --- docs/conf.py | 2 +- src/websockets/version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 1241a49fb..560140f9b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -61,7 +61,7 @@ # The short X.Y version. version = '8.0' # The full version, including alpha/beta/rc tags. -release = '8.0' +release = '8.0.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/src/websockets/version.py b/src/websockets/version.py index 1aa0a5ebc..add721549 100644 --- a/src/websockets/version.py +++ b/src/websockets/version.py @@ -1 +1 @@ -version = "8.0" +version = "8.0.1" From 7e0a651a06963c0a30f6c4888a30a9e7d3a7ad68 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Wed, 31 Jul 2019 20:40:51 +0200 Subject: [PATCH 139/281] Remove incorrect assertion. create_server must receive either host + port or sock. 
It does its own checks anyway; we don't need to replicate them. Fix #659. --- docs/changelog.rst | 6 ++++++ src/websockets/server.py | 3 --- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 8e862c5ec..6ed63b654 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -8,6 +8,12 @@ Changelog *In development* +8.0.2 +..... + +* Restored the ability to pass a socket with the ``sock`` parameter of + :func:`~server.serve`. + 8.0.1 ..... diff --git a/src/websockets/server.py b/src/websockets/server.py index 446f1db7f..b220a1b88 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -928,9 +928,6 @@ def __init__( ) if path is None: - # serve(..., host, port) must specify host and port parameters. - # host can be None to listen on all interfaces; port cannot be None. - assert port is not None create_server = functools.partial( loop.create_server, factory, host, port, **kwargs ) From fac562ddd5e6004949acd504c48fc91f2558593f Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Wed, 31 Jul 2019 21:11:33 +0200 Subject: [PATCH 140/281] Remove incorrect assertion. Fix #646. See the ticket for details. --- docs/changelog.rst | 2 ++ src/websockets/protocol.py | 5 ++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 6ed63b654..87b2e4380 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -14,6 +14,8 @@ Changelog * Restored the ability to pass a socket with the ``sock`` parameter of :func:`~server.serve`. +* Removed an incorrect assertion when a connection drops. + 8.0.1 ..... diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 77dad5e1d..e25f4aaee 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -479,7 +479,6 @@ async def recv(self) -> Data: if self.legacy_recv: return None # type: ignore else: - assert self.state in [State.CLOSING, State.CLOSED] # Wait until the connection is closed to raise # ConnectionClosed with the correct code and reason. await self.ensure_open() @@ -760,8 +759,8 @@ async def ensure_open(self) -> None: # Handle cases from most common to least common for performance. if self.state is State.OPEN: # If self.transfer_data_task exited without a closing handshake, - # self.close_connection_task may be closing it, going straight - # from OPEN to CLOSED. + # self.close_connection_task may be closing the connection, going + # straight from OPEN to CLOSED. if self.transfer_data_task.done(): await asyncio.shield(self.close_connection_task) raise self.connection_closed_exc() From e8deaf9a93302c291eb8c05456a5bf90e94d7b63 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Wed, 31 Jul 2019 21:14:25 +0200 Subject: [PATCH 141/281] Bump version number. --- docs/conf.py | 2 +- src/websockets/version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 560140f9b..617989cb1 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -61,7 +61,7 @@ # The short X.Y version. version = '8.0' # The full version, including alpha/beta/rc tags. -release = '8.0.1' +release = '8.0.2' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
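Regarding the ``sock`` parameter fix above (patch 139): with the assertion gone, a pre-created socket can again be handed to :func:`~server.serve`, and :meth:`~asyncio.loop.create_server` performs its own validation. A rough sketch of that usage follows; the echo handler and address are illustrative assumptions, not taken from the patch.

.. code:: python

    import asyncio
    import socket
    import websockets

    async def echo(websocket, path):
        async for message in websocket:
            await websocket.send(message)

    # Bind the socket ourselves; the event loop puts it in listening mode.
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind(("localhost", 8765))

    start_server = websockets.serve(echo, sock=sock)

    asyncio.get_event_loop().run_until_complete(start_server)
    asyncio.get_event_loop().run_forever()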
diff --git a/src/websockets/version.py b/src/websockets/version.py index add721549..cd8898041 100644 --- a/src/websockets/version.py +++ b/src/websockets/version.py @@ -1 +1 @@ -version = "8.0.1" +version = "8.0.2" From 3f444b1629237a6795c30d55f0775f4e75728bf3 Mon Sep 17 00:00:00 2001 From: Gunnlaugur Thor Briem Date: Mon, 12 Aug 2019 21:58:08 +0000 Subject: [PATCH 142/281] fix: permit None in type annotations Fix type annotations for four parameters which are documented to accept `None`. --- src/websockets/protocol.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index e25f4aaee..0b48d0dca 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -180,11 +180,11 @@ class WebSocketCommonProtocol(asyncio.StreamReaderProtocol): def __init__( self, *, - ping_interval: float = 20, - ping_timeout: float = 20, + ping_interval: Optional[float] = 20, + ping_timeout: Optional[float] = 20, close_timeout: Optional[float] = None, - max_size: int = 2 ** 20, - max_queue: int = 2 ** 5, + max_size: Optional[int] = 2 ** 20, + max_queue: Optional[int] = 2 ** 5, read_limit: int = 2 ** 16, write_limit: int = 2 ** 16, loop: Optional[asyncio.AbstractEventLoop] = None, From 05d256da094759200016f123d787d315d86fc5c2 Mon Sep 17 00:00:00 2001 From: Gunnlaugur Thor Briem Date: Tue, 13 Aug 2019 09:47:54 +0000 Subject: [PATCH 143/281] fix: downstream type annotations/assertions --- src/websockets/protocol.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 0b48d0dca..1f0edcce2 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -895,6 +895,7 @@ def append(frame: Frame) -> None: def append(frame: Frame) -> None: nonlocal chunks, max_size chunks.append(decoder.decode(frame.data, frame.fin)) + assert isinstance(max_size, int) max_size -= len(frame.data) else: @@ -909,6 +910,7 @@ def append(frame: Frame) -> None: def append(frame: Frame) -> None: nonlocal chunks, max_size chunks.append(frame.data) + assert isinstance(max_size, int) max_size -= len(frame.data) append(frame) @@ -924,7 +926,7 @@ def append(frame: Frame) -> None: # mypy cannot figure out that chunks have the proper type. return ("" if text else b"").join(chunks) # type: ignore - async def read_data_frame(self, max_size: int) -> Optional[Frame]: + async def read_data_frame(self, max_size: Optional[int]) -> Optional[Frame]: """ Read a single data frame from the connection. @@ -1006,7 +1008,7 @@ async def read_data_frame(self, max_size: int) -> Optional[Frame]: else: return frame - async def read_frame(self, max_size: int) -> Frame: + async def read_frame(self, max_size: Optional[int]) -> Frame: """ Read a single frame from the connection. From 4ccc512861e1b56d9152b93e133f2ec9c6118c21 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Wed, 21 Aug 2019 15:18:43 +0200 Subject: [PATCH 144/281] Fix typo in docstring. --- src/websockets/http.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/websockets/http.py b/src/websockets/http.py index e78a149ed..ba6d274bf 100644 --- a/src/websockets/http.py +++ b/src/websockets/http.py @@ -69,7 +69,7 @@ def d(value: bytes) -> str: async def read_request(stream: asyncio.StreamReader) -> Tuple[str, "Headers"]: """ - Read an HTTP/1.1 GET request and returns ``(path, headers)``. + Read an HTTP/1.1 GET request and return ``(path, headers)``. ``path`` isn't URL-decoded or validated in any way. 
@@ -115,7 +115,7 @@ async def read_request(stream: asyncio.StreamReader) -> Tuple[str, "Headers"]: async def read_response(stream: asyncio.StreamReader) -> Tuple[int, str, "Headers"]: """ - Read an HTTP/1.1 response and returns ``(status_code, reason, headers)``. + Read an HTTP/1.1 response and return ``(status_code, reason, headers)``. ``reason`` and ``headers`` are expected to contain only ASCII characters. Other characters are represented with surrogate escapes. From a693ec8cfcf206dfe7b917711e20600cdceb802e Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Wed, 21 Aug 2019 15:20:28 +0200 Subject: [PATCH 145/281] Update description of default TLS contexts. --- src/websockets/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/websockets/client.py b/src/websockets/client.py index 4d4a04cb8..c1fdf88a0 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -325,8 +325,8 @@ class Connect: For example, you can set the ``ssl`` keyword argument to a :class:`~ssl.SSLContext` to enforce some TLS settings. When connecting to - a ``wss://`` URI, if this argument isn't provided explicitly, it's set to - ``True``, which means Python's default :class:`~ssl.SSLContext` is used. + a ``wss://`` URI, if this argument isn't provided explicitly, + :func:`ssl.create_default_context` is called to create a context. You can connect to a different host and port from those found in ``uri`` by setting ``host`` and ``port`` keyword arguments. This only changes the From 46ddc64b3ab02f38579880a812b9c04da6d89ae1 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Wed, 21 Aug 2019 15:32:14 +0200 Subject: [PATCH 146/281] Add a new type for extension names. --- src/websockets/extensions/base.py | 8 ++++---- src/websockets/extensions/permessage_deflate.py | 8 ++++---- src/websockets/headers.py | 10 ++++++---- src/websockets/typing.py | 6 +++++- 4 files changed, 19 insertions(+), 13 deletions(-) diff --git a/src/websockets/extensions/base.py b/src/websockets/extensions/base.py index 7d46687c6..aa52a7adb 100644 --- a/src/websockets/extensions/base.py +++ b/src/websockets/extensions/base.py @@ -11,7 +11,7 @@ from typing import List, Optional, Sequence, Tuple from ..framing import Frame -from ..typing import ExtensionParameter +from ..typing import ExtensionName, ExtensionParameter __all__ = ["Extension", "ClientExtensionFactory", "ServerExtensionFactory"] @@ -24,7 +24,7 @@ class Extension: """ @property - def name(self) -> str: + def name(self) -> ExtensionName: """ Extension identifier. @@ -55,7 +55,7 @@ class ClientExtensionFactory: """ @property - def name(self) -> str: + def name(self) -> ExtensionName: """ Extension identifier. @@ -92,7 +92,7 @@ class ServerExtensionFactory: """ @property - def name(self) -> str: + def name(self) -> ExtensionName: """ Extension identifier. 
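As a brief illustration of the ``typing.NewType`` introduced by this patch (the ``negotiate`` function is a made-up example, not part of the code): the new type is enforced by static type checkers, but it is a plain ``str`` at runtime, so it costs nothing.

.. code:: python

    from typing import NewType

    ExtensionName = NewType("ExtensionName", str)

    def negotiate(name: ExtensionName) -> None:
        ...

    name = ExtensionName("permessage-deflate")
    negotiate(name)                      # accepted by mypy
    # negotiate("permessage-deflate")    # rejected by mypy: a plain str isn't an ExtensionName
    assert isinstance(name, str)         # at runtime it's still just a str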
diff --git a/src/websockets/extensions/permessage_deflate.py b/src/websockets/extensions/permessage_deflate.py index a41fd56ca..e38d9edab 100644 --- a/src/websockets/extensions/permessage_deflate.py +++ b/src/websockets/extensions/permessage_deflate.py @@ -15,7 +15,7 @@ PayloadTooBig, ) from ..framing import CTRL_OPCODES, OP_CONT, Frame -from ..typing import ExtensionParameter +from ..typing import ExtensionName, ExtensionParameter from .base import ClientExtensionFactory, Extension, ServerExtensionFactory @@ -36,7 +36,7 @@ class PerMessageDeflate(Extension): """ - name = "permessage-deflate" + name = ExtensionName("permessage-deflate") def __init__( self, @@ -274,7 +274,7 @@ class ClientPerMessageDeflateFactory(ClientExtensionFactory): """ - name = "permessage-deflate" + name = ExtensionName("permessage-deflate") def __init__( self, @@ -445,7 +445,7 @@ class ServerPerMessageDeflateFactory(ServerExtensionFactory): """ - name = "permessage-deflate" + name = ExtensionName("permessage-deflate") def __init__( self, diff --git a/src/websockets/headers.py b/src/websockets/headers.py index ac850654e..f33c94c04 100644 --- a/src/websockets/headers.py +++ b/src/websockets/headers.py @@ -13,7 +13,7 @@ from typing import Callable, List, NewType, Optional, Sequence, Tuple, TypeVar, cast from .exceptions import InvalidHeaderFormat, InvalidHeaderValue -from .typing import ExtensionHeader, ExtensionParameter, Subprotocol +from .typing import ExtensionHeader, ExtensionName, ExtensionParameter, Subprotocol __all__ = [ @@ -313,7 +313,7 @@ def parse_extension_item( pos = parse_OWS(header, pos + 1) parameter, pos = parse_extension_item_param(header, pos, header_name) parameters.append(parameter) - return (name, parameters), pos + return (cast(ExtensionName, name), parameters), pos def parse_extension(header: str) -> List[ExtensionHeader]: @@ -344,7 +344,9 @@ def parse_extension(header: str) -> List[ExtensionHeader]: parse_extension_list = parse_extension # alias for backwards compatibility -def build_extension_item(name: str, parameters: List[ExtensionParameter]) -> str: +def build_extension_item( + name: ExtensionName, parameters: List[ExtensionParameter] +) -> str: """ Build an extension definition. @@ -352,7 +354,7 @@ def build_extension_item(name: str, parameters: List[ExtensionParameter]) -> str """ return "; ".join( - [name] + [cast(str, name)] + [ # Quoted strings aren't necessary because values are always tokens. name if value is None else f"{name}={value}" diff --git a/src/websockets/typing.py b/src/websockets/typing.py index 3847701b2..4a60f93f6 100644 --- a/src/websockets/typing.py +++ b/src/websockets/typing.py @@ -23,6 +23,10 @@ Origin.__doc__ = """Value of a Origin header""" +ExtensionName = NewType("ExtensionName", str) +ExtensionName.__doc__ = """Name of a WebSocket extension""" + + ExtensionParameter = Tuple[str, Optional[str]] ExtensionParameter__doc__ = """Parameter of a WebSocket extension""" @@ -32,7 +36,7 @@ pass -ExtensionHeader = Tuple[str, List[ExtensionParameter]] +ExtensionHeader = Tuple[ExtensionName, List[ExtensionParameter]] ExtensionHeader__doc__ = """Item parsed in a Sec-WebSocket-Extensions header""" try: From a181964557eb94a17f7162a51810a8480bc1c896 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 22 Sep 2019 12:12:27 +0200 Subject: [PATCH 147/281] Build docs with Python 3.7. 
--- .readthedocs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index e5e224afd..109affab4 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -2,6 +2,6 @@ build: image: latest python: - version: 3.6 + version: 3.7 requirements_file: docs/requirements.txt From c6ee4a4111b5d17d5a63dd33e941f2b0d97837b4 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 22 Sep 2019 12:10:44 +0200 Subject: [PATCH 148/281] =?UTF-8?q?Require=20Python=20=E2=89=A5=203.6.1.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit There've been multiple regressions where websockets stops working with Python 3.6 but works fine with Python 3.6.1 or higher. I don't have a good way to detect such regressions. I don't think it's a good practice to run anything but the latest 3.6.x (or 3.7.x, etc.) anyway. Instead of fighting a useless and losing battle, I'm moving the minimum requirement to 3.6.1. Strictly speaking, this isn't a backwards incompatible change. The incompatibility with Python 3.6 appeared in websockets 8.0, which was the first release that included 94945fec. Sure, I'm accepting the backwards incompatibility instead of fixing it... Judge me if you'd like, or support websockets on Tidelift — if it becomes profitable, I'll have an incentive to provide better support for older Python versions. Refs #655, #664, #667. --- README.rst | 2 +- docs/intro.rst | 2 +- setup.py | 8 +++----- src/websockets/framing.py | 30 +++++++----------------------- src/websockets/uri.py | 28 +++++++++++++--------------- 5 files changed, 25 insertions(+), 45 deletions(-) diff --git a/README.rst b/README.rst index e2ea6df69..5dc9a745d 100644 --- a/README.rst +++ b/README.rst @@ -128,7 +128,7 @@ Why shouldn't I use ``websockets``? and :rfc:`7692`: Compression Extensions for WebSocket. Its support for HTTP is minimal — just enough for a HTTP health check. * If you want to use Python 2: ``websockets`` builds upon ``asyncio`` which - only works on Python 3. ``websockets`` requires Python ≥ 3.6. + only works on Python 3. ``websockets`` requires Python ≥ 3.6.1. What else? ---------- diff --git a/docs/intro.rst b/docs/intro.rst index 14ba1b38a..8be700239 100644 --- a/docs/intro.rst +++ b/docs/intro.rst @@ -6,7 +6,7 @@ Getting started Requirements ------------ -``websockets`` requires Python ≥ 3.6. +``websockets`` requires Python ≥ 3.6.1. You should use the latest version of Python if possible. 
If you're using an older version, be aware that for each minor version (3.x), only the latest diff --git a/setup.py b/setup.py index 1ea735cb6..c76430104 100644 --- a/setup.py +++ b/setup.py @@ -21,10 +21,8 @@ exec((root_dir / 'src' / 'websockets' / 'version.py').read_text(encoding='utf-8')) -py_version = sys.version_info[:2] - -if py_version < (3, 6): - raise Exception("websockets requires Python >= 3.6.") +if sys.version_info[:3] < (3, 6, 1): + raise Exception("websockets requires Python >= 3.6.1.") packages = ['websockets', 'websockets/extensions'] @@ -62,6 +60,6 @@ ext_modules=ext_modules, include_package_data=True, zip_safe=False, - python_requires='>=3.6', + python_requires='>=3.6.1', test_loader='unittest:TestLoader', ) diff --git a/src/websockets/framing.py b/src/websockets/framing.py index 478a7b05a..81a3185b0 100644 --- a/src/websockets/framing.py +++ b/src/websockets/framing.py @@ -49,22 +49,10 @@ EXTERNAL_CLOSE_CODES = [1000, 1001, 1002, 1003, 1007, 1008, 1009, 1010, 1011] -# Remove FrameData when dropping support for Python < 3.6.1 — the first -# version where NamedTuple supports default values, methods, and docstrings. - # Consider converting to a dataclass when dropping support for Python < 3.7. -class FrameData(NamedTuple): - fin: bool - opcode: int - data: bytes - rsv1: bool - rsv2: bool - rsv3: bool - - -class Frame(FrameData): +class Frame(NamedTuple): """ WebSocket frame. @@ -80,16 +68,12 @@ class Frame(FrameData): """ - def __new__( - cls, - fin: bool, - opcode: int, - data: bytes, - rsv1: bool = False, - rsv2: bool = False, - rsv3: bool = False, - ) -> "Frame": - return FrameData.__new__(cls, fin, opcode, data, rsv1, rsv2, rsv3) + fin: bool + opcode: int + data: bytes + rsv1: bool = False + rsv2: bool = False + rsv3: bool = False @classmethod async def read( diff --git a/src/websockets/uri.py b/src/websockets/uri.py index cbb56524b..f5bbafa96 100644 --- a/src/websockets/uri.py +++ b/src/websockets/uri.py @@ -20,6 +20,19 @@ class WebSocketURI(NamedTuple): + """ + WebSocket URI. + + :param bool secure: secure flag + :param str host: lower-case host + :param int port: port, always set even if it's the default + :param str resource_name: path and optional query + :param str user_info: ``(username, password)`` tuple when the URI contains + `User Information`_, else ``None``. + + .. _User Information: https://tools.ietf.org/html/rfc3986#section-3.2.1 + """ + secure: bool host: str port: int @@ -27,21 +40,6 @@ class WebSocketURI(NamedTuple): user_info: Optional[Tuple[str, str]] -# Declare the docstring normally when dropping support for Python < 3.6.1. - -WebSocketURI.__doc__ = """ -WebSocket URI. - -:param bool secure: secure flag -:param str host: lower-case host -:param int port: port, always set even if it's the default -:param str resource_name: path and optional query -:param str user_info: ``(username, password)`` tuple when the URI contains - `User Information`_, else ``None``. - -.. _User Information: https://tools.ietf.org/html/rfc3986#section-3.2.1 -""" - # Work around https://bugs.python.org/issue19931 WebSocketURI.secure.__doc__ = "" From d72322764ee6a53fdd3a8a13a1a9bf324f7f844b Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 22 Sep 2019 15:19:20 +0200 Subject: [PATCH 149/281] Clarify why we leave SIGINT alone. Ref #658. 
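The ``framing.py`` and ``uri.py`` changes above rely on features that ``typing.NamedTuple`` only gained in Python 3.6.1: default values, methods, and docstrings declared in the class body. A simplified sketch of what that enables (not the full ``Frame`` definition):

.. code:: python

    from typing import NamedTuple

    class Frame(NamedTuple):
        """WebSocket frame (simplified)."""

        fin: bool
        opcode: int
        data: bytes
        rsv1: bool = False  # default values require Python >= 3.6.1

    frame = Frame(True, 1, b"hello")
    assert frame.rsv1 is False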
--- docs/deployment.rst | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/docs/deployment.rst b/docs/deployment.rst index 797284f3d..5b05afff1 100644 --- a/docs/deployment.rst +++ b/docs/deployment.rst @@ -32,11 +32,16 @@ with the object returned by :func:`~server.serve`: On Unix systems, shutdown is usually triggered by sending a signal. -Here's a full example (Unix-only): +Here's a full example for handling SIGTERM on Unix: .. literalinclude:: ../example/shutdown.py :emphasize-lines: 13,17-19 +This example is easily adapted to handle other signals. If you override the +default handler for SIGINT, which raises :exc:`KeyboardInterrupt`, be aware +that you won't be able to interrupt a program with Ctrl-C anymore when it's +stuck in a loop. + It's more difficult to achieve the same effect on Windows. Some third-party projects try to help with this problem. From 8800c0cb250897feda7c6e0db2767ff67bd480a2 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 5 Oct 2019 11:41:21 +0200 Subject: [PATCH 150/281] Copy FlowControlMixin and StreamReaderProtocol. This is the official recommendation of Python core devs. The code is taken from the current 3.7 branch. --- src/websockets/protocol.py | 127 ++++++++++++++++++++++++++++++++++++- 1 file changed, 126 insertions(+), 1 deletion(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 1f0edcce2..2f74cd23b 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -61,7 +61,132 @@ class State(enum.IntEnum): # between the check and the assignment. -class WebSocketCommonProtocol(asyncio.StreamReaderProtocol): +class FlowControlMixin(asyncio.Protocol): + """Reusable flow control logic for StreamWriter.drain(). + This implements the protocol methods pause_writing(), + resume_writing() and connection_lost(). If the subclass overrides + these it must call the super methods. + StreamWriter.drain() must wait for _drain_helper() coroutine. + """ + + def __init__(self, loop=None): + if loop is None: + self._loop = asyncio.get_event_loop() + else: + self._loop = loop + self._paused = False + self._drain_waiter = None + self._connection_lost = False + + def pause_writing(self): + assert not self._paused + self._paused = True + if self._loop.get_debug(): + logger.debug("%r pauses writing", self) + + def resume_writing(self): + assert self._paused + self._paused = False + if self._loop.get_debug(): + logger.debug("%r resumes writing", self) + + waiter = self._drain_waiter + if waiter is not None: + self._drain_waiter = None + if not waiter.done(): + waiter.set_result(None) + + def connection_lost(self, exc): + self._connection_lost = True + # Wake up the writer if currently paused. + if not self._paused: + return + waiter = self._drain_waiter + if waiter is None: + return + self._drain_waiter = None + if waiter.done(): + return + if exc is None: + waiter.set_result(None) + else: + waiter.set_exception(exc) + + async def _drain_helper(self): + if self._connection_lost: + raise ConnectionResetError("Connection lost") + if not self._paused: + return + waiter = self._drain_waiter + assert waiter is None or waiter.cancelled() + waiter = self._loop.create_future() + self._drain_waiter = waiter + await waiter + + +class StreamReaderProtocol(FlowControlMixin, asyncio.Protocol): + """Helper class to adapt between Protocol and StreamReader. 
+ (This is a helper class instead of making StreamReader itself a + Protocol subclass, because the StreamReader has other potential + uses, and to prevent the user of the StreamReader to accidentally + call inappropriate methods of the protocol.) + """ + + def __init__(self, stream_reader, client_connected_cb=None, loop=None): + super().__init__(loop=loop) + self._stream_reader = stream_reader + self._stream_writer = None + self._client_connected_cb = client_connected_cb + self._over_ssl = False + self._closed = self._loop.create_future() + + def connection_made(self, transport): + self._stream_reader.set_transport(transport) + self._over_ssl = transport.get_extra_info("sslcontext") is not None + if self._client_connected_cb is not None: + self._stream_writer = asyncio.StreamWriter( + transport, self, self._stream_reader, self._loop + ) + res = self._client_connected_cb(self._stream_reader, self._stream_writer) + if asyncio.iscoroutine(res): + self._loop.create_task(res) + + def connection_lost(self, exc): + if self._stream_reader is not None: + if exc is None: + self._stream_reader.feed_eof() + else: + self._stream_reader.set_exception(exc) + if not self._closed.done(): + if exc is None: + self._closed.set_result(None) + else: + self._closed.set_exception(exc) + super().connection_lost(exc) + self._stream_reader = None + self._stream_writer = None + + def data_received(self, data): + self._stream_reader.feed_data(data) + + def eof_received(self): + self._stream_reader.feed_eof() + if self._over_ssl: + # Prevent a warning in SSLProtocol.eof_received: + # "returning true from eof_received() + # has no effect when using ssl" + return False + return True + + def __del__(self): + # Prevent reports about unhandled exceptions. + # Better than self._closed._log_traceback = False hack + closed = self._closed + if closed.done() and not closed.cancelled(): + closed.exception() + + +class WebSocketCommonProtocol(StreamReaderProtocol): """ :class:`~asyncio.Protocol` subclass implementing the data transfer phase. From 7e7f747ca5267755ccb1bef397c3071baad9a2e1 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 5 Oct 2019 11:54:04 +0200 Subject: [PATCH 151/281] Remove docstrings and debug logs. --- src/websockets/protocol.py | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 2f74cd23b..d74c81576 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -62,12 +62,6 @@ class State(enum.IntEnum): class FlowControlMixin(asyncio.Protocol): - """Reusable flow control logic for StreamWriter.drain(). - This implements the protocol methods pause_writing(), - resume_writing() and connection_lost(). If the subclass overrides - these it must call the super methods. - StreamWriter.drain() must wait for _drain_helper() coroutine. - """ def __init__(self, loop=None): if loop is None: @@ -81,14 +75,10 @@ def __init__(self, loop=None): def pause_writing(self): assert not self._paused self._paused = True - if self._loop.get_debug(): - logger.debug("%r pauses writing", self) def resume_writing(self): assert self._paused self._paused = False - if self._loop.get_debug(): - logger.debug("%r resumes writing", self) waiter = self._drain_waiter if waiter is not None: @@ -125,12 +115,6 @@ async def _drain_helper(self): class StreamReaderProtocol(FlowControlMixin, asyncio.Protocol): - """Helper class to adapt between Protocol and StreamReader. 
- (This is a helper class instead of making StreamReader itself a - Protocol subclass, because the StreamReader has other potential - uses, and to prevent the user of the StreamReader to accidentally - call inappropriate methods of the protocol.) - """ def __init__(self, stream_reader, client_connected_cb=None, loop=None): super().__init__(loop=loop) From e7282008796ae30d3c3df5715b97f49e35309825 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 5 Oct 2019 11:54:43 +0200 Subject: [PATCH 152/281] Merge FlowControlMixin in StreamReaderProtocol. --- src/websockets/protocol.py | 54 +++++++++++++++++--------------------- 1 file changed, 24 insertions(+), 30 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index d74c81576..49d8b4f2d 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -61,9 +61,9 @@ class State(enum.IntEnum): # between the check and the assignment. -class FlowControlMixin(asyncio.Protocol): +class StreamReaderProtocol(asyncio.Protocol): - def __init__(self, loop=None): + def __init__(self, stream_reader, client_connected_cb=None, loop=None): if loop is None: self._loop = asyncio.get_event_loop() else: @@ -72,6 +72,12 @@ def __init__(self, loop=None): self._drain_waiter = None self._connection_lost = False + self._stream_reader = stream_reader + self._stream_writer = None + self._client_connected_cb = client_connected_cb + self._over_ssl = False + self._closed = self._loop.create_future() + def pause_writing(self): assert not self._paused self._paused = True @@ -86,22 +92,6 @@ def resume_writing(self): if not waiter.done(): waiter.set_result(None) - def connection_lost(self, exc): - self._connection_lost = True - # Wake up the writer if currently paused. - if not self._paused: - return - waiter = self._drain_waiter - if waiter is None: - return - self._drain_waiter = None - if waiter.done(): - return - if exc is None: - waiter.set_result(None) - else: - waiter.set_exception(exc) - async def _drain_helper(self): if self._connection_lost: raise ConnectionResetError("Connection lost") @@ -113,17 +103,6 @@ async def _drain_helper(self): self._drain_waiter = waiter await waiter - -class StreamReaderProtocol(FlowControlMixin, asyncio.Protocol): - - def __init__(self, stream_reader, client_connected_cb=None, loop=None): - super().__init__(loop=loop) - self._stream_reader = stream_reader - self._stream_writer = None - self._client_connected_cb = client_connected_cb - self._over_ssl = False - self._closed = self._loop.create_future() - def connection_made(self, transport): self._stream_reader.set_transport(transport) self._over_ssl = transport.get_extra_info("sslcontext") is not None @@ -146,7 +125,22 @@ def connection_lost(self, exc): self._closed.set_result(None) else: self._closed.set_exception(exc) - super().connection_lost(exc) + + self._connection_lost = True + # Wake up the writer if currently paused. + if not self._paused: + return + waiter = self._drain_waiter + if waiter is None: + return + self._drain_waiter = None + if waiter.done(): + return + if exc is None: + waiter.set_result(None) + else: + waiter.set_exception(exc) + self._stream_reader = None self._stream_writer = None From 42a436ce1dd37f4388a13d0c1591af7544c8bb1f Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 5 Oct 2019 11:55:53 +0200 Subject: [PATCH 153/281] Deduplicate loop and _loop attributes. 
--- src/websockets/protocol.py | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 49d8b4f2d..98c23ab1c 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -63,11 +63,7 @@ class State(enum.IntEnum): class StreamReaderProtocol(asyncio.Protocol): - def __init__(self, stream_reader, client_connected_cb=None, loop=None): - if loop is None: - self._loop = asyncio.get_event_loop() - else: - self._loop = loop + def __init__(self, stream_reader, client_connected_cb=None): self._paused = False self._drain_waiter = None self._connection_lost = False @@ -76,7 +72,7 @@ def __init__(self, stream_reader, client_connected_cb=None, loop=None): self._stream_writer = None self._client_connected_cb = client_connected_cb self._over_ssl = False - self._closed = self._loop.create_future() + self._closed = self.loop.create_future() def pause_writing(self): assert not self._paused @@ -99,7 +95,7 @@ async def _drain_helper(self): return waiter = self._drain_waiter assert waiter is None or waiter.cancelled() - waiter = self._loop.create_future() + waiter = self.loop.create_future() self._drain_waiter = waiter await waiter @@ -108,11 +104,11 @@ def connection_made(self, transport): self._over_ssl = transport.get_extra_info("sslcontext") is not None if self._client_connected_cb is not None: self._stream_writer = asyncio.StreamWriter( - transport, self, self._stream_reader, self._loop + transport, self, self._stream_reader, self.loop ) res = self._client_connected_cb(self._stream_reader, self._stream_writer) if asyncio.iscoroutine(res): - self._loop.create_task(res) + self.loop.create_task(res) def connection_lost(self, exc): if self._stream_reader is not None: @@ -315,8 +311,6 @@ def __init__( self.read_limit = read_limit self.write_limit = write_limit - # Store a reference to loop to avoid relying on self._loop, a private - # attribute of StreamReaderProtocol, inherited from FlowControlMixin. if loop is None: loop = asyncio.get_event_loop() self.loop = loop @@ -331,7 +325,7 @@ def __init__( # limit and half the buffer limit of :class:`~asyncio.StreamReader`. # That's why it must be set to half of ``self.read_limit``. stream_reader = asyncio.StreamReader(limit=read_limit // 2, loop=loop) - super().__init__(stream_reader, self.client_connected, loop) + super().__init__(stream_reader, self.client_connected) self.reader: asyncio.StreamReader self.writer: asyncio.StreamWriter From 5ed6a458b1992e4e00a2a25b0a2c378f22c3e2e7 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 5 Oct 2019 11:57:34 +0200 Subject: [PATCH 154/281] Remove client_connected callback. 
--- src/websockets/protocol.py | 31 +++++++------------------------ 1 file changed, 7 insertions(+), 24 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 98c23ab1c..bfc354a82 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -63,14 +63,13 @@ class State(enum.IntEnum): class StreamReaderProtocol(asyncio.Protocol): - def __init__(self, stream_reader, client_connected_cb=None): + def __init__(self, stream_reader): self._paused = False self._drain_waiter = None self._connection_lost = False self._stream_reader = stream_reader self._stream_writer = None - self._client_connected_cb = client_connected_cb self._over_ssl = False self._closed = self.loop.create_future() @@ -102,13 +101,11 @@ async def _drain_helper(self): def connection_made(self, transport): self._stream_reader.set_transport(transport) self._over_ssl = transport.get_extra_info("sslcontext") is not None - if self._client_connected_cb is not None: - self._stream_writer = asyncio.StreamWriter( - transport, self, self._stream_reader, self.loop - ) - res = self._client_connected_cb(self._stream_reader, self._stream_writer) - if asyncio.iscoroutine(res): - self.loop.create_task(res) + self._stream_writer = asyncio.StreamWriter( + transport, self, self._stream_reader, self.loop + ) + self.reader = self._stream_reader + self.writer = self._stream_writer def connection_lost(self, exc): if self._stream_reader is not None: @@ -325,7 +322,7 @@ def __init__( # limit and half the buffer limit of :class:`~asyncio.StreamReader`. # That's why it must be set to half of ``self.read_limit``. stream_reader = asyncio.StreamReader(limit=read_limit // 2, loop=loop) - super().__init__(stream_reader, self.client_connected) + super().__init__(stream_reader) self.reader: asyncio.StreamReader self.writer: asyncio.StreamWriter @@ -381,20 +378,6 @@ def __init__( # Task closing the TCP connection. self.close_connection_task: asyncio.Task[None] - def client_connected( - self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter - ) -> None: - """ - Callback when the TCP connection is established. - - Record references to the stream reader and the stream writer to avoid - using private attributes ``_stream_reader`` and ``_stream_writer`` of - :class:`~asyncio.StreamReaderProtocol`. - - """ - self.reader = reader - self.writer = writer - def connection_open(self) -> None: """ Callback when the WebSocket opening handshake completes. From 00ef5c3525442a943ec471f3f9f2edae8163cf7c Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 5 Oct 2019 12:00:31 +0200 Subject: [PATCH 155/281] Deduplicate reader/writer and _stream_reader/writer attributes. 
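With _stream_reader and _stream_writer gone, the protocol talks to its
reader directly. The underlying pattern — a bare asyncio.Protocol feeding
an asyncio.StreamReader — is small enough to sketch in isolation. This is
a rough, self-contained illustration; the ReaderProtocol name and the
simplified method bodies are illustrative, not the exact code this patch
produces:

    import asyncio

    # Illustrative sketch, simplified from asyncio.StreamReaderProtocol.
    class ReaderProtocol(asyncio.Protocol):
        """Feed incoming bytes into an asyncio.StreamReader."""

        def __init__(self):
            self.reader = asyncio.StreamReader()

        def connection_made(self, transport):
            # Let the reader apply read flow control to the transport.
            self.reader.set_transport(transport)

        def data_received(self, data):
            # Buffer incoming bytes; reader.read() and friends consume them.
            self.reader.feed_data(data)

        def eof_received(self):
            # Mark end of stream; returning True keeps the transport open
            # for writing (half-close on plain TCP).
            self.reader.feed_eof()
            return True

        def connection_lost(self, exc):
            # Propagate errors to pending reads, or record a clean EOF.
            if exc is None:
                self.reader.feed_eof()
            else:
                self.reader.set_exception(exc)

Coroutines then read from protocol.reader with readline(), readexactly(),
etc., exactly as the frame-reading code in this class does.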
--- src/websockets/protocol.py | 36 +++++++++++++++--------------------- 1 file changed, 15 insertions(+), 21 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index bfc354a82..9c61a409b 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -62,14 +62,11 @@ class State(enum.IntEnum): class StreamReaderProtocol(asyncio.Protocol): - - def __init__(self, stream_reader): + def __init__(self): self._paused = False self._drain_waiter = None self._connection_lost = False - self._stream_reader = stream_reader - self._stream_writer = None self._over_ssl = False self._closed = self.loop.create_future() @@ -99,20 +96,16 @@ async def _drain_helper(self): await waiter def connection_made(self, transport): - self._stream_reader.set_transport(transport) + self.reader.set_transport(transport) self._over_ssl = transport.get_extra_info("sslcontext") is not None - self._stream_writer = asyncio.StreamWriter( - transport, self, self._stream_reader, self.loop - ) - self.reader = self._stream_reader - self.writer = self._stream_writer + self.writer = asyncio.StreamWriter(transport, self, self.reader, self.loop) def connection_lost(self, exc): - if self._stream_reader is not None: + if self.reader is not None: if exc is None: - self._stream_reader.feed_eof() + self.reader.feed_eof() else: - self._stream_reader.set_exception(exc) + self.reader.set_exception(exc) if not self._closed.done(): if exc is None: self._closed.set_result(None) @@ -134,14 +127,14 @@ def connection_lost(self, exc): else: waiter.set_exception(exc) - self._stream_reader = None - self._stream_writer = None + del self.reader + del self.writer def data_received(self, data): - self._stream_reader.feed_data(data) + self.reader.feed_data(data) def eof_received(self): - self._stream_reader.feed_eof() + self.reader.feed_eof() if self._over_ssl: # Prevent a warning in SSLProtocol.eof_received: # "returning true from eof_received() @@ -321,13 +314,14 @@ def __init__( # ``self.read_limit``. The ``limit`` argument controls the line length # limit and half the buffer limit of :class:`~asyncio.StreamReader`. # That's why it must be set to half of ``self.read_limit``. - stream_reader = asyncio.StreamReader(limit=read_limit // 2, loop=loop) - super().__init__(stream_reader) - - self.reader: asyncio.StreamReader + self.reader: asyncio.StreamReader = asyncio.StreamReader( + limit=read_limit // 2, loop=loop + ) self.writer: asyncio.StreamWriter self._drain_lock = asyncio.Lock(loop=loop) + super().__init__() + # This class implements the data transfer and closing handshake, which # are shared between the client-side and the server-side. # Subclasses implement the opening handshake and, on success, execute From 2707b51fec077b88060c9fca4dc2a5a50b55eda5 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 5 Oct 2019 12:25:40 +0200 Subject: [PATCH 156/281] Merge asyncio.Protocol methods. 
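Part of what moves here is the standard asyncio write flow control
pattern: the transport calls pause_writing() / resume_writing() on the
protocol around its high- and low-water marks, and drain() parks writing
coroutines on a future while the protocol is paused. A rough,
self-contained sketch of just that pattern (simplified — it ignores
connection loss and concurrent drainers, which the real code handles):

    import asyncio

    # Illustrative sketch; the real implementation also wakes the waiter
    # on connection_lost() and serializes drain() calls.
    class FlowControlledProtocol(asyncio.Protocol):
        """Make writers wait while the transport's buffer is full."""

        def __init__(self):
            self.transport = None
            self._paused = False
            self._drain_waiter = None

        def connection_made(self, transport):
            self.transport = transport

        def pause_writing(self):
            # The transport's write buffer crossed the high-water mark.
            self._paused = True

        def resume_writing(self):
            # The buffer drained below the low-water mark: wake the writer.
            self._paused = False
            waiter, self._drain_waiter = self._drain_waiter, None
            if waiter is not None and not waiter.done():
                waiter.set_result(None)

        async def drain(self):
            # Block until the transport can accept more data.
            if self._paused:
                self._drain_waiter = asyncio.get_event_loop().create_future()
                await self._drain_waiter

        async def send(self, data):
            self.transport.write(data)
            await self.drain()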
--- src/websockets/protocol.py | 160 ++++++++++++++++--------------------- 1 file changed, 70 insertions(+), 90 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 9c61a409b..89e3464a6 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -70,20 +70,6 @@ def __init__(self): self._over_ssl = False self._closed = self.loop.create_future() - def pause_writing(self): - assert not self._paused - self._paused = True - - def resume_writing(self): - assert self._paused - self._paused = False - - waiter = self._drain_waiter - if waiter is not None: - self._drain_waiter = None - if not waiter.done(): - waiter.set_result(None) - async def _drain_helper(self): if self._connection_lost: raise ConnectionResetError("Connection lost") @@ -95,53 +81,6 @@ async def _drain_helper(self): self._drain_waiter = waiter await waiter - def connection_made(self, transport): - self.reader.set_transport(transport) - self._over_ssl = transport.get_extra_info("sslcontext") is not None - self.writer = asyncio.StreamWriter(transport, self, self.reader, self.loop) - - def connection_lost(self, exc): - if self.reader is not None: - if exc is None: - self.reader.feed_eof() - else: - self.reader.set_exception(exc) - if not self._closed.done(): - if exc is None: - self._closed.set_result(None) - else: - self._closed.set_exception(exc) - - self._connection_lost = True - # Wake up the writer if currently paused. - if not self._paused: - return - waiter = self._drain_waiter - if waiter is None: - return - self._drain_waiter = None - if waiter.done(): - return - if exc is None: - waiter.set_result(None) - else: - waiter.set_exception(exc) - - del self.reader - del self.writer - - def data_received(self, data): - self.reader.feed_data(data) - - def eof_received(self): - self.reader.feed_eof() - if self._over_ssl: - # Prevent a warning in SSLProtocol.eof_received: - # "returning true from eof_received() - # has no effect when using ssl" - return False - return True - def __del__(self): # Prevent reports about unhandled exceptions. # Better than self._closed._log_traceback = False hack @@ -1363,7 +1302,7 @@ def abort_pings(self) -> None: "%s - aborted pending ping%s: %s", self.side, plural, pings_hex ) - # asyncio.StreamReaderProtocol methods + # asyncio.Protocol methods def connection_made(self, transport: asyncio.BaseTransport) -> None: """ @@ -1382,34 +1321,11 @@ def connection_made(self, transport: asyncio.BaseTransport) -> None: logger.debug("%s - event = connection_made(%s)", self.side, transport) # mypy thinks transport is a BaseTransport, not a Transport. transport.set_write_buffer_limits(self.write_limit) # type: ignore - super().connection_made(transport) - - def eof_received(self) -> bool: - """ - Close the transport after receiving EOF. - - Since Python 3.5, `:meth:~StreamReaderProtocol.eof_received` returns - ``True`` on non-TLS connections. - - See http://bugs.python.org/issue24539 for more information. - - This is inappropriate for ``websockets`` for at least three reasons: - - 1. The use case is to read data until EOF with self.reader.read(-1). - Since WebSocket is a TLV protocol, this never happens. - - 2. It doesn't work on TLS connections. A falsy value must be - returned to have the same behavior on TLS and plain connections. - - 3. The WebSocket protocol has its own closing handshake. Endpoints - close the TCP connection after sending a close frame. - - As a consequence we revert to the previous, more useful behavior. 
- """ - logger.debug("%s - event = eof_received()", self.side) - super().eof_received() - return False + # Copied from asyncio.StreamReaderProtocol + self.reader.set_transport(transport) + self._over_ssl = transport.get_extra_info("sslcontext") is not None + self.writer = asyncio.StreamWriter(transport, self, self.reader, self.loop) def connection_lost(self, exc: Optional[Exception]) -> None: """ @@ -1434,4 +1350,68 @@ def connection_lost(self, exc: Optional[Exception]) -> None: # - it's set only here in connection_lost() which is called only once; # - it must never be canceled. self.connection_lost_waiter.set_result(None) - super().connection_lost(exc) + + # Copied from asyncio.StreamReaderProtocol + if self.reader is not None: + if exc is None: + self.reader.feed_eof() + else: + self.reader.set_exception(exc) + if not self._closed.done(): + if exc is None: + self._closed.set_result(None) + else: + self._closed.set_exception(exc) + + # Copied from asyncio.FlowControlMixin + self._connection_lost = True + # Wake up the writer if currently paused. + if not self._paused: + return + waiter = self._drain_waiter + if waiter is None: + return + self._drain_waiter = None + if waiter.done(): + return + if exc is None: + waiter.set_result(None) + else: + waiter.set_exception(exc) + + del self.reader + del self.writer + + def pause_writing(self) -> None: + assert not self._paused + self._paused = True + + def resume_writing(self) -> None: + assert self._paused + self._paused = False + + waiter = self._drain_waiter + if waiter is not None: + self._drain_waiter = None + if not waiter.done(): + waiter.set_result(None) + + def data_received(self, data: bytes) -> None: + logger.debug("%s - event = data_received(<%d bytes>)", self.side, len(data)) + self.reader.feed_data(data) + + def eof_received(self) -> None: + """ + Close the transport after receiving EOF. + + The WebSocket protocol has its own closing handshake: endpoints close + the TCP or TLS connection after sending and receiving a close frame. + + As a consequence, they never need to write after receiving EOF, so + there's no reason to keep the transport open by returning ``True``. + + Besides, that doesn't work on TLS connections. + + """ + logger.debug("%s - event = eof_received()", self.side) + self.reader.feed_eof() From 5330199df186ab2516ea63a6588ceb50e8e4404e Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 5 Oct 2019 13:09:40 +0200 Subject: [PATCH 157/281] Finish merging StreamReaderProtocol and FlowControlMixin. --- src/websockets/protocol.py | 59 +++++++++++++++++++------------------- 1 file changed, 29 insertions(+), 30 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 89e3464a6..2db44e5d8 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -61,35 +61,7 @@ class State(enum.IntEnum): # between the check and the assignment. -class StreamReaderProtocol(asyncio.Protocol): - def __init__(self): - self._paused = False - self._drain_waiter = None - self._connection_lost = False - - self._over_ssl = False - self._closed = self.loop.create_future() - - async def _drain_helper(self): - if self._connection_lost: - raise ConnectionResetError("Connection lost") - if not self._paused: - return - waiter = self._drain_waiter - assert waiter is None or waiter.cancelled() - waiter = self.loop.create_future() - self._drain_waiter = waiter - await waiter - - def __del__(self): - # Prevent reports about unhandled exceptions. 
- # Better than self._closed._log_traceback = False hack - closed = self._closed - if closed.done() and not closed.cancelled(): - closed.exception() - - -class WebSocketCommonProtocol(StreamReaderProtocol): +class WebSocketCommonProtocol(asyncio.Protocol): """ :class:`~asyncio.Protocol` subclass implementing the data transfer phase. @@ -259,7 +231,14 @@ def __init__( self.writer: asyncio.StreamWriter self._drain_lock = asyncio.Lock(loop=loop) - super().__init__() + # Copied from asyncio.FlowControlMixin + self._paused = False + self._drain_waiter: Optional[asyncio.Future[None]] = None + self._connection_lost = False + + # Copied from asyncio.StreamReaderProtocol + self._over_ssl = False + self._closed = self.loop.create_future() # This class implements the data transfer and closing handshake, which # are shared between the client-side and the server-side. @@ -311,6 +290,26 @@ def __init__( # Task closing the TCP connection. self.close_connection_task: asyncio.Task[None] + # Copied from asyncio.StreamReaderProtocol + def __del__(self) -> None: + # Prevent reports about unhandled exceptions. + # Better than self._closed._log_traceback = False hack + closed = self._closed + if closed.done() and not closed.cancelled(): + closed.exception() + + # Copied from asyncio.FlowControlMixin + async def _drain_helper(self) -> None: + if self._connection_lost: + raise ConnectionResetError("Connection lost") + if not self._paused: + return + waiter = self._drain_waiter + assert waiter is None or waiter.cancelled() + waiter = self.loop.create_future() + self._drain_waiter = waiter + await waiter + def connection_open(self) -> None: """ Callback when the WebSocket opening handshake completes. From 25e0a5968529bdddaf240267da367a83cef5fc35 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 5 Oct 2019 13:17:26 +0200 Subject: [PATCH 158/281] Deduplicate connection termination tracking. --- src/websockets/protocol.py | 21 +++++---------------- 1 file changed, 5 insertions(+), 16 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 2db44e5d8..1cd5a91c2 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -234,11 +234,9 @@ def __init__( # Copied from asyncio.FlowControlMixin self._paused = False self._drain_waiter: Optional[asyncio.Future[None]] = None - self._connection_lost = False # Copied from asyncio.StreamReaderProtocol self._over_ssl = False - self._closed = self.loop.create_future() # This class implements the data transfer and closing handshake, which # are shared between the client-side and the server-side. @@ -290,17 +288,14 @@ def __init__( # Task closing the TCP connection. self.close_connection_task: asyncio.Task[None] - # Copied from asyncio.StreamReaderProtocol - def __del__(self) -> None: - # Prevent reports about unhandled exceptions. 
- # Better than self._closed._log_traceback = False hack - closed = self._closed - if closed.done() and not closed.cancelled(): - closed.exception() + # asyncio.StreamWriter expects this attribute on the Protocol + @property + def _closed(self) -> asyncio.Future: + return self.connection_lost_waiter # Copied from asyncio.FlowControlMixin async def _drain_helper(self) -> None: - if self._connection_lost: + if self.connection_lost_waiter.done(): raise ConnectionResetError("Connection lost") if not self._paused: return @@ -1356,14 +1351,8 @@ def connection_lost(self, exc: Optional[Exception]) -> None: self.reader.feed_eof() else: self.reader.set_exception(exc) - if not self._closed.done(): - if exc is None: - self._closed.set_result(None) - else: - self._closed.set_exception(exc) # Copied from asyncio.FlowControlMixin - self._connection_lost = True # Wake up the writer if currently paused. if not self._paused: return From d89721dd429e2fa64288ea638b0b79829f9cd222 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 5 Oct 2019 13:21:25 +0200 Subject: [PATCH 159/281] Remove unused attribute. --- src/websockets/protocol.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 1cd5a91c2..a1c90916b 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -235,9 +235,6 @@ def __init__( self._paused = False self._drain_waiter: Optional[asyncio.Future[None]] = None - # Copied from asyncio.StreamReaderProtocol - self._over_ssl = False - # This class implements the data transfer and closing handshake, which # are shared between the client-side and the server-side. # Subclasses implement the opening handshake and, on success, execute @@ -1318,7 +1315,6 @@ def connection_made(self, transport: asyncio.BaseTransport) -> None: # Copied from asyncio.StreamReaderProtocol self.reader.set_transport(transport) - self._over_ssl = transport.get_extra_info("sslcontext") is not None self.writer = asyncio.StreamWriter(transport, self, self.reader, self.loop) def connection_lost(self, exc: Optional[Exception]) -> None: From e13387478c474396950259c4b2552a0e5469ae5e Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 5 Oct 2019 13:28:49 +0200 Subject: [PATCH 160/281] Ignore quality checks for code copied from asyncio. --- src/websockets/protocol.py | 51 +++++++++++++++++++------------------- 1 file changed, 25 insertions(+), 26 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index a1c90916b..0bb12fd5a 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -287,11 +287,11 @@ def __init__( # asyncio.StreamWriter expects this attribute on the Protocol @property - def _closed(self) -> asyncio.Future: + def _closed(self) -> Any: # pragma: no cover return self.connection_lost_waiter # Copied from asyncio.FlowControlMixin - async def _drain_helper(self) -> None: + async def _drain_helper(self) -> None: # pragma: no cover if self.connection_lost_waiter.done(): raise ConnectionResetError("Connection lost") if not self._paused: @@ -1341,36 +1341,35 @@ def connection_lost(self, exc: Optional[Exception]) -> None: # - it must never be canceled. self.connection_lost_waiter.set_result(None) - # Copied from asyncio.StreamReaderProtocol - if self.reader is not None: - if exc is None: - self.reader.feed_eof() - else: - self.reader.set_exception(exc) + if True: # pragma: no cover - # Copied from asyncio.FlowControlMixin - # Wake up the writer if currently paused. 
- if not self._paused: - return - waiter = self._drain_waiter - if waiter is None: - return - self._drain_waiter = None - if waiter.done(): - return - if exc is None: - waiter.set_result(None) - else: - waiter.set_exception(exc) + # Copied from asyncio.StreamReaderProtocol + if self.reader is not None: + if exc is None: + self.reader.feed_eof() + else: + self.reader.set_exception(exc) - del self.reader - del self.writer + # Copied from asyncio.FlowControlMixin + # Wake up the writer if currently paused. + if not self._paused: + return + waiter = self._drain_waiter + if waiter is None: + return + self._drain_waiter = None + if waiter.done(): + return + if exc is None: + waiter.set_result(None) + else: + waiter.set_exception(exc) - def pause_writing(self) -> None: + def pause_writing(self) -> None: # pragma: no cover assert not self._paused self._paused = True - def resume_writing(self) -> None: + def resume_writing(self) -> None: # pragma: no cover assert self._paused self._paused = False From 94d43ebc3309176ed9b57dbfa8e9cd44fa1697ee Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 5 Oct 2019 13:54:13 +0200 Subject: [PATCH 161/281] Remove asyncio.StreamWriter. It adds only one method for flow control. Copy it, as we've already copied the rest of the flow control implementation. --- src/websockets/client.py | 2 +- src/websockets/protocol.py | 64 +++++++++++++++++++++---------------- src/websockets/server.py | 6 ++-- tests/test_client_server.py | 2 +- tests/test_protocol.py | 18 +++++------ 5 files changed, 51 insertions(+), 41 deletions(-) diff --git a/src/websockets/client.py b/src/websockets/client.py index c1fdf88a0..34cd86240 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -85,7 +85,7 @@ def write_http_request(self, path: str, headers: Headers) -> None: request = f"GET {path} HTTP/1.1\r\n" request += str(headers) - self.writer.write(request.encode()) + self.transport.write(request.encode()) async def read_http_response(self) -> Tuple[int, Headers]: """ diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 0bb12fd5a..eb3d6bcc7 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -228,7 +228,8 @@ def __init__( self.reader: asyncio.StreamReader = asyncio.StreamReader( limit=read_limit // 2, loop=loop ) - self.writer: asyncio.StreamWriter + + self.transport: asyncio.Transport self._drain_lock = asyncio.Lock(loop=loop) # Copied from asyncio.FlowControlMixin @@ -285,11 +286,6 @@ def __init__( # Task closing the TCP connection. self.close_connection_task: asyncio.Task[None] - # asyncio.StreamWriter expects this attribute on the Protocol - @property - def _closed(self) -> Any: # pragma: no cover - return self.connection_lost_waiter - # Copied from asyncio.FlowControlMixin async def _drain_helper(self) -> None: # pragma: no cover if self.connection_lost_waiter.done(): @@ -302,6 +298,23 @@ async def _drain_helper(self) -> None: # pragma: no cover self._drain_waiter = waiter await waiter + # Copied from asyncio.StreamWriter + async def _drain(self) -> None: # pragma: no cover + if self.reader is not None: + exc = self.reader.exception() + if exc is not None: + raise exc + if self.transport is not None: + if self.transport.is_closing(): + # Yield to the event loop so connection_lost() may be + # called. 
Without this, _drain_helper() would return + # immediately, and code that calls + # write(...); yield from drain() + # in a loop would never call connection_lost(), so it + # would not see an error when the socket is closed. + await asyncio.sleep(0) + await self._drain_helper() + def connection_open(self) -> None: """ Callback when the WebSocket opening handshake completes. @@ -348,9 +361,9 @@ def local_address(self) -> Any: been established yet. """ - if self.writer is None: + if self.transport is None: return None - return self.writer.get_extra_info("sockname") + return self.transport.get_extra_info("sockname") @property def remote_address(self) -> Any: @@ -361,9 +374,9 @@ def remote_address(self) -> Any: been established yet. """ - if self.writer is None: + if self.transport is None: return None - return self.writer.get_extra_info("peername") + return self.transport.get_extra_info("peername") @property def open(self) -> bool: @@ -1037,7 +1050,9 @@ async def write_frame( frame = Frame(fin, opcode, data) logger.debug("%s > %r", self.side, frame) - frame.write(self.writer.write, mask=self.is_client, extensions=self.extensions) + frame.write( + self.transport.write, mask=self.is_client, extensions=self.extensions + ) try: # drain() cannot be called concurrently by multiple coroutines: @@ -1045,7 +1060,7 @@ async def write_frame( # version of Python where this bugs exists is supported anymore. async with self._drain_lock: # Handle flow control automatically. - await self.writer.drain() + await self._drain() except ConnectionError: # Terminate the connection if the socket died. self.fail_connection() @@ -1147,9 +1162,9 @@ async def close_connection(self) -> None: logger.debug("%s ! timed out waiting for TCP close", self.side) # Half-close the TCP connection if possible (when there's no TLS). - if self.writer.can_write_eof(): + if self.transport.can_write_eof(): logger.debug("%s x half-closing TCP connection", self.side) - self.writer.write_eof() + self.transport.write_eof() if await self.wait_for_connection_lost(): return @@ -1162,17 +1177,12 @@ async def close_connection(self) -> None: # If connection_lost() was called, the TCP connection is closed. # However, if TLS is enabled, the transport still needs closing. # Else asyncio complains: ResourceWarning: unclosed transport. - try: - writer_is_closing = self.writer.is_closing # type: ignore - except AttributeError: # pragma: no cover - # Python < 3.7 - writer_is_closing = self.writer.transport.is_closing - if self.connection_lost_waiter.done() and writer_is_closing(): + if self.connection_lost_waiter.done() and self.transport.is_closing(): return # Close the TCP connection. Buffers are flushed asynchronously. logger.debug("%s x closing TCP connection", self.side) - self.writer.close() + self.transport.close() if await self.wait_for_connection_lost(): return @@ -1180,8 +1190,7 @@ async def close_connection(self) -> None: # Abort the TCP connection. Buffers are discarded. logger.debug("%s x aborting TCP connection", self.side) - # mypy thinks self.writer.transport is a BaseTransport, not a Transport. - self.writer.transport.abort() # type: ignore + self.transport.abort() # connection_lost() is called quickly after aborting. 
await self.wait_for_connection_lost() @@ -1261,7 +1270,7 @@ def fail_connection(self, code: int = 1006, reason: str = "") -> None: frame = Frame(True, OP_CLOSE, frame_data) logger.debug("%s > %r", self.side, frame) frame.write( - self.writer.write, mask=self.is_client, extensions=self.extensions + self.transport.write, mask=self.is_client, extensions=self.extensions ) # Start close_connection_task if the opening handshake didn't succeed. @@ -1310,12 +1319,13 @@ def connection_made(self, transport: asyncio.BaseTransport) -> None: """ logger.debug("%s - event = connection_made(%s)", self.side, transport) - # mypy thinks transport is a BaseTransport, not a Transport. - transport.set_write_buffer_limits(self.write_limit) # type: ignore + + transport = cast(asyncio.Transport, transport) + transport.set_write_buffer_limits(self.write_limit) + self.transport = transport # Copied from asyncio.StreamReaderProtocol self.reader.set_transport(transport) - self.writer = asyncio.StreamWriter(transport, self, self.reader, self.loop) def connection_lost(self, exc: Optional[Exception]) -> None: """ diff --git a/src/websockets/server.py b/src/websockets/server.py index b220a1b88..1e8ae8617 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -211,7 +211,7 @@ async def handler(self) -> None: except Exception: # Last-ditch attempt to avoid leaking connections on errors. try: - self.writer.close() + self.transport.close() except Exception: # pragma: no cover pass @@ -265,11 +265,11 @@ def write_http_response( response = f"HTTP/1.1 {status.value} {status.phrase}\r\n" response += str(headers) - self.writer.write(response.encode()) + self.transport.write(response.encode()) if body is not None: logger.debug("%s > body (%d bytes)", self.side, len(body)) - self.writer.write(body) + self.transport.write(body) async def process_request( self, path: str, request_headers: Headers diff --git a/tests/test_client_server.py b/tests/test_client_server.py index e74ec6bf6..6171f21b0 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -1166,7 +1166,7 @@ def test_server_close_crashes(self, close): def test_client_closes_connection_before_handshake(self, handshake): # We have mocked the handshake() method to prevent the client from # performing the opening handshake. Force it to close the connection. - self.client.writer.close() + self.client.transport.close() # The server should stop properly anyway. It used to hang because the # task handling the connection was waiting for the opening handshake. diff --git a/tests/test_protocol.py b/tests/test_protocol.py index a6c420181..dfc2c6d45 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -94,16 +94,16 @@ def tearDown(self): # Utilities for writing tests. def make_drain_slow(self, delay=MS): - # Process connection_made in order to initialize self.protocol.writer. + # Process connection_made in order to initialize self.protocol.transport. 
self.run_loop_once() - original_drain = self.protocol.writer.drain + original_drain = self.protocol._drain async def delayed_drain(): await asyncio.sleep(delay, loop=self.loop) await original_drain() - self.protocol.writer.drain = delayed_drain + self.protocol._drain = delayed_drain close_frame = Frame(True, OP_CLOSE, serialize_close(1000, "close")) local_close = Frame(True, OP_CLOSE, serialize_close(1000, "local")) @@ -321,32 +321,32 @@ def test_local_address(self): self.transport.get_extra_info = get_extra_info self.assertEqual(self.protocol.local_address, ("host", 4312)) - get_extra_info.assert_called_with("sockname", None) + get_extra_info.assert_called_with("sockname") def test_local_address_before_connection(self): # Emulate the situation before connection_open() runs. - self.protocol.writer, _writer = None, self.protocol.writer + self.protocol.transport, _transport = None, self.protocol.transport try: self.assertEqual(self.protocol.local_address, None) finally: - self.protocol.writer = _writer + self.protocol.transport = _transport def test_remote_address(self): get_extra_info = unittest.mock.Mock(return_value=("host", 4312)) self.transport.get_extra_info = get_extra_info self.assertEqual(self.protocol.remote_address, ("host", 4312)) - get_extra_info.assert_called_with("peername", None) + get_extra_info.assert_called_with("peername") def test_remote_address_before_connection(self): # Emulate the situation before connection_open() runs. - self.protocol.writer, _writer = None, self.protocol.writer + self.protocol.transport, _transport = None, self.protocol.transport try: self.assertEqual(self.protocol.remote_address, None) finally: - self.protocol.writer = _writer + self.protocol.transport = _transport def test_open(self): self.assertTrue(self.protocol.open) From 8952c3a78a0cbf98501c94c30920a3eb4162c5d2 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 5 Oct 2019 13:55:16 +0200 Subject: [PATCH 162/281] Rename writer to write. It's a better name for a function that writes bytes. --- src/websockets/framing.py | 8 ++++---- tests/test_framing.py | 14 +++++++------- tests/test_protocol.py | 4 ++-- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/websockets/framing.py b/src/websockets/framing.py index 81a3185b0..c24b8a73d 100644 --- a/src/websockets/framing.py +++ b/src/websockets/framing.py @@ -147,7 +147,7 @@ async def read( def write( frame, - writer: Callable[[bytes], Any], + write: Callable[[bytes], Any], *, mask: bool, extensions: Optional[Sequence["websockets.extensions.base.Extension"]] = None, @@ -156,7 +156,7 @@ def write( Write a WebSocket frame. :param frame: frame to write - :param writer: function that writes bytes + :param write: function that writes bytes :param mask: whether the frame should be masked i.e. whether the write happens on the client side :param extensions: list of classes with an ``encode()`` method that @@ -210,10 +210,10 @@ def write( # Send the frame. - # The frame is written in a single call to writer in order to prevent + # The frame is written in a single call to write in order to prevent # TCP fragmentation. See #68 for details. This also makes it safe to # send frames concurrently from multiple coroutines. 
- writer(output.getvalue()) + write(output.getvalue()) def check(frame) -> None: """ diff --git a/tests/test_framing.py b/tests/test_framing.py index 9e6f1871d..5def415d2 100644 --- a/tests/test_framing.py +++ b/tests/test_framing.py @@ -27,15 +27,15 @@ def decode(self, message, mask=False, max_size=None, extensions=None): return frame def encode(self, frame, mask=False, extensions=None): - writer = unittest.mock.Mock() - frame.write(writer, mask=mask, extensions=extensions) - # Ensure the entire frame is sent with a single call to writer(). + write = unittest.mock.Mock() + frame.write(write, mask=mask, extensions=extensions) + # Ensure the entire frame is sent with a single call to write(). # Multiple calls cause TCP fragmentation and degrade performance. - self.assertEqual(writer.call_count, 1) + self.assertEqual(write.call_count, 1) # The frame data is the single positional argument of that call. - self.assertEqual(len(writer.call_args[0]), 1) - self.assertEqual(len(writer.call_args[1]), 0) - return writer.call_args[0][0] + self.assertEqual(len(write.call_args[0]), 1) + self.assertEqual(len(write.call_args[1]), 0) + return write.call_args[0][0] def round_trip(self, message, expected, mask=False, extensions=None): decoded = self.decode(message, mask, extensions=extensions) diff --git a/tests/test_protocol.py b/tests/test_protocol.py index dfc2c6d45..d2793faf5 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -114,9 +114,9 @@ def receive_frame(self, frame): Make the protocol receive a frame. """ - writer = self.protocol.data_received + write = self.protocol.data_received mask = not self.protocol.is_client - frame.write(writer, mask=mask) + frame.write(write, mask=mask) def receive_eof(self): """ From e679490cf2af87bc060fc63a0f2898444f26d5c3 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 5 Oct 2019 15:49:08 +0200 Subject: [PATCH 163/281] Update to the latest version of mypy. The bugs that were locking us on an old version are fixed. --- src/websockets/__init__.py | 15 ++++++++------- src/websockets/__main__.py | 8 ++++---- src/websockets/client.py | 3 +-- src/websockets/handshake.py | 15 ++++++++++----- src/websockets/protocol.py | 14 +++++++++----- src/websockets/server.py | 11 +++-------- tox.ini | 2 +- 7 files changed, 36 insertions(+), 32 deletions(-) diff --git a/src/websockets/__init__.py b/src/websockets/__init__.py index e7ba31ce5..6bad0f7bc 100644 --- a/src/websockets/__init__.py +++ b/src/websockets/__init__.py @@ -1,12 +1,13 @@ # This relies on each of the submodules having an __all__ variable. -from .auth import * -from .client import * -from .exceptions import * -from .protocol import * -from .server import * -from .typing import * -from .uri import * +from . 
import auth, client, exceptions, protocol, server, typing, uri +from .auth import * # noqa +from .client import * # noqa +from .exceptions import * # noqa +from .protocol import * # noqa +from .server import * # noqa +from .typing import * # noqa +from .uri import * # noqa from .version import version as __version__ # noqa diff --git a/src/websockets/__main__.py b/src/websockets/__main__.py index bccb8aa52..394f7ac79 100644 --- a/src/websockets/__main__.py +++ b/src/websockets/__main__.py @@ -6,8 +6,8 @@ import threading from typing import Any, Set -import websockets -from websockets.exceptions import format_close +from .client import connect +from .exceptions import ConnectionClosed, format_close if sys.platform == "win32": @@ -95,7 +95,7 @@ async def run_client( stop: "asyncio.Future[None]", ) -> None: try: - websocket = await websockets.connect(uri) + websocket = await connect(uri) except Exception as exc: print_over_input(f"Failed to connect to {uri}: {exc}.") exit_from_event_loop_thread(loop, stop) @@ -122,7 +122,7 @@ async def run_client( if incoming in done: try: message = incoming.result() - except websockets.ConnectionClosed: + except ConnectionClosed: break else: if isinstance(message, str): diff --git a/src/websockets/client.py b/src/websockets/client.py index 34cd86240..725ec1e7a 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -24,7 +24,6 @@ from .extensions.permessage_deflate import ClientPerMessageDeflateFactory from .handshake import build_request, check_response from .headers import ( - ExtensionHeader, build_authorization_basic, build_extension, build_subprotocol, @@ -33,7 +32,7 @@ ) from .http import USER_AGENT, Headers, HeadersLike, read_response from .protocol import WebSocketCommonProtocol -from .typing import Origin, Subprotocol +from .typing import ExtensionHeader, Origin, Subprotocol from .uri import WebSocketURI, parse_uri diff --git a/src/websockets/handshake.py b/src/websockets/handshake.py index 17332d155..9bfe27754 100644 --- a/src/websockets/handshake.py +++ b/src/websockets/handshake.py @@ -29,9 +29,10 @@ import binascii import hashlib import random +from typing import List from .exceptions import InvalidHeader, InvalidHeaderValue, InvalidUpgrade -from .headers import parse_connection, parse_upgrade +from .headers import ConnectionOption, UpgradeProtocol, parse_connection, parse_upgrade from .http import Headers, MultipleValuesError @@ -74,14 +75,16 @@ def check_request(headers: Headers) -> str: is invalid; then the server must return 400 Bad Request error """ - connection = sum( + connection: List[ConnectionOption] = sum( [parse_connection(value) for value in headers.get_all("Connection")], [] ) if not any(value.lower() == "upgrade" for value in connection): raise InvalidUpgrade("Connection", ", ".join(connection)) - upgrade = sum([parse_upgrade(value) for value in headers.get_all("Upgrade")], []) + upgrade: List[UpgradeProtocol] = sum( + [parse_upgrade(value) for value in headers.get_all("Upgrade")], [] + ) # For compatibility with non-strict implementations, ignore case when # checking the Upgrade header. It's supposed to be 'WebSocket'. 
@@ -148,14 +151,16 @@ def check_response(headers: Headers, key: str) -> None: is invalid """ - connection = sum( + connection: List[ConnectionOption] = sum( [parse_connection(value) for value in headers.get_all("Connection")], [] ) if not any(value.lower() == "upgrade" for value in connection): raise InvalidUpgrade("Connection", " ".join(connection)) - upgrade = sum([parse_upgrade(value) for value in headers.get_all("Upgrade")], []) + upgrade: List[UpgradeProtocol] = sum( + [parse_upgrade(value) for value in headers.get_all("Upgrade")], [] + ) # For compatibility with non-strict implementations, ignore case when # checking the Upgrade header. It's supposed to be 'WebSocket'. diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index eb3d6bcc7..b7c1f19c9 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -601,10 +601,14 @@ async def send( elif isinstance(message, AsyncIterable): # aiter_message = aiter(message) without aiter - aiter_message = type(message).__aiter__(message) + # https://github.com/python/mypy/issues/5738 + aiter_message = type(message).__aiter__(message) # type: ignore try: # message_chunk = anext(aiter_message) without anext - message_chunk = await type(aiter_message).__anext__(aiter_message) + # https://github.com/python/mypy/issues/5738 + message_chunk = await type(aiter_message).__anext__( # type: ignore + aiter_message + ) except StopAsyncIteration: return opcode, data = prepare_data(message_chunk) @@ -615,7 +619,8 @@ async def send( await self.write_frame(False, opcode, data) # Other fragments. - async for message_chunk in aiter_message: + # https://github.com/python/mypy/issues/5738 + async for message_chunk in aiter_message: # type: ignore confirm_opcode, data = prepare_data(message_chunk) if confirm_opcode != opcode: raise TypeError("data contains inconsistent types") @@ -899,8 +904,7 @@ async def read_message(self) -> Optional[Data]: max_size = self.max_size if text: decoder_factory = codecs.getincrementaldecoder("utf-8") - # https://github.com/python/typeshed/pull/2752 - decoder = decoder_factory(errors="strict") # type: ignore + decoder = decoder_factory(errors="strict") if max_size is None: def append(frame: Frame) -> None: diff --git a/src/websockets/server.py b/src/websockets/server.py index 1e8ae8617..5114646dd 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -39,15 +39,10 @@ from .extensions.base import Extension, ServerExtensionFactory from .extensions.permessage_deflate import ServerPerMessageDeflateFactory from .handshake import build_response, check_request -from .headers import ( - ExtensionHeader, - build_extension, - parse_extension, - parse_subprotocol, -) +from .headers import build_extension, parse_extension, parse_subprotocol from .http import USER_AGENT, Headers, HeadersLike, MultipleValuesError, read_request from .protocol import WebSocketCommonProtocol -from .typing import Origin, Subprotocol +from .typing import ExtensionHeader, Origin, Subprotocol __all__ = ["serve", "unix_serve", "WebSocketServerProtocol", "WebSocketServer"] @@ -662,7 +657,7 @@ def is_serving(self) -> bool: """ try: # Python ≥ 3.7 - return self.server.is_serving() # type: ignore + return self.server.is_serving() except AttributeError: # pragma: no cover # Python < 3.7 return self.server.sockets is not None diff --git a/tox.ini b/tox.ini index 801d4d5d1..7397c90ae 100644 --- a/tox.ini +++ b/tox.ini @@ -25,4 +25,4 @@ deps = isort [testenv:mypy] commands = mypy --strict src -deps = mypy==0.670 +deps = mypy From 
65ae7cd42ca5bcd1796e33c42909752b26b197f9 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 5 Oct 2019 15:49:51 +0200 Subject: [PATCH 164/281] Fix deprecation warnings on Python 3.8. * Don't pass the deprecated loop argument. * Ignore deprecation warnings for @asyncio.coroutine. --- src/websockets/protocol.py | 28 ++++++++++++++++++++-------- src/websockets/server.py | 10 +++++++--- tests/test_client_server.py | 36 +++++++++++++++++++++--------------- tests/test_protocol.py | 5 ++++- 4 files changed, 52 insertions(+), 27 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index b7c1f19c9..76d46ad9c 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -14,6 +14,7 @@ import logging import random import struct +import sys import warnings from typing import ( Any, @@ -230,7 +231,9 @@ def __init__( ) self.transport: asyncio.Transport - self._drain_lock = asyncio.Lock(loop=loop) + self._drain_lock = asyncio.Lock( + loop=loop if sys.version_info[:2] < (3, 8) else None + ) # Copied from asyncio.FlowControlMixin self._paused = False @@ -312,7 +315,9 @@ async def _drain(self) -> None: # pragma: no cover # write(...); yield from drain() # in a loop would never call connection_lost(), so it # would not see an error when the socket is closed. - await asyncio.sleep(0) + await asyncio.sleep( + 0, loop=self.loop if sys.version_info[:2] < (3, 8) else None + ) await self._drain_helper() def connection_open(self) -> None: @@ -483,7 +488,7 @@ async def recv(self) -> Data: # pop_message_waiter and self.transfer_data_task. await asyncio.wait( [pop_message_waiter, self.transfer_data_task], - loop=self.loop, + loop=self.loop if sys.version_info[:2] < (3, 8) else None, return_when=asyncio.FIRST_COMPLETED, ) finally: @@ -668,7 +673,7 @@ async def close(self, code: int = 1000, reason: str = "") -> None: await asyncio.wait_for( self.write_close_frame(serialize_close(code, reason)), self.close_timeout, - loop=self.loop, + loop=self.loop if sys.version_info[:2] < (3, 8) else None, ) except asyncio.TimeoutError: # If the close frame cannot be sent because the send buffers @@ -687,7 +692,9 @@ async def close(self, code: int = 1000, reason: str = "") -> None: # If close() is canceled during the wait, self.transfer_data_task # is canceled before the timeout elapses. await asyncio.wait_for( - self.transfer_data_task, self.close_timeout, loop=self.loop + self.transfer_data_task, + self.close_timeout, + loop=self.loop if sys.version_info[:2] < (3, 8) else None, ) except (asyncio.TimeoutError, asyncio.CancelledError): pass @@ -1106,7 +1113,10 @@ async def keepalive_ping(self) -> None: try: while True: - await asyncio.sleep(self.ping_interval, loop=self.loop) + await asyncio.sleep( + self.ping_interval, + loop=self.loop if sys.version_info[:2] < (3, 8) else None, + ) # ping() raises CancelledError if the connection is closed, # when close_connection() cancels self.keepalive_ping_task. @@ -1119,7 +1129,9 @@ async def keepalive_ping(self) -> None: if self.ping_timeout is not None: try: await asyncio.wait_for( - ping_waiter, self.ping_timeout, loop=self.loop + ping_waiter, + self.ping_timeout, + loop=self.loop if sys.version_info[:2] < (3, 8) else None, ) except asyncio.TimeoutError: logger.debug("%s ! 
timed out waiting for pong", self.side) @@ -1211,7 +1223,7 @@ async def wait_for_connection_lost(self) -> bool: await asyncio.wait_for( asyncio.shield(self.connection_lost_waiter), self.close_timeout, - loop=self.loop, + loop=self.loop if sys.version_info[:2] < (3, 8) else None, ) except asyncio.TimeoutError: pass diff --git a/src/websockets/server.py b/src/websockets/server.py index 5114646dd..4f5e9e0ef 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -10,6 +10,7 @@ import http import logging import socket +import sys import warnings from types import TracebackType from typing import ( @@ -698,7 +699,9 @@ async def _close(self) -> None: # Wait until all accepted connections reach connection_made() and call # register(). See https://bugs.python.org/issue34852 for details. - await asyncio.sleep(0) + await asyncio.sleep( + 0, loop=self.loop if sys.version_info[:2] < (3, 8) else None + ) # Close OPEN connections with status code 1001. Since the server was # closed, handshake() closes OPENING conections with a HTTP 503 error. @@ -707,7 +710,8 @@ async def _close(self) -> None: # asyncio.wait doesn't accept an empty first argument if self.websockets: await asyncio.wait( - [websocket.close(1001) for websocket in self.websockets], loop=self.loop + [websocket.close(1001) for websocket in self.websockets], + loop=self.loop if sys.version_info[:2] < (3, 8) else None, ) # Wait until all connection handlers are complete. @@ -716,7 +720,7 @@ async def _close(self) -> None: if self.websockets: await asyncio.wait( [websocket.handler_task for websocket in self.websockets], - loop=self.loop, + loop=self.loop if sys.version_info[:2] < (3, 8) else None, ) # Tell wait_closed() to return. diff --git a/tests/test_client_server.py b/tests/test_client_server.py index 6171f21b0..85828bdbc 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -1381,13 +1381,16 @@ def test_client(self): start_server = serve(handler, "localhost", 0) server = self.loop.run_until_complete(start_server) - @asyncio.coroutine - def run_client(): - # Yield from connect. - client = yield from connect(get_server_uri(server)) - self.assertEqual(client.state, State.OPEN) - yield from client.close() - self.assertEqual(client.state, State.CLOSED) + # @asyncio.coroutine is deprecated on Python ≥ 3.8 + with warnings.catch_warnings(record=True): + + @asyncio.coroutine + def run_client(): + # Yield from connect. + client = yield from connect(get_server_uri(server)) + self.assertEqual(client.state, State.OPEN) + yield from client.close() + self.assertEqual(client.state, State.CLOSED) self.loop.run_until_complete(run_client()) @@ -1395,14 +1398,17 @@ def run_client(): self.loop.run_until_complete(server.wait_closed()) def test_server(self): - @asyncio.coroutine - def run_server(): - # Yield from serve. - server = yield from serve(handler, "localhost", 0) - self.assertTrue(server.sockets) - server.close() - yield from server.wait_closed() - self.assertFalse(server.sockets) + # @asyncio.coroutine is deprecated on Python ≥ 3.8 + with warnings.catch_warnings(record=True): + + @asyncio.coroutine + def run_server(): + # Yield from serve. 
+ server = yield from serve(handler, "localhost", 0) + self.assertTrue(server.sockets) + server.close() + yield from server.wait_closed() + self.assertFalse(server.sockets) self.loop.run_until_complete(run_server()) diff --git a/tests/test_protocol.py b/tests/test_protocol.py index d2793faf5..04e2a38fa 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -1,6 +1,7 @@ import asyncio import contextlib import logging +import sys import unittest import unittest.mock import warnings @@ -100,7 +101,9 @@ def make_drain_slow(self, delay=MS): original_drain = self.protocol._drain async def delayed_drain(): - await asyncio.sleep(delay, loop=self.loop) + await asyncio.sleep( + delay, loop=self.loop if sys.version_info[:2] < (3, 8) else None + ) await original_drain() self.protocol._drain = delayed_drain From aa7c21497ce58c03c9d10eaeb70768c484d7d6ae Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 5 Oct 2019 14:02:51 +0200 Subject: [PATCH 165/281] Document and test support for Python 3.8. --- .circleci/config.yml | 12 ++++++++++++ docs/changelog.rst | 2 ++ setup.py | 1 + tox.ini | 2 +- 4 files changed, 16 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index a6c85d237..0877c161a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -29,6 +29,15 @@ jobs: - checkout - run: sudo pip install tox - run: tox -e py37 + py38: + docker: + - image: circleci/python:3.8.0rc1 + steps: + # Remove IPv6 entry for localhost in Circle CI containers because it doesn't work anyway. + - run: sudo cp /etc/hosts /tmp; sudo sed -i '/::1/d' /tmp/hosts; sudo cp /tmp/hosts /etc + - checkout + - run: sudo pip install tox + - run: tox -e py38 workflows: version: 2 @@ -41,3 +50,6 @@ workflows: - py37: requires: - main + - py38: + requires: + - main diff --git a/docs/changelog.rst b/docs/changelog.rst index 87b2e4380..2a106fbc0 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -8,6 +8,8 @@ Changelog *In development* +* Added compatibility with Python 3.8. + 8.0.2 ..... diff --git a/setup.py b/setup.py index c76430104..f35819247 100644 --- a/setup.py +++ b/setup.py @@ -53,6 +53,7 @@ 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', ], package_dir = {'': 'src'}, package_data = {'websockets': ['py.typed']}, diff --git a/tox.ini b/tox.ini index 7397c90ae..825e34061 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py36,py37,coverage,black,flake8,isort,mypy +envlist = py36,py37,py38,coverage,black,flake8,isort,mypy [testenv] commands = python -W default -m unittest {posargs} From a9ef745899b8346526eb3e29a95b5e0f7db9a1f2 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 5 Oct 2019 21:18:30 +0200 Subject: [PATCH 166/281] Move test logging configuration to a single place. --- tests/__init__.py | 5 +++++ tests/test_client_server.py | 5 ----- tests/test_protocol.py | 5 ----- 3 files changed, 5 insertions(+), 10 deletions(-) diff --git a/tests/__init__.py b/tests/__init__.py index e69de29bb..dd78609f5 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -0,0 +1,5 @@ +import logging + + +# Avoid displaying stack traces at the ERROR logging level. 
+logging.basicConfig(level=logging.CRITICAL) diff --git a/tests/test_client_server.py b/tests/test_client_server.py index 85828bdbc..ce0f66ce2 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -2,7 +2,6 @@ import contextlib import functools import http -import logging import pathlib import random import socket @@ -37,10 +36,6 @@ from .utils import AsyncioTestCase -# Avoid displaying stack traces at the ERROR logging level. -logging.basicConfig(level=logging.CRITICAL) - - # Generate TLS certificate with: # $ openssl req -x509 -config test_localhost.cnf -days 15340 -newkey rsa:2048 \ # -out test_localhost.crt -keyout test_localhost.key diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 04e2a38fa..d95260a84 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -1,6 +1,5 @@ import asyncio import contextlib -import logging import sys import unittest import unittest.mock @@ -13,10 +12,6 @@ from .utils import MS, AsyncioTestCase -# Avoid displaying stack traces at the ERROR logging level. -logging.basicConfig(level=logging.CRITICAL) - - async def async_iterable(iterable): for item in iterable: yield item From 1d673debfd306e3e1953f0312390fa5456e09b5a Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 5 Oct 2019 21:33:27 +0200 Subject: [PATCH 167/281] Remove test that no longer makes sense. Since version 7.0, when the server closes, it terminates connections with close code 1001 instead of canceling them. --- tests/test_client_server.py | 18 ++---------------- 1 file changed, 2 insertions(+), 16 deletions(-) diff --git a/tests/test_client_server.py b/tests/test_client_server.py index ce0f66ce2..35913666c 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -173,7 +173,7 @@ async def process_request(self, path, request_headers): return http.HTTPStatus.OK, [("X-Access", "OK")], b"status = green\n" -class SlowServerProtocol(WebSocketServerProtocol): +class SlowOpeningHandshakeProtocol(WebSocketServerProtocol): async def process_request(self, path, request_headers): await asyncio.sleep(10 * MS) @@ -1165,7 +1165,7 @@ def test_client_closes_connection_before_handshake(self, handshake): # The server should stop properly anyway. It used to hang because the # task handling the connection was waiting for the opening handshake. - @with_server(create_protocol=SlowServerProtocol) + @with_server(create_protocol=SlowOpeningHandshakeProtocol) def test_server_shuts_down_during_opening_handshake(self): self.loop.call_later(5 * MS, self.server.close) with self.assertRaises(InvalidStatusCode) as raised: @@ -1188,20 +1188,6 @@ def test_server_shuts_down_during_connection_handling(self): self.assertEqual(self.client.close_code, 1001) self.assertEqual(server_ws.close_code, 1001) - @with_server() - @unittest.mock.patch("websockets.server.WebSocketServerProtocol.close") - def test_server_shuts_down_during_connection_close(self, _close): - _close.side_effect = asyncio.CancelledError - - self.server.closing = True - with self.temp_client(): - self.loop.run_until_complete(self.client.send("Hello!")) - reply = self.loop.run_until_complete(self.client.recv()) - self.assertEqual(reply, "Hello!") - - # Websocket connection terminates abnormally. 
- self.assertEqual(self.client.close_code, 1006) - @with_server() def test_server_shuts_down_waits_until_handlers_terminate(self): # This handler waits a bit after the connection is closed in order From d537c26ac380a1b74444f83f31cd744f7f24bf15 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 5 Oct 2019 21:41:13 +0200 Subject: [PATCH 168/281] Fix refactoring error. WebSocketCommonProtocol.transport can be unset, but it cannot be None. --- src/websockets/protocol.py | 14 ++++++++++---- tests/test_protocol.py | 8 ++++---- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 76d46ad9c..0623e1364 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -366,9 +366,12 @@ def local_address(self) -> Any: been established yet. """ - if self.transport is None: + try: + transport = self.transport + except AttributeError: return None - return self.transport.get_extra_info("sockname") + else: + return transport.get_extra_info("sockname") @property def remote_address(self) -> Any: @@ -379,9 +382,12 @@ def remote_address(self) -> Any: been established yet. """ - if self.transport is None: + try: + transport = self.transport + except AttributeError: return None - return self.transport.get_extra_info("peername") + else: + return transport.get_extra_info("peername") @property def open(self) -> bool: diff --git a/tests/test_protocol.py b/tests/test_protocol.py index d95260a84..66a822e79 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -323,8 +323,8 @@ def test_local_address(self): def test_local_address_before_connection(self): # Emulate the situation before connection_open() runs. - self.protocol.transport, _transport = None, self.protocol.transport - + _transport = self.protocol.transport + del self.protocol.transport try: self.assertEqual(self.protocol.local_address, None) finally: @@ -339,8 +339,8 @@ def test_remote_address(self): def test_remote_address_before_connection(self): # Emulate the situation before connection_open() runs. - self.protocol.transport, _transport = None, self.protocol.transport - + _transport = self.protocol.transport + del self.protocol.transport try: self.assertEqual(self.protocol.remote_address, None) finally: From 154c5fa964fe407341edad5e70367e64913023bb Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 5 Oct 2019 21:39:14 +0200 Subject: [PATCH 169/281] Remove useless type declaration. --- src/websockets/protocol.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 0623e1364..6c29b2a52 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -226,19 +226,16 @@ def __init__( # ``self.read_limit``. The ``limit`` argument controls the line length # limit and half the buffer limit of :class:`~asyncio.StreamReader`. # That's why it must be set to half of ``self.read_limit``. 
- self.reader: asyncio.StreamReader = asyncio.StreamReader( - limit=read_limit // 2, loop=loop - ) - - self.transport: asyncio.Transport - self._drain_lock = asyncio.Lock( - loop=loop if sys.version_info[:2] < (3, 8) else None - ) + self.reader = asyncio.StreamReader(limit=read_limit // 2, loop=loop) # Copied from asyncio.FlowControlMixin self._paused = False self._drain_waiter: Optional[asyncio.Future[None]] = None + self._drain_lock = asyncio.Lock( + loop=loop if sys.version_info[:2] < (3, 8) else None + ) + # This class implements the data transfer and closing handshake, which # are shared between the client-side and the server-side. # Subclasses implement the opening handshake and, on success, execute From 3dab1fbe3705ba2c24cc7672d5ca3d7f02ea3535 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 6 Oct 2019 13:54:42 +0200 Subject: [PATCH 170/281] Small simplification. --- src/websockets/client.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/websockets/client.py b/src/websockets/client.py index 725ec1e7a..eb58f9f48 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -476,7 +476,6 @@ def __init__( # This is a coroutine function. self._create_connection = create_connection self._wsuri = wsuri - self._origin = origin def handle_redirect(self, uri: str) -> None: # Update the state of this instance to connect to a new URI. @@ -542,7 +541,7 @@ async def __await_impl__(self) -> WebSocketClientProtocol: try: await protocol.handshake( self._wsuri, - origin=self._origin, + origin=protocol.origin, available_extensions=protocol.available_extensions, available_subprotocols=protocol.available_subprotocols, extra_headers=protocol.extra_headers, From 2a87496cd80b273205bf5226ab0f9c12078b775d Mon Sep 17 00:00:00 2001 From: Anton Agestam Date: Tue, 8 Oct 2019 17:51:50 +0200 Subject: [PATCH 171/281] hardcoded top-level export --- src/websockets/__init__.py | 53 +++++++++++++++++++++++++++++++------- tests/test_exports.py | 22 ++++++++++++++++ 2 files changed, 65 insertions(+), 10 deletions(-) create mode 100644 tests/test_exports.py diff --git a/src/websockets/__init__.py b/src/websockets/__init__.py index 6bad0f7bc..ea1d829a3 100644 --- a/src/websockets/__init__.py +++ b/src/websockets/__init__.py @@ -1,6 +1,5 @@ # This relies on each of the submodules having an __all__ variable. -from . 
import auth, client, exceptions, protocol, server, typing, uri from .auth import * # noqa from .client import * # noqa from .exceptions import * # noqa @@ -11,12 +10,46 @@ from .version import version as __version__ # noqa -__all__ = ( - auth.__all__ - + client.__all__ - + exceptions.__all__ - + protocol.__all__ - + server.__all__ - + typing.__all__ - + uri.__all__ -) +__all__ = [ + "AbortHandshake", + "basic_auth_protocol_factory", + "BasicAuthWebSocketServerProtocol", + "connect", + "ConnectionClosed", + "ConnectionClosedError", + "ConnectionClosedOK", + "Data", + "DuplicateParameter", + "ExtensionHeader", + "ExtensionParameter", + "InvalidHandshake", + "InvalidHeader", + "InvalidHeaderFormat", + "InvalidHeaderValue", + "InvalidMessage", + "InvalidOrigin", + "InvalidParameterName", + "InvalidParameterValue", + "InvalidState", + "InvalidStatusCode", + "InvalidUpgrade", + "InvalidURI", + "NegotiationError", + "Origin", + "parse_uri", + "PayloadTooBig", + "ProtocolError", + "RedirectHandshake", + "SecurityError", + "serve", + "Subprotocol", + "unix_connect", + "unix_serve", + "WebSocketClientProtocol", + "WebSocketCommonProtocol", + "WebSocketException", + "WebSocketProtocolError", + "WebSocketServer", + "WebSocketServerProtocol", + "WebSocketURI", +] diff --git a/tests/test_exports.py b/tests/test_exports.py new file mode 100644 index 000000000..7fcbc80e3 --- /dev/null +++ b/tests/test_exports.py @@ -0,0 +1,22 @@ +import unittest + +import websockets + + +combined_exports = ( + websockets.auth.__all__ + + websockets.client.__all__ + + websockets.exceptions.__all__ + + websockets.protocol.__all__ + + websockets.server.__all__ + + websockets.typing.__all__ + + websockets.uri.__all__ +) + + +class TestExportsAllSubmodules(unittest.TestCase): + def test_top_level_module_reexports_all_submodule_exports(self): + self.assertEqual(set(combined_exports), set(websockets.__all__)) + + def test_submodule_exports_are_globally_unique(self): + self.assertEqual(len(set(combined_exports)), len(combined_exports)) From d62ef45facfc07aedf1f630b891f8c06212c5c59 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Fri, 1 Nov 2019 09:11:06 +0100 Subject: [PATCH 172/281] Use the new Tidelift copy in README. --- README.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.rst b/README.rst index 5dc9a745d..a9f54a35e 100644 --- a/README.rst +++ b/README.rst @@ -83,11 +83,11 @@ Does that look good?
-      Professionally supported websockets is now available
-      Tidelift gives software development teams a single source for purchasing and maintaining their software, with professional grade assurances from the experts who know it best, while seamlessly integrating with existing tools.
-      Get supported websockets with the Tidelift Subscription
+      websockets for enterprise
+      Available as part of the Tidelift Subscription
+      The maintainers of websockets and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. Learn more.

-      (If you contribute to ``websockets`` and would like to become an official support provider, let me know.)
+      (If you contribute to `websockets` and would like to become an official support provider, let me know.)

Why should I use ``websockets``? -------------------------------- From 0b5de4e3d11928115c56d52a983d0fc356559925 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Fri, 1 Nov 2019 10:05:23 +0100 Subject: [PATCH 173/281] Add websockets for enterprise page to the docs. --- docs/_static/tidelift.png | 1 + docs/index.rst | 3 +- docs/tidelift.rst | 112 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 115 insertions(+), 1 deletion(-) create mode 120000 docs/_static/tidelift.png create mode 100644 docs/tidelift.rst diff --git a/docs/_static/tidelift.png b/docs/_static/tidelift.png new file mode 120000 index 000000000..2d1ed4a2c --- /dev/null +++ b/docs/_static/tidelift.png @@ -0,0 +1 @@ +../../logo/tidelift.png \ No newline at end of file diff --git a/docs/index.rst b/docs/index.rst index c18af96e4..1b2f85f0a 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -93,6 +93,7 @@ This is about websockets-the-project rather than websockets-the-software. .. toctree:: :maxdepth: 2 - contributing changelog + contributing license + For enterprise diff --git a/docs/tidelift.rst b/docs/tidelift.rst new file mode 100644 index 000000000..43b457aaf --- /dev/null +++ b/docs/tidelift.rst @@ -0,0 +1,112 @@ +websockets for enterprise +========================= + +Available as part of the Tidelift Subscription +---------------------------------------------- + +.. image:: _static/tidelift.png + :height: 150px + :width: 150px + :align: left + +Tidelift is working with the maintainers of websockets and thousands of other +open source projects to deliver commercial support and maintenance for the +open source dependencies you use to build your applications. Save time, reduce +risk, and improve code health, while paying the maintainers of the exact +dependencies you use. + +.. raw:: html + + + + + +Enterprise-ready open source software—managed for you +----------------------------------------------------- + +The Tidelift Subscription is a managed open source subscription for +application dependencies covering millions of open source projects across +JavaScript, Python, Java, PHP, Ruby, .NET, and more. + +Your subscription includes: + +* **Security updates** + + * Tidelift’s security response team coordinates patches for new breaking + security vulnerabilities and alerts immediately through a private channel, + so your software supply chain is always secure. + +* **Licensing verification and indemnification** + + * Tidelift verifies license information to enable easy policy enforcement + and adds intellectual property indemnification to cover creators and users + in case something goes wrong. You always have a 100% up-to-date bill of + materials for your dependencies to share with your legal team, customers, + or partners. + +* **Maintenance and code improvement** + + * Tidelift ensures the software you rely on keeps working as long as you + need it to work. Your managed dependencies are actively maintained and we + recruit additional maintainers where required. + +* **Package selection and version guidance** + + * We help you choose the best open source packages from the start—and then + guide you through updates to stay on the best releases as new issues + arise. + +* **Roadmap input** + + * Take a seat at the table with the creators behind the software you use. + Tidelift’s participating maintainers earn more income as their software is + used by more subscribers, so they’re interested in knowing what you need. 
+ +* **Tooling and cloud integration** + + * Tidelift works with GitHub, GitLab, BitBucket, and more. We support every + cloud platform (and other deployment targets, too). + +The end result? All of the capabilities you expect from commercial-grade +software, for the full breadth of open source you use. That means less time +grappling with esoteric open source trivia, and more time building your own +applications—and your business. + +.. raw:: html + + From 2a3c8581a3689326d31386804b100710623526c8 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Fri, 1 Nov 2019 10:52:21 +0100 Subject: [PATCH 174/281] Reject invalid Basic Auth credentials. Either both username and password are provided, or none of them. --- src/websockets/uri.py | 6 +++++- tests/test_uri.py | 1 + 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/src/websockets/uri.py b/src/websockets/uri.py index f5bbafa96..6669e5668 100644 --- a/src/websockets/uri.py +++ b/src/websockets/uri.py @@ -72,6 +72,10 @@ def parse_uri(uri: str) -> WebSocketURI: if parsed.query: resource_name += "?" + parsed.query user_info = None - if parsed.username or parsed.password: + if parsed.username is not None: + # urllib.parse.urlparse accepts URLs with a username but without a + # password. This doesn't make sense for HTTP Basic Auth credentials. + if parsed.password is None: + raise InvalidURI(uri) user_info = (parsed.username, parsed.password) return WebSocketURI(secure, host, port, resource_name, user_info) diff --git a/tests/test_uri.py b/tests/test_uri.py index b7b69c3c1..e41860b8e 100644 --- a/tests/test_uri.py +++ b/tests/test_uri.py @@ -16,6 +16,7 @@ "http://localhost/", "https://localhost/", "ws://localhost/path#fragment", + "ws://user@localhost/", ] From b4f6efaf829c6b6acd33294fb6cab14bdc61584b Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Fri, 1 Nov 2019 14:32:54 +0100 Subject: [PATCH 175/281] Make single-element tuple unpacking more explicit. The latest version of black does this. It's a good. --- src/websockets/framing.py | 6 +++--- tests/test_protocol.py | 12 ++++++------ 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/src/websockets/framing.py b/src/websockets/framing.py index c24b8a73d..26e58cdbf 100644 --- a/src/websockets/framing.py +++ b/src/websockets/framing.py @@ -118,10 +118,10 @@ async def read( length = head2 & 0b01111111 if length == 126: data = await reader(2) - length, = struct.unpack("!H", data) + (length,) = struct.unpack("!H", data) elif length == 127: data = await reader(8) - length, = struct.unpack("!Q", data) + (length,) = struct.unpack("!Q", data) if max_size is not None and length > max_size: raise PayloadTooBig( f"payload length exceeds size limit ({length} > {max_size} bytes)" @@ -304,7 +304,7 @@ def parse_close(data: bytes) -> Tuple[int, str]: """ length = len(data) if length >= 2: - code, = struct.unpack("!H", data[:2]) + (code,) = struct.unpack("!H", data[:2]) check_close(code) reason = data[2:].decode("utf-8") return code, reason diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 66a822e79..d32c1f72e 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -911,7 +911,7 @@ def test_abort_ping(self): def test_abort_ping_does_not_log_exception_if_not_retreived(self): self.loop.run_until_complete(self.protocol.ping()) # Get the internal Future, which isn't directly returned by ping(). - ping, = self.protocol.pings.values() + (ping,) = self.protocol.pings.values() # Remove the frame from the buffer, else close_connection() complains. 
self.last_sent_frame() self.close_connection() @@ -1126,13 +1126,13 @@ def test_keepalive_ping(self): # Ping is sent at 3ms and acknowledged at 4ms. self.loop.run_until_complete(asyncio.sleep(4 * MS)) - ping_1, = tuple(self.protocol.pings) + (ping_1,) = tuple(self.protocol.pings) self.assertOneFrameSent(True, OP_PING, ping_1) self.receive_frame(Frame(True, OP_PONG, ping_1)) # Next ping is sent at 7ms. self.loop.run_until_complete(asyncio.sleep(4 * MS)) - ping_2, = tuple(self.protocol.pings) + (ping_2,) = tuple(self.protocol.pings) self.assertOneFrameSent(True, OP_PING, ping_2) # The keepalive ping task goes on. @@ -1143,7 +1143,7 @@ def test_keepalive_ping_not_acknowledged_closes_connection(self): # Ping is sent at 3ms and not acknowleged. self.loop.run_until_complete(asyncio.sleep(4 * MS)) - ping_1, = tuple(self.protocol.pings) + (ping_1,) = tuple(self.protocol.pings) self.assertOneFrameSent(True, OP_PING, ping_1) # Connection is closed at 6ms. @@ -1183,7 +1183,7 @@ def test_keepalive_ping_does_not_crash_when_connection_lost(self): self.receive_frame(Frame(True, OP_TEXT, b"2")) # Ping is sent at 3ms. self.loop.run_until_complete(asyncio.sleep(4 * MS)) - ping_waiter, = tuple(self.protocol.pings.values()) + (ping_waiter,) = tuple(self.protocol.pings.values()) # Connection drops. self.receive_eof() self.loop.run_until_complete(self.protocol.wait_closed()) @@ -1210,7 +1210,7 @@ def test_keepalive_ping_with_no_ping_timeout(self): # Ping is sent at 3ms and not acknowleged. self.loop.run_until_complete(asyncio.sleep(4 * MS)) - ping_1, = tuple(self.protocol.pings) + (ping_1,) = tuple(self.protocol.pings) self.assertOneFrameSent(True, OP_PING, ping_1) # Next ping is sent at 7ms anyway. From 20d1eb2e5afcc03b49aafbf113250ffdc9f432e2 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Fri, 1 Nov 2019 14:36:51 +0100 Subject: [PATCH 176/281] RST doesn't work inside raw HTML. --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index a9f54a35e..1e15ba198 100644 --- a/README.rst +++ b/README.rst @@ -87,7 +87,7 @@ Does that look good?

       Available as part of the Tidelift Subscription
       The maintainers of websockets and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. Learn more.

-      (If you contribute to `websockets` and would like to become an official support provider, let me know.)
+      (If you contribute to websockets and would like to become an official support provider, let me know.)

Why should I use ``websockets``? -------------------------------- From 139085fe2624192a5a6c72b1e5db211dcec6ced1 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Fri, 1 Nov 2019 14:39:33 +0100 Subject: [PATCH 177/281] Bump version number. --- docs/changelog.rst | 5 ++++- docs/conf.py | 4 ++-- src/websockets/version.py | 2 +- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 2a106fbc0..04f18a765 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -3,11 +3,14 @@ Changelog .. currentmodule:: websockets -8.1 +8.2 ... *In development* +8.1 +... + * Added compatibility with Python 3.8. 8.0.2 diff --git a/docs/conf.py b/docs/conf.py index 617989cb1..064c657bf 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -59,9 +59,9 @@ # built documents. # # The short X.Y version. -version = '8.0' +version = '8.1' # The full version, including alpha/beta/rc tags. -release = '8.0.2' +release = '8.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/src/websockets/version.py b/src/websockets/version.py index cd8898041..7377332e1 100644 --- a/src/websockets/version.py +++ b/src/websockets/version.py @@ -1 +1 @@ -version = "8.0.2" +version = "8.1" From 93ad88a9a8fe2ea8d96fb1d2a0f1625a3c5fee7c Mon Sep 17 00:00:00 2001 From: Alex Coplan Date: Mon, 4 Nov 2019 11:54:49 +0000 Subject: [PATCH 178/281] fix type hints on client/server args * Make ping_interval et al. optional so that code that passes None here will type check. --- src/websockets/client.py | 8 ++++---- src/websockets/server.py | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/websockets/client.py b/src/websockets/client.py index eb58f9f48..831b70805 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -373,11 +373,11 @@ def __init__( *, path: Optional[str] = None, create_protocol: Optional[Type[WebSocketClientProtocol]] = None, - ping_interval: float = 20, - ping_timeout: float = 20, + ping_interval: Optional[float] = 20, + ping_timeout: Optional[float] = 20, close_timeout: Optional[float] = None, - max_size: int = 2 ** 20, - max_queue: int = 2 ** 5, + max_size: Optional[int] = 2 ** 20, + max_queue: Optional[int] = 2 ** 5, read_limit: int = 2 ** 16, write_limit: int = 2 ** 16, loop: Optional[asyncio.AbstractEventLoop] = None, diff --git a/src/websockets/server.py b/src/websockets/server.py index 4f5e9e0ef..0313fa848 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -842,11 +842,11 @@ def __init__( *, path: Optional[str] = None, create_protocol: Optional[Type[WebSocketServerProtocol]] = None, - ping_interval: float = 20, - ping_timeout: float = 20, + ping_interval: Optional[float] = 20, + ping_timeout: Optional[float] = 20, close_timeout: Optional[float] = None, - max_size: int = 2 ** 20, - max_queue: int = 2 ** 5, + max_size: Optional[int] = 2 ** 20, + max_queue: Optional[int] = 2 ** 5, read_limit: int = 2 ** 16, write_limit: int = 2 ** 16, loop: Optional[asyncio.AbstractEventLoop] = None, From 3bab7fd155636c73b79b258de752b36687bba347 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 16 Nov 2019 20:37:14 +0100 Subject: [PATCH 179/281] Clarify local/remote_address after connection is closed. Fix #688. 
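For context, here is a minimal sketch of the behaviour these docstrings describe; the URI and the running server are placeholders for illustration, not part of this patch:

.. code:: python

    import asyncio

    import websockets

    async def show_addresses(uri):
        async with websockets.connect(uri) as ws:
            # While the connection is open, both properties are (host, port) tuples.
            print("local :", ws.local_address)
            print("remote:", ws.remote_address)
        # Per the docstrings in the diff below, both properties are None
        # whenever the connection isn't open.
        print("after close:", ws.remote_address)

    # Placeholder URI; point it at any reachable WebSocket server.
    asyncio.run(show_addresses("ws://localhost:8765"))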
--- src/websockets/protocol.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 6c29b2a52..e065bef67 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -357,10 +357,9 @@ def secure(self) -> Optional[bool]: @property def local_address(self) -> Any: """ - Local address of the connection. + Local address of the connection as a ``(host, port)`` tuple. - This is a ``(host, port)`` tuple or ``None`` if the connection hasn't - been established yet. + When the connection isn't open, ``local_address`` is ``None``. """ try: @@ -373,10 +372,9 @@ def local_address(self) -> Any: @property def remote_address(self) -> Any: """ - Remote address of the connection. + Remote address of the connection as a ``(host, port)`` tuple. - This is a ``(host, port)`` tuple or ``None`` if the connection hasn't - been established yet. + When the connection isn't open, ``remote_address`` is ``None``. """ try: From 910f417c9179150c5ab4b44c7361dbf1e51ec322 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 16 Nov 2019 20:40:15 +0100 Subject: [PATCH 180/281] Always reraise CancelledError. It's really hard to write tests for this :-( Fix #672. --- src/websockets/client.py | 2 ++ src/websockets/server.py | 4 ++++ 2 files changed, 6 insertions(+) diff --git a/src/websockets/client.py b/src/websockets/client.py index 831b70805..f92350249 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -99,6 +99,8 @@ async def read_http_response(self) -> Tuple[int, Headers]: """ try: status_code, reason, headers = await read_response(self.reader) + except asyncio.CancelledError: # pragma: no cover + raise except Exception as exc: raise InvalidMessage("did not receive a valid HTTP response") from exc diff --git a/src/websockets/server.py b/src/websockets/server.py index 0313fa848..f872262ef 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -133,6 +133,8 @@ async def handler(self) -> None: available_subprotocols=self.available_subprotocols, extra_headers=self.extra_headers, ) + except asyncio.CancelledError: # pragma: no cover + raise except ConnectionError: logger.debug("Connection error in opening handshake", exc_info=True) raise @@ -231,6 +233,8 @@ async def read_http_request(self) -> Tuple[str, Headers]: """ try: path, headers = await read_request(self.reader) + except asyncio.CancelledError: # pragma: no cover + raise except Exception as exc: raise InvalidMessage("did not receive a valid HTTP request") from exc From a1615b47fcd416e5016d7e471976314c267f4349 Mon Sep 17 00:00:00 2001 From: Hugo Date: Tue, 14 Jan 2020 20:53:41 +0200 Subject: [PATCH 181/281] Fix for Python 3.10: use sys.version_info instead of sys.version --- src/websockets/http.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/websockets/http.py b/src/websockets/http.py index ba6d274bf..f87bfb76a 100644 --- a/src/websockets/http.py +++ b/src/websockets/http.py @@ -36,7 +36,8 @@ MAX_HEADERS = 256 MAX_LINE = 4096 -USER_AGENT = f"Python/{sys.version[:3]} websockets/{websockets_version}" +PYTHON_VERSION = "{}.{}".format(*sys.version_info) +USER_AGENT = f"Python/{PYTHON_VERSION} websockets/{websockets_version}" def d(value: bytes) -> str: From 160dfbec7dd582c12817de5c85e6bf3fbbc34826 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 25 Jan 2020 21:10:09 +0100 Subject: [PATCH 182/281] Clarify comment about RFC inconsistency. 
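As a side note for reviewers, the check this comment documents (visible in the diff below) reduces to a single case-insensitive comparison; the helper name and sample values here are made up for illustration:

.. code:: python

    # Mirrors the condition used in check_request() and check_response():
    # exactly one Upgrade value, compared case-insensitively.
    def upgrade_header_is_valid(upgrade_values):
        return len(upgrade_values) == 1 and upgrade_values[0].lower() == "websocket"

    assert upgrade_header_is_valid(["websocket"])             # spelling used throughout the RFC
    assert upgrade_header_is_valid(["WebSocket"])             # spelling used in section 11.2.
    assert not upgrade_header_is_valid(["h2c"])
    assert not upgrade_header_is_valid(["websocket", "h2c"])  # multiple values are rejected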
--- src/websockets/handshake.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/websockets/handshake.py b/src/websockets/handshake.py index 9bfe27754..646b6dba4 100644 --- a/src/websockets/handshake.py +++ b/src/websockets/handshake.py @@ -87,7 +87,8 @@ def check_request(headers: Headers) -> str: ) # For compatibility with non-strict implementations, ignore case when - # checking the Upgrade header. It's supposed to be 'WebSocket'. + # checking the Upgrade header. The RFC always uses "websocket", except + # in section 11.2. (IANA registration) where it uses "WebSocket". if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"): raise InvalidUpgrade("Upgrade", ", ".join(upgrade)) @@ -163,7 +164,8 @@ def check_response(headers: Headers, key: str) -> None: ) # For compatibility with non-strict implementations, ignore case when - # checking the Upgrade header. It's supposed to be 'WebSocket'. + # checking the Upgrade header. The RFC always uses "websocket", except + # in section 11.2. (IANA registration) where it uses "WebSocket". if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"): raise InvalidUpgrade("Upgrade", ", ".join(upgrade)) From 4f1964295ad0e81c8c96b99c3fe9dafc96f11f28 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Tue, 18 Feb 2020 22:09:55 +0100 Subject: [PATCH 183/281] Speculation about proof-of-stake gets old. Meanwhile, bitcoin still heats the planet. Sorry crypto buffs. Refs #480 and several others. --- docs/contributing.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/contributing.rst b/docs/contributing.rst index 40f1dbb54..61c0b979c 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -53,6 +53,9 @@ Bitcoin users websockets appears to be quite popular for interfacing with Bitcoin or other cryptocurrency trackers. I'm strongly opposed to Bitcoin's carbon footprint. +I'm aware of efforts to build proof-of-stake models. I'll care once the total +carbon footprint of all cryptocurrencies drops to a non-bullshit level. + Please stop heating the planet where my children are supposed to live, thanks. Since ``websockets`` is released under an open-source license, you can use it From 6b5cbaf41cdbc9a2074e357ccc613ef25517dd32 Mon Sep 17 00:00:00 2001 From: Tim Gates Date: Sun, 1 Mar 2020 19:10:49 +1100 Subject: [PATCH 184/281] Fix simple typo: severel -> several There is a small typo in src/websockets/client.py, src/websockets/server.py. Should read `several` rather than `severel`. --- src/websockets/client.py | 2 +- src/websockets/server.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/websockets/client.py b/src/websockets/client.py index f92350249..be055310d 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -134,7 +134,7 @@ def process_extensions( client configuration. If no match is found, an exception is raised. If several variants of the same extension are accepted by the server, - it may be configured severel times, which won't make sense in general. + it may be configured several times, which won't make sense in general. Extensions must implement their own requirements. For this purpose, the list of previously accepted extensions is provided. diff --git a/src/websockets/server.py b/src/websockets/server.py index f872262ef..1d8de8914 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -369,7 +369,7 @@ def process_extensions( server configuration. If no match is found, the extension is ignored. 
If several variants of the same extension are proposed by the client, - it may be accepted severel times, which won't make sense in general. + it may be accepted several times, which won't make sense in general. Extensions must implement their own requirements. For this purpose, the list of previously accepted extensions is provided. From 18dbc49c935285e35a54e46030d326f3a49ea7b7 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 23 May 2020 07:46:06 +0200 Subject: [PATCH 185/281] Run tests against the latest Python 3.8. --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 0877c161a..68d02416d 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -31,7 +31,7 @@ jobs: - run: tox -e py37 py38: docker: - - image: circleci/python:3.8.0rc1 + - image: circleci/python:3.8 steps: # Remove IPv6 entry for localhost in Circle CI containers because it doesn't work anyway. - run: sudo cp /etc/hosts /tmp; sudo sed -i '/::1/d' /tmp/hosts; sudo cp /tmp/hosts /etc From 6170e235723f27a5aaa42ea86828f0266cc004f9 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 23 May 2020 09:30:16 +0200 Subject: [PATCH 186/281] Don't attempt to build wheels on PyPy 2.7. --- .appveyor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.appveyor.yml b/.appveyor.yml index 7954ee4be..2db489a76 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -6,7 +6,7 @@ skip_branch_with_pr: true environment: # websockets only works on Python >= 3.6. - CIBW_SKIP: cp27-* cp33-* cp34-* cp35-* + CIBW_SKIP: cp27-* cp33-* cp34-* cp35-* pp27-* CIBW_TEST_COMMAND: python -W default -m unittest WEBSOCKETS_TESTS_TIMEOUT_FACTOR: 100 From 46e8fb5cecb474991e18f7b809378b7d76477df2 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 23 May 2020 09:58:58 +0200 Subject: [PATCH 187/281] Fix flake8 violation. --- src/websockets/protocol.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index e065bef67..2082c81fc 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -349,7 +349,7 @@ def port(self) -> Optional[int]: @property def secure(self) -> Optional[bool]: - warnings.warn(f"don't use secure", DeprecationWarning) + warnings.warn("don't use secure", DeprecationWarning) return self._secure # Public API From 68dfb14963ea12e0068aefbbb43f101113d0750d Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 23 May 2020 10:15:51 +0200 Subject: [PATCH 188/281] Don't attempt to build wheels on PyPy 2.7 (bis). --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 030693759..6234bb649 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,7 @@ env: global: # websockets only works on Python >= 3.6. - - CIBW_SKIP="cp27-* cp33-* cp34-* cp35-*" + - CIBW_SKIP="cp27-* cp33-* cp34-* cp35-* pp27-*" - CIBW_TEST_COMMAND="python3 -W default -m unittest" - WEBSOCKETS_TESTS_TIMEOUT_FACTOR=100 From fafcf65d430149a8b94379f9557655828a0dcdab Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 23 May 2020 12:56:22 +0200 Subject: [PATCH 189/281] Only build wheels on supported CPython versions. PyPy 3 wheels were failing to build on macOS. 
--- .appveyor.yml | 2 +- .travis.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index 2db489a76..d34b15aed 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -6,7 +6,7 @@ skip_branch_with_pr: true environment: # websockets only works on Python >= 3.6. - CIBW_SKIP: cp27-* cp33-* cp34-* cp35-* pp27-* + CIBW_BUILD: cp36-* cp37-* cp38-* CIBW_TEST_COMMAND: python -W default -m unittest WEBSOCKETS_TESTS_TIMEOUT_FACTOR: 100 diff --git a/.travis.yml b/.travis.yml index 6234bb649..26e1de60e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,7 @@ env: global: # websockets only works on Python >= 3.6. - - CIBW_SKIP="cp27-* cp33-* cp34-* cp35-* pp27-*" + - CIBW_BUILD="cp36-* cp37-* cp38-*" - CIBW_TEST_COMMAND="python3 -W default -m unittest" - WEBSOCKETS_TESTS_TIMEOUT_FACTOR=100 From 69c94af5c0ad19402e0bedcc6b61a23fa070c946 Mon Sep 17 00:00:00 2001 From: David Bordeynik Date: Mon, 18 May 2020 10:38:08 +0300 Subject: [PATCH 190/281] Future-proof asyncio.wait usage. Fix #762. --- .circleci/config.yml | 12 ++++++++++++ .gitignore | 1 + src/websockets/server.py | 5 ++++- tox.ini | 2 +- 4 files changed, 18 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 68d02416d..7be85d7f9 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -38,6 +38,15 @@ jobs: - checkout - run: sudo pip install tox - run: tox -e py38 + py39: + docker: + - image: circleci/python:3.9.0b1 + steps: + # Remove IPv6 entry for localhost in Circle CI containers because it doesn't work anyway. + - run: sudo cp /etc/hosts /tmp; sudo sed -i '/::1/d' /tmp/hosts; sudo cp /tmp/hosts /etc + - checkout + - run: sudo pip install tox + - run: tox -e py39 workflows: version: 2 @@ -53,3 +62,6 @@ workflows: - py38: requires: - main + - py39: + requires: + - main diff --git a/.gitignore b/.gitignore index ef0d16520..c23cf5210 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ *.pyc *.so .coverage +.idea/ .mypy_cache .tox build/ diff --git a/src/websockets/server.py b/src/websockets/server.py index 1d8de8914..e9318a4df 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -714,7 +714,10 @@ async def _close(self) -> None: # asyncio.wait doesn't accept an empty first argument if self.websockets: await asyncio.wait( - [websocket.close(1001) for websocket in self.websockets], + [ + asyncio.ensure_future(websocket.close(1001)) + for websocket in self.websockets + ], loop=self.loop if sys.version_info[:2] < (3, 8) else None, ) diff --git a/tox.ini b/tox.ini index 825e34061..cc224f9c6 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py36,py37,py38,coverage,black,flake8,isort,mypy +envlist = py36,py37,py38,py39,coverage,black,flake8,isort,mypy [testenv] commands = python -W default -m unittest {posargs} From 24a77def7097cb7ae651edf35582c8def5a6ad3e Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 13 Jun 2020 17:41:06 +0200 Subject: [PATCH 191/281] Update to mypy 0.780. --- src/websockets/__main__.py | 8 ++++---- src/websockets/typing.py | 6 +++--- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/websockets/__main__.py b/src/websockets/__main__.py index 394f7ac79..1a720498d 100644 --- a/src/websockets/__main__.py +++ b/src/websockets/__main__.py @@ -49,10 +49,10 @@ def exit_from_event_loop_thread( if not stop.done(): # When exiting the thread that runs the event loop, raise # KeyboardInterrupt in the main thread to exit the program. 
- try: - ctrl_c = signal.CTRL_C_EVENT # Windows - except AttributeError: - ctrl_c = signal.SIGINT # POSIX + if sys.platform == "win32": + ctrl_c = signal.CTRL_C_EVENT + else: + ctrl_c = signal.SIGINT os.kill(os.getpid(), ctrl_c) diff --git a/src/websockets/typing.py b/src/websockets/typing.py index 4a60f93f6..a5062bc4b 100644 --- a/src/websockets/typing.py +++ b/src/websockets/typing.py @@ -14,7 +14,7 @@ """ # Remove try / except when dropping support for Python < 3.7 try: - Data.__doc__ = Data__doc__ # type: ignore + Data.__doc__ = Data__doc__ except AttributeError: # pragma: no cover pass @@ -31,7 +31,7 @@ ExtensionParameter__doc__ = """Parameter of a WebSocket extension""" try: - ExtensionParameter.__doc__ = ExtensionParameter__doc__ # type: ignore + ExtensionParameter.__doc__ = ExtensionParameter__doc__ except AttributeError: # pragma: no cover pass @@ -40,7 +40,7 @@ ExtensionHeader__doc__ = """Item parsed in a Sec-WebSocket-Extensions header""" try: - ExtensionHeader.__doc__ = ExtensionHeader__doc__ # type: ignore + ExtensionHeader.__doc__ = ExtensionHeader__doc__ except AttributeError: # pragma: no cover pass From 017a072705408d3df945e333e5edd93e0aa8c706 Mon Sep 17 00:00:00 2001 From: Ram Rachum Date: Fri, 12 Jun 2020 23:16:57 +0300 Subject: [PATCH 192/281] Fix exception causes in server.py --- src/websockets/server.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/websockets/server.py b/src/websockets/server.py index e9318a4df..0f0b51a7c 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -340,8 +340,8 @@ def process_origin( # per https://tools.ietf.org/html/rfc6454#section-7.3. try: origin = cast(Origin, headers.get("Origin")) - except MultipleValuesError: - raise InvalidHeader("Origin", "more than one Origin header found") + except MultipleValuesError as exc: + raise InvalidHeader("Origin", "more than one Origin header found") from exc if origins is not None: if origin not in origins: raise InvalidOrigin(origin) From 17499930cec591778d13e594b0cb978a9961e276 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 26 Jul 2020 13:39:43 +0200 Subject: [PATCH 193/281] Ignore coverage measurement issue. --- src/websockets/protocol.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 2082c81fc..803970205 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -1175,7 +1175,9 @@ async def close_connection(self) -> None: # A client should wait for a TCP close from the server. if self.is_client and hasattr(self, "transfer_data_task"): if await self.wait_for_connection_lost(): - return + # Coverage marks this line as a partially executed branch. + # I supect a bug in coverage. Ignore it for now. + return # pragma: no cover logger.debug("%s ! timed out waiting for TCP close", self.side) # Half-close the TCP connection if possible (when there's no TLS). @@ -1184,7 +1186,9 @@ async def close_connection(self) -> None: self.transport.write_eof() if await self.wait_for_connection_lost(): - return + # Coverage marks this line as a partially executed branch. + # I supect a bug in coverage. Ignore it for now. + return # pragma: no cover logger.debug("%s ! timed out waiting for TCP close", self.side) finally: @@ -1210,7 +1214,9 @@ async def close_connection(self) -> None: self.transport.abort() # connection_lost() is called quickly after aborting. 
- await self.wait_for_connection_lost() + # Coverage marks this line as a partially executed branch. + # I supect a bug in coverage. Ignore it for now. + await self.wait_for_connection_lost() # pragma: no cover async def wait_for_connection_lost(self) -> bool: """ From f0cfa6ba2abf6d4b032b30cfae9d321e583d546e Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 26 Jul 2020 18:04:08 +0200 Subject: [PATCH 194/281] Realign docstring with Python version. --- src/websockets/speedups.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/websockets/speedups.c b/src/websockets/speedups.c index d1c2b37e6..ede181e5d 100644 --- a/src/websockets/speedups.c +++ b/src/websockets/speedups.c @@ -181,7 +181,7 @@ static PyMethodDef speedups_methods[] = { "apply_mask", (PyCFunction)apply_mask, METH_VARARGS | METH_KEYWORDS, - "Apply masking to websocket message.", + "Apply masking to the data of a WebSocket message.", }, {NULL, NULL, 0, NULL}, /* Sentinel */ }; From daad5180e09af5d860edf4191fb1791eb6b57cc8 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 26 Jul 2020 20:21:51 +0200 Subject: [PATCH 195/281] =?UTF-8?q?Upgrade=20to=20isort=20=E2=89=A5=205.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Makefile | 2 +- setup.cfg | 6 +----- tox.ini | 2 +- 3 files changed, 3 insertions(+), 7 deletions(-) diff --git a/Makefile b/Makefile index d9e16fefe..06832945c 100644 --- a/Makefile +++ b/Makefile @@ -6,7 +6,7 @@ export PYTHONPATH=src default: coverage style style: - isort --recursive src tests + isort src tests black src tests flake8 src tests mypy --strict src diff --git a/setup.cfg b/setup.cfg index c306b2d4f..02e70cdf5 100644 --- a/setup.cfg +++ b/setup.cfg @@ -9,13 +9,9 @@ ignore = E731,F403,F405,W503 max-line-length = 88 [isort] +profile = black combine_as_imports = True -force_grid_wrap = 0 -include_trailing_comma = True -known_standard_library = asyncio -line_length = 88 lines_after_imports = 2 -multi_line_output = 3 [coverage:run] branch = True diff --git a/tox.ini b/tox.ini index cc224f9c6..b5488e5b0 100644 --- a/tox.ini +++ b/tox.ini @@ -20,7 +20,7 @@ commands = flake8 src tests deps = flake8 [testenv:isort] -commands = isort --check-only --recursive src tests +commands = isort --check-only src tests deps = isort [testenv:mypy] From 85b3fd67490bc1e5aa9e46c292c00aceeaa0d40b Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 6 Oct 2019 10:28:03 +0200 Subject: [PATCH 196/281] Move Headers class to its own module. This allows breaking an import loop. --- docs/api.rst | 6 ++ docs/changelog.rst | 12 ++- src/websockets/__init__.py | 1 + src/websockets/auth.py | 2 +- src/websockets/client.py | 3 +- src/websockets/datastructures.py | 159 ++++++++++++++++++++++++++++ src/websockets/exceptions.py | 2 +- src/websockets/handshake.py | 2 +- src/websockets/http.py | 173 ++----------------------------- src/websockets/protocol.py | 2 +- src/websockets/server.py | 3 +- tests/test_client_server.py | 3 +- tests/test_datastructures.py | 131 +++++++++++++++++++++++ tests/test_exceptions.py | 2 +- tests/test_handshake.py | 2 +- tests/test_http.py | 114 -------------------- 16 files changed, 330 insertions(+), 287 deletions(-) create mode 100644 src/websockets/datastructures.py create mode 100644 tests/test_datastructures.py diff --git a/docs/api.rst b/docs/api.rst index d265a91c2..f7706ee2c 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -115,6 +115,12 @@ HTTP Basic Auth .. automethod:: process_request +Data structures +............... 
+ +.. automodule:: websockets.datastructures + :members: + Exceptions .......... diff --git a/docs/changelog.rst b/docs/changelog.rst index 04f18a765..5de7357ca 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -3,11 +3,21 @@ Changelog .. currentmodule:: websockets -8.2 +9.0 ... *In development* +.. note:: + + **Version 9.0 moves or deprecates several low-level APIs.** + + * Import :class:`~datastructures.Headers` and + :exc:`~datastructures.MultipleValuesError` from + :mod:`websockets.datastructures` instead of :mod:`websockets.http`. + + Aliases provide backwards compatibility for all previously public APIs. + 8.1 ... diff --git a/src/websockets/__init__.py b/src/websockets/__init__.py index ea1d829a3..89829235c 100644 --- a/src/websockets/__init__.py +++ b/src/websockets/__init__.py @@ -2,6 +2,7 @@ from .auth import * # noqa from .client import * # noqa +from .datastructures import * # noqa from .exceptions import * # noqa from .protocol import * # noqa from .server import * # noqa diff --git a/src/websockets/auth.py b/src/websockets/auth.py index ae204b8d9..8198cd9d0 100644 --- a/src/websockets/auth.py +++ b/src/websockets/auth.py @@ -9,9 +9,9 @@ import http from typing import Any, Awaitable, Callable, Iterable, Optional, Tuple, Type, Union +from .datastructures import Headers from .exceptions import InvalidHeader from .headers import build_www_authenticate_basic, parse_authorization_basic -from .http import Headers from .server import HTTPResponse, WebSocketServerProtocol diff --git a/src/websockets/client.py b/src/websockets/client.py index be055310d..26a369c47 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -11,6 +11,7 @@ from types import TracebackType from typing import Any, Generator, List, Optional, Sequence, Tuple, Type, cast +from .datastructures import Headers, HeadersLike from .exceptions import ( InvalidHandshake, InvalidHeader, @@ -30,7 +31,7 @@ parse_extension, parse_subprotocol, ) -from .http import USER_AGENT, Headers, HeadersLike, read_response +from .http import USER_AGENT, read_response from .protocol import WebSocketCommonProtocol from .typing import ExtensionHeader, Origin, Subprotocol from .uri import WebSocketURI, parse_uri diff --git a/src/websockets/datastructures.py b/src/websockets/datastructures.py new file mode 100644 index 000000000..f70d92ad7 --- /dev/null +++ b/src/websockets/datastructures.py @@ -0,0 +1,159 @@ +""" +This module defines a data structure for manipulating HTTP headers. + +""" + +from typing import ( + Any, + Dict, + Iterable, + Iterator, + List, + Mapping, + MutableMapping, + Tuple, + Union, +) + + +__all__ = ["Headers", "MultipleValuesError"] + + +class MultipleValuesError(LookupError): + """ + Exception raised when :class:`Headers` has more than one value for a key. + + """ + + def __str__(self) -> str: + # Implement the same logic as KeyError_str in Objects/exceptions.c. + if len(self.args) == 1: + return repr(self.args[0]) + return super().__str__() + + +class Headers(MutableMapping[str, str]): + """ + Efficient data structure for manipulating HTTP headers. + + A :class:`list` of ``(name, values)`` is inefficient for lookups. + + A :class:`dict` doesn't suffice because header names are case-insensitive + and multiple occurrences of headers with the same name are possible. + + :class:`Headers` stores HTTP headers in a hybrid data structure to provide + efficient insertions and lookups while preserving the original data. 
+ + In order to account for multiple values with minimal hassle, + :class:`Headers` follows this logic: + + - When getting a header with ``headers[name]``: + - if there's no value, :exc:`KeyError` is raised; + - if there's exactly one value, it's returned; + - if there's more than one value, :exc:`MultipleValuesError` is raised. + + - When setting a header with ``headers[name] = value``, the value is + appended to the list of values for that header. + + - When deleting a header with ``del headers[name]``, all values for that + header are removed (this is slow). + + Other methods for manipulating headers are consistent with this logic. + + As long as no header occurs multiple times, :class:`Headers` behaves like + :class:`dict`, except keys are lower-cased to provide case-insensitivity. + + Two methods support support manipulating multiple values explicitly: + + - :meth:`get_all` returns a list of all values for a header; + - :meth:`raw_items` returns an iterator of ``(name, values)`` pairs. + + """ + + __slots__ = ["_dict", "_list"] + + def __init__(self, *args: Any, **kwargs: str) -> None: + self._dict: Dict[str, List[str]] = {} + self._list: List[Tuple[str, str]] = [] + # MutableMapping.update calls __setitem__ for each (name, value) pair. + self.update(*args, **kwargs) + + def __str__(self) -> str: + return "".join(f"{key}: {value}\r\n" for key, value in self._list) + "\r\n" + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self._list!r})" + + def copy(self) -> "Headers": + copy = self.__class__() + copy._dict = self._dict.copy() + copy._list = self._list.copy() + return copy + + def serialize(self) -> bytes: + # Headers only contain ASCII characters. + return str(self).encode() + + # Collection methods + + def __contains__(self, key: object) -> bool: + return isinstance(key, str) and key.lower() in self._dict + + def __iter__(self) -> Iterator[str]: + return iter(self._dict) + + def __len__(self) -> int: + return len(self._dict) + + # MutableMapping methods + + def __getitem__(self, key: str) -> str: + value = self._dict[key.lower()] + if len(value) == 1: + return value[0] + else: + raise MultipleValuesError(key) + + def __setitem__(self, key: str, value: str) -> None: + self._dict.setdefault(key.lower(), []).append(value) + self._list.append((key, value)) + + def __delitem__(self, key: str) -> None: + key_lower = key.lower() + self._dict.__delitem__(key_lower) + # This is inefficent. Fortunately deleting HTTP headers is uncommon. + self._list = [(k, v) for k, v in self._list if k.lower() != key_lower] + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Headers): + return NotImplemented + return self._list == other._list + + def clear(self) -> None: + """ + Remove all headers. + + """ + self._dict = {} + self._list = [] + + # Methods for handling multiple values + + def get_all(self, key: str) -> List[str]: + """ + Return the (possibly empty) list of all values for a header. + + :param key: header name + + """ + return self._dict.get(key.lower(), []) + + def raw_items(self) -> Iterator[Tuple[str, str]]: + """ + Return an iterator of all values as ``(name, value)`` pairs. 
+ + """ + return iter(self._list) + + +HeadersLike = Union[Headers, Mapping[str, str], Iterable[Tuple[str, str]]] diff --git a/src/websockets/exceptions.py b/src/websockets/exceptions.py index 9873a1717..e593f1adc 100644 --- a/src/websockets/exceptions.py +++ b/src/websockets/exceptions.py @@ -30,7 +30,7 @@ import http from typing import Optional -from .http import Headers, HeadersLike +from .datastructures import Headers, HeadersLike __all__ = [ diff --git a/src/websockets/handshake.py b/src/websockets/handshake.py index 646b6dba4..e30a67125 100644 --- a/src/websockets/handshake.py +++ b/src/websockets/handshake.py @@ -31,9 +31,9 @@ import random from typing import List +from .datastructures import Headers, MultipleValuesError from .exceptions import InvalidHeader, InvalidHeaderValue, InvalidUpgrade from .headers import ConnectionOption, UpgradeProtocol, parse_connection, parse_upgrade -from .http import Headers, MultipleValuesError __all__ = ["build_request", "check_request", "build_response", "check_response"] diff --git a/src/websockets/http.py b/src/websockets/http.py index f87bfb76a..ddb2afcfa 100644 --- a/src/websockets/http.py +++ b/src/websockets/http.py @@ -10,28 +10,15 @@ import asyncio import re import sys -from typing import ( - Any, - Dict, - Iterable, - Iterator, - List, - Mapping, - MutableMapping, - Tuple, - Union, -) +from typing import Tuple +# For backwards compatibility - should be deprecated +from .datastructures import Headers, MultipleValuesError # noqa +from .exceptions import SecurityError from .version import version as websockets_version -__all__ = [ - "read_request", - "read_response", - "Headers", - "MultipleValuesError", - "USER_AGENT", -] +__all__ = ["read_request", "read_response", "USER_AGENT"] MAX_HEADERS = 256 MAX_LINE = 4096 @@ -68,7 +55,7 @@ def d(value: bytes) -> str: _value_re = re.compile(rb"[\x09\x20-\x7e\x80-\xff]*") -async def read_request(stream: asyncio.StreamReader) -> Tuple[str, "Headers"]: +async def read_request(stream: asyncio.StreamReader) -> Tuple[str, Headers]: """ Read an HTTP/1.1 GET request and return ``(path, headers)``. @@ -114,7 +101,7 @@ async def read_request(stream: asyncio.StreamReader) -> Tuple[str, "Headers"]: return path, headers -async def read_response(stream: asyncio.StreamReader) -> Tuple[int, str, "Headers"]: +async def read_response(stream: asyncio.StreamReader) -> Tuple[int, str, Headers]: """ Read an HTTP/1.1 response and return ``(status_code, reason, headers)``. @@ -163,7 +150,7 @@ async def read_response(stream: asyncio.StreamReader) -> Tuple[int, str, "Header return status_code, reason, headers -async def read_headers(stream: asyncio.StreamReader) -> "Headers": +async def read_headers(stream: asyncio.StreamReader) -> Headers: """ Read HTTP headers from ``stream``. 
@@ -198,7 +185,7 @@ async def read_headers(stream: asyncio.StreamReader) -> "Headers": headers[name] = value else: - raise websockets.exceptions.SecurityError("too many HTTP headers") + raise SecurityError("too many HTTP headers") return headers @@ -214,148 +201,8 @@ async def read_line(stream: asyncio.StreamReader) -> bytes: line = await stream.readline() # Security: this guarantees header values are small (hard-coded = 4 KiB) if len(line) > MAX_LINE: - raise websockets.exceptions.SecurityError("line too long") + raise SecurityError("line too long") # Not mandatory but safe - https://tools.ietf.org/html/rfc7230#section-3.5 if not line.endswith(b"\r\n"): raise EOFError("line without CRLF") return line[:-2] - - -class MultipleValuesError(LookupError): - """ - Exception raised when :class:`Headers` has more than one value for a key. - - """ - - def __str__(self) -> str: - # Implement the same logic as KeyError_str in Objects/exceptions.c. - if len(self.args) == 1: - return repr(self.args[0]) - return super().__str__() - - -class Headers(MutableMapping[str, str]): - """ - Efficient data structure for manipulating HTTP headers. - - A :class:`list` of ``(name, values)`` is inefficient for lookups. - - A :class:`dict` doesn't suffice because header names are case-insensitive - and multiple occurrences of headers with the same name are possible. - - :class:`Headers` stores HTTP headers in a hybrid data structure to provide - efficient insertions and lookups while preserving the original data. - - In order to account for multiple values with minimal hassle, - :class:`Headers` follows this logic: - - - When getting a header with ``headers[name]``: - - if there's no value, :exc:`KeyError` is raised; - - if there's exactly one value, it's returned; - - if there's more than one value, :exc:`MultipleValuesError` is raised. - - - When setting a header with ``headers[name] = value``, the value is - appended to the list of values for that header. - - - When deleting a header with ``del headers[name]``, all values for that - header are removed (this is slow). - - Other methods for manipulating headers are consistent with this logic. - - As long as no header occurs multiple times, :class:`Headers` behaves like - :class:`dict`, except keys are lower-cased to provide case-insensitivity. - - Two methods support support manipulating multiple values explicitly: - - - :meth:`get_all` returns a list of all values for a header; - - :meth:`raw_items` returns an iterator of ``(name, values)`` pairs. - - """ - - __slots__ = ["_dict", "_list"] - - def __init__(self, *args: Any, **kwargs: str) -> None: - self._dict: Dict[str, List[str]] = {} - self._list: List[Tuple[str, str]] = [] - # MutableMapping.update calls __setitem__ for each (name, value) pair. 
- self.update(*args, **kwargs) - - def __str__(self) -> str: - return "".join(f"{key}: {value}\r\n" for key, value in self._list) + "\r\n" - - def __repr__(self) -> str: - return f"{self.__class__.__name__}({self._list!r})" - - def copy(self) -> "Headers": - copy = self.__class__() - copy._dict = self._dict.copy() - copy._list = self._list.copy() - return copy - - # Collection methods - - def __contains__(self, key: object) -> bool: - return isinstance(key, str) and key.lower() in self._dict - - def __iter__(self) -> Iterator[str]: - return iter(self._dict) - - def __len__(self) -> int: - return len(self._dict) - - # MutableMapping methods - - def __getitem__(self, key: str) -> str: - value = self._dict[key.lower()] - if len(value) == 1: - return value[0] - else: - raise MultipleValuesError(key) - - def __setitem__(self, key: str, value: str) -> None: - self._dict.setdefault(key.lower(), []).append(value) - self._list.append((key, value)) - - def __delitem__(self, key: str) -> None: - key_lower = key.lower() - self._dict.__delitem__(key_lower) - # This is inefficent. Fortunately deleting HTTP headers is uncommon. - self._list = [(k, v) for k, v in self._list if k.lower() != key_lower] - - def __eq__(self, other: Any) -> bool: - if not isinstance(other, Headers): - return NotImplemented - return self._list == other._list - - def clear(self) -> None: - """ - Remove all headers. - - """ - self._dict = {} - self._list = [] - - # Methods for handling multiple values - - def get_all(self, key: str) -> List[str]: - """ - Return the (possibly empty) list of all values for a header. - - :param key: header name - - """ - return self._dict.get(key.lower(), []) - - def raw_items(self) -> Iterator[Tuple[str, str]]: - """ - Return an iterator of all values as ``(name, value)`` pairs. 
- - """ - return iter(self._list) - - -HeadersLike = Union[Headers, Mapping[str, str], Iterable[Tuple[str, str]]] - - -# at the bottom to allow circular import, because AbortHandshake depends on HeadersLike -import websockets.exceptions # isort:skip # noqa diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 803970205..60235643e 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -30,6 +30,7 @@ cast, ) +from .datastructures import Headers from .exceptions import ( ConnectionClosed, ConnectionClosedError, @@ -41,7 +42,6 @@ from .extensions.base import Extension from .framing import * from .handshake import * -from .http import Headers from .typing import Data diff --git a/src/websockets/server.py b/src/websockets/server.py index 0f0b51a7c..da98cac05 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -28,6 +28,7 @@ cast, ) +from .datastructures import Headers, HeadersLike, MultipleValuesError from .exceptions import ( AbortHandshake, InvalidHandshake, @@ -41,7 +42,7 @@ from .extensions.permessage_deflate import ServerPerMessageDeflateFactory from .handshake import build_response, check_request from .headers import build_extension, parse_extension, parse_subprotocol -from .http import USER_AGENT, Headers, HeadersLike, MultipleValuesError, read_request +from .http import USER_AGENT, read_request from .protocol import WebSocketCommonProtocol from .typing import ExtensionHeader, Origin, Subprotocol diff --git a/tests/test_client_server.py b/tests/test_client_server.py index 35913666c..ba0984c80 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -14,6 +14,7 @@ import warnings from websockets.client import * +from websockets.datastructures import Headers from websockets.exceptions import ( ConnectionClosed, InvalidHandshake, @@ -27,7 +28,7 @@ ServerPerMessageDeflateFactory, ) from websockets.handshake import build_response -from websockets.http import USER_AGENT, Headers, read_response +from websockets.http import USER_AGENT, read_response from websockets.protocol import State from websockets.server import * from websockets.uri import parse_uri diff --git a/tests/test_datastructures.py b/tests/test_datastructures.py new file mode 100644 index 000000000..628cbcb02 --- /dev/null +++ b/tests/test_datastructures.py @@ -0,0 +1,131 @@ +import unittest + +from websockets.datastructures import * + + +class HeadersTests(unittest.TestCase): + def setUp(self): + self.headers = Headers([("Connection", "Upgrade"), ("Server", "websockets")]) + + def test_str(self): + self.assertEqual( + str(self.headers), "Connection: Upgrade\r\nServer: websockets\r\n\r\n" + ) + + def test_repr(self): + self.assertEqual( + repr(self.headers), + "Headers([('Connection', 'Upgrade'), ('Server', 'websockets')])", + ) + + def test_copy(self): + self.assertEqual(repr(self.headers.copy()), repr(self.headers)) + + def test_serialize(self): + self.assertEqual( + self.headers.serialize(), + b"Connection: Upgrade\r\nServer: websockets\r\n\r\n", + ) + + def test_multiple_values_error_str(self): + self.assertEqual(str(MultipleValuesError("Connection")), "'Connection'") + self.assertEqual(str(MultipleValuesError()), "") + + def test_contains(self): + self.assertIn("Server", self.headers) + + def test_contains_case_insensitive(self): + self.assertIn("server", self.headers) + + def test_contains_not_found(self): + self.assertNotIn("Date", self.headers) + + def test_contains_non_string_key(self): + self.assertNotIn(42, self.headers) + + def 
test_iter(self): + self.assertEqual(set(iter(self.headers)), {"connection", "server"}) + + def test_len(self): + self.assertEqual(len(self.headers), 2) + + def test_getitem(self): + self.assertEqual(self.headers["Server"], "websockets") + + def test_getitem_case_insensitive(self): + self.assertEqual(self.headers["server"], "websockets") + + def test_getitem_key_error(self): + with self.assertRaises(KeyError): + self.headers["Upgrade"] + + def test_getitem_multiple_values_error(self): + self.headers["Server"] = "2" + with self.assertRaises(MultipleValuesError): + self.headers["Server"] + + def test_setitem(self): + self.headers["Upgrade"] = "websocket" + self.assertEqual(self.headers["Upgrade"], "websocket") + + def test_setitem_case_insensitive(self): + self.headers["upgrade"] = "websocket" + self.assertEqual(self.headers["Upgrade"], "websocket") + + def test_setitem_multiple_values(self): + self.headers["Connection"] = "close" + with self.assertRaises(MultipleValuesError): + self.headers["Connection"] + + def test_delitem(self): + del self.headers["Connection"] + with self.assertRaises(KeyError): + self.headers["Connection"] + + def test_delitem_case_insensitive(self): + del self.headers["connection"] + with self.assertRaises(KeyError): + self.headers["Connection"] + + def test_delitem_multiple_values(self): + self.headers["Connection"] = "close" + del self.headers["Connection"] + with self.assertRaises(KeyError): + self.headers["Connection"] + + def test_eq(self): + other_headers = Headers([("Connection", "Upgrade"), ("Server", "websockets")]) + self.assertEqual(self.headers, other_headers) + + def test_eq_not_equal(self): + other_headers = Headers([("Connection", "close"), ("Server", "websockets")]) + self.assertNotEqual(self.headers, other_headers) + + def test_eq_other_type(self): + self.assertNotEqual( + self.headers, "Connection: Upgrade\r\nServer: websockets\r\n\r\n" + ) + + def test_clear(self): + self.headers.clear() + self.assertFalse(self.headers) + self.assertEqual(self.headers, Headers()) + + def test_get_all(self): + self.assertEqual(self.headers.get_all("Connection"), ["Upgrade"]) + + def test_get_all_case_insensitive(self): + self.assertEqual(self.headers.get_all("connection"), ["Upgrade"]) + + def test_get_all_no_values(self): + self.assertEqual(self.headers.get_all("Upgrade"), []) + + def test_get_all_multiple_values(self): + self.headers["Connection"] = "close" + self.assertEqual(self.headers.get_all("Connection"), ["Upgrade", "close"]) + + def test_raw_items(self): + self.assertEqual( + list(self.headers.raw_items()), + [("Connection", "Upgrade"), ("Server", "websockets")], + ) diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index 7ad5ad833..b800d4f91 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -1,7 +1,7 @@ import unittest +from websockets.datastructures import Headers from websockets.exceptions import * -from websockets.http import Headers class ExceptionsTests(unittest.TestCase): diff --git a/tests/test_handshake.py b/tests/test_handshake.py index 7d0477715..6850fec9a 100644 --- a/tests/test_handshake.py +++ b/tests/test_handshake.py @@ -1,6 +1,7 @@ import contextlib import unittest +from websockets.datastructures import Headers from websockets.exceptions import ( InvalidHandshake, InvalidHeader, @@ -9,7 +10,6 @@ ) from websockets.handshake import * from websockets.handshake import accept # private API -from websockets.http import Headers class HandshakeTests(unittest.TestCase): diff --git a/tests/test_http.py 
b/tests/test_http.py index 41b522c3d..b09247c3e 100644 --- a/tests/test_http.py +++ b/tests/test_http.py @@ -1,5 +1,4 @@ import asyncio -import unittest from websockets.exceptions import SecurityError from websockets.http import * @@ -134,116 +133,3 @@ async def test_line_ending(self): self.stream.feed_data(b"foo: bar\n\n") with self.assertRaises(EOFError): await read_headers(self.stream) - - -class HeadersTests(unittest.TestCase): - def setUp(self): - self.headers = Headers([("Connection", "Upgrade"), ("Server", USER_AGENT)]) - - def test_str(self): - self.assertEqual( - str(self.headers), f"Connection: Upgrade\r\nServer: {USER_AGENT}\r\n\r\n" - ) - - def test_repr(self): - self.assertEqual( - repr(self.headers), - f"Headers([('Connection', 'Upgrade'), " f"('Server', '{USER_AGENT}')])", - ) - - def test_multiple_values_error_str(self): - self.assertEqual(str(MultipleValuesError("Connection")), "'Connection'") - self.assertEqual(str(MultipleValuesError()), "") - - def test_contains(self): - self.assertIn("Server", self.headers) - - def test_contains_case_insensitive(self): - self.assertIn("server", self.headers) - - def test_contains_not_found(self): - self.assertNotIn("Date", self.headers) - - def test_contains_non_string_key(self): - self.assertNotIn(42, self.headers) - - def test_iter(self): - self.assertEqual(set(iter(self.headers)), {"connection", "server"}) - - def test_len(self): - self.assertEqual(len(self.headers), 2) - - def test_getitem(self): - self.assertEqual(self.headers["Server"], USER_AGENT) - - def test_getitem_case_insensitive(self): - self.assertEqual(self.headers["server"], USER_AGENT) - - def test_getitem_key_error(self): - with self.assertRaises(KeyError): - self.headers["Upgrade"] - - def test_getitem_multiple_values_error(self): - self.headers["Server"] = "2" - with self.assertRaises(MultipleValuesError): - self.headers["Server"] - - def test_setitem(self): - self.headers["Upgrade"] = "websocket" - self.assertEqual(self.headers["Upgrade"], "websocket") - - def test_setitem_case_insensitive(self): - self.headers["upgrade"] = "websocket" - self.assertEqual(self.headers["Upgrade"], "websocket") - - def test_setitem_multiple_values(self): - self.headers["Connection"] = "close" - with self.assertRaises(MultipleValuesError): - self.headers["Connection"] - - def test_delitem(self): - del self.headers["Connection"] - with self.assertRaises(KeyError): - self.headers["Connection"] - - def test_delitem_case_insensitive(self): - del self.headers["connection"] - with self.assertRaises(KeyError): - self.headers["Connection"] - - def test_delitem_multiple_values(self): - self.headers["Connection"] = "close" - del self.headers["Connection"] - with self.assertRaises(KeyError): - self.headers["Connection"] - - def test_eq(self): - other_headers = self.headers.copy() - self.assertEqual(self.headers, other_headers) - - def test_eq_not_equal(self): - self.assertNotEqual(self.headers, []) - - def test_clear(self): - self.headers.clear() - self.assertFalse(self.headers) - self.assertEqual(self.headers, Headers()) - - def test_get_all(self): - self.assertEqual(self.headers.get_all("Connection"), ["Upgrade"]) - - def test_get_all_case_insensitive(self): - self.assertEqual(self.headers.get_all("connection"), ["Upgrade"]) - - def test_get_all_no_values(self): - self.assertEqual(self.headers.get_all("Upgrade"), []) - - def test_get_all_multiple_values(self): - self.headers["Connection"] = "close" - self.assertEqual(self.headers.get_all("Connection"), ["Upgrade", "close"]) - - def 
test_raw_items(self): - self.assertEqual( - list(self.headers.raw_items()), - [("Connection", "Upgrade"), ("Server", USER_AGENT)], - ) From 1f19838c81c3bb30f94881143c43842ac09162ec Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 6 Oct 2019 12:19:20 +0200 Subject: [PATCH 197/281] Move the handshake and http modules out of the way. --- docs/api.rst | 9 -- docs/changelog.rst | 4 + src/websockets/client.py | 5 +- src/websockets/handshake.py | 191 ++++----------------------- src/websockets/handshake_legacy.py | 186 ++++++++++++++++++++++++++ src/websockets/http.py | 205 +++-------------------------- src/websockets/http_legacy.py | 193 +++++++++++++++++++++++++++ src/websockets/protocol.py | 2 +- src/websockets/server.py | 5 +- tests/test_client_server.py | 5 +- tests/test_handshake.py | 192 +-------------------------- tests/test_handshake_legacy.py | 190 ++++++++++++++++++++++++++ tests/test_http.py | 137 +------------------ tests/test_http_legacy.py | 135 +++++++++++++++++++ 14 files changed, 765 insertions(+), 694 deletions(-) create mode 100644 src/websockets/handshake_legacy.py create mode 100644 src/websockets/http_legacy.py create mode 100644 tests/test_handshake_legacy.py create mode 100644 tests/test_http_legacy.py diff --git a/docs/api.rst b/docs/api.rst index f7706ee2c..b4bddaf38 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -130,12 +130,6 @@ Exceptions Low-level --------- -Opening handshake -................. - -.. automodule:: websockets.handshake - :members: - Data transfer ............. @@ -153,6 +147,3 @@ Utilities .. automodule:: websockets.headers :members: - -.. automodule:: websockets.http - :members: diff --git a/docs/changelog.rst b/docs/changelog.rst index 5de7357ca..3cda4919f 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -16,6 +16,10 @@ Changelog :exc:`~datastructures.MultipleValuesError` from :mod:`websockets.datastructures` instead of :mod:`websockets.http`. + * :mod:`websockets.handshake` is deprecated. + + * :mod:`websockets.http` is deprecated. + Aliases provide backwards compatibility for all previously public APIs. 8.1 diff --git a/src/websockets/client.py b/src/websockets/client.py index 26a369c47..f95dae060 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -23,7 +23,7 @@ ) from .extensions.base import ClientExtensionFactory, Extension from .extensions.permessage_deflate import ClientPerMessageDeflateFactory -from .handshake import build_request, check_response +from .handshake_legacy import build_request, check_response from .headers import ( build_authorization_basic, build_extension, @@ -31,7 +31,8 @@ parse_extension, parse_subprotocol, ) -from .http import USER_AGENT, read_response +from .http import USER_AGENT +from .http_legacy import read_response from .protocol import WebSocketCommonProtocol from .typing import ExtensionHeader, Origin, Subprotocol from .uri import WebSocketURI, parse_uri diff --git a/src/websockets/handshake.py b/src/websockets/handshake.py index e30a67125..f27bd1b84 100644 --- a/src/websockets/handshake.py +++ b/src/websockets/handshake.py @@ -1,187 +1,48 @@ -""" -:mod:`websockets.handshake` provides helpers for the WebSocket handshake. +import warnings -See `section 4 of RFC 6455`_. - -.. _section 4 of RFC 6455: http://tools.ietf.org/html/rfc6455#section-4 - -Some checks cannot be performed because they depend too much on the -context; instead, they're documented below. 
- -To accept a connection, a server must: - -- Read the request, check that the method is GET, and check the headers with - :func:`check_request`, -- Send a 101 response to the client with the headers created by - :func:`build_response` if the request is valid; otherwise, send an - appropriate HTTP error code. - -To open a connection, a client must: - -- Send a GET request to the server with the headers created by - :func:`build_request`, -- Read the response, check that the status code is 101, and check the headers - with :func:`check_response`. - -""" - -import base64 -import binascii -import hashlib -import random -from typing import List - -from .datastructures import Headers, MultipleValuesError -from .exceptions import InvalidHeader, InvalidHeaderValue, InvalidUpgrade -from .headers import ConnectionOption, UpgradeProtocol, parse_connection, parse_upgrade +from .datastructures import Headers __all__ = ["build_request", "check_request", "build_response", "check_response"] -GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" - - -def build_request(headers: Headers) -> str: - """ - Build a handshake request to send to the server. - - Update request headers passed in argument. - - :param headers: request headers - :returns: ``key`` which must be passed to :func:`check_response` - - """ - raw_key = bytes(random.getrandbits(8) for _ in range(16)) - key = base64.b64encode(raw_key).decode() - headers["Upgrade"] = "websocket" - headers["Connection"] = "Upgrade" - headers["Sec-WebSocket-Key"] = key - headers["Sec-WebSocket-Version"] = "13" - return key +GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" -def check_request(headers: Headers) -> str: - """ - Check a handshake request received from the client. - This function doesn't verify that the request is an HTTP/1.1 or higher GET - request and doesn't perform ``Host`` and ``Origin`` checks. These controls - are usually performed earlier in the HTTP request handling code. They're - the responsibility of the caller. +# Backwards compatibility with previously documented public APIs - :param headers: request headers - :returns: ``key`` which must be passed to :func:`build_response` - :raises ~websockets.exceptions.InvalidHandshake: if the handshake request - is invalid; then the server must return 400 Bad Request error - """ - connection: List[ConnectionOption] = sum( - [parse_connection(value) for value in headers.get_all("Connection")], [] +def build_request(headers: Headers) -> str: # pragma: no cover + warnings.warn( + "websockets.handshake.build_request is deprecated", DeprecationWarning ) + from .handshake_legacy import build_request - if not any(value.lower() == "upgrade" for value in connection): - raise InvalidUpgrade("Connection", ", ".join(connection)) + return build_request(headers) - upgrade: List[UpgradeProtocol] = sum( - [parse_upgrade(value) for value in headers.get_all("Upgrade")], [] - ) - # For compatibility with non-strict implementations, ignore case when - # checking the Upgrade header. The RFC always uses "websocket", except - # in section 11.2. (IANA registration) where it uses "WebSocket". 
- if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"): - raise InvalidUpgrade("Upgrade", ", ".join(upgrade)) - - try: - s_w_key = headers["Sec-WebSocket-Key"] - except KeyError: - raise InvalidHeader("Sec-WebSocket-Key") - except MultipleValuesError: - raise InvalidHeader( - "Sec-WebSocket-Key", "more than one Sec-WebSocket-Key header found" - ) - - try: - raw_key = base64.b64decode(s_w_key.encode(), validate=True) - except binascii.Error: - raise InvalidHeaderValue("Sec-WebSocket-Key", s_w_key) - if len(raw_key) != 16: - raise InvalidHeaderValue("Sec-WebSocket-Key", s_w_key) - - try: - s_w_version = headers["Sec-WebSocket-Version"] - except KeyError: - raise InvalidHeader("Sec-WebSocket-Version") - except MultipleValuesError: - raise InvalidHeader( - "Sec-WebSocket-Version", "more than one Sec-WebSocket-Version header found" - ) - - if s_w_version != "13": - raise InvalidHeaderValue("Sec-WebSocket-Version", s_w_version) - - return s_w_key - - -def build_response(headers: Headers, key: str) -> None: - """ - Build a handshake response to send to the client. - - Update response headers passed in argument. - - :param headers: response headers - :param key: comes from :func:`check_request` - - """ - headers["Upgrade"] = "websocket" - headers["Connection"] = "Upgrade" - headers["Sec-WebSocket-Accept"] = accept(key) - - -def check_response(headers: Headers, key: str) -> None: - """ - Check a handshake response received from the server. - - This function doesn't verify that the response is an HTTP/1.1 or higher - response with a 101 status code. These controls are the responsibility of - the caller. - - :param headers: response headers - :param key: comes from :func:`build_request` - :raises ~websockets.exceptions.InvalidHandshake: if the handshake response - is invalid - - """ - connection: List[ConnectionOption] = sum( - [parse_connection(value) for value in headers.get_all("Connection")], [] +def check_request(headers: Headers) -> str: # pragma: no cover + warnings.warn( + "websockets.handshake.check_request is deprecated", DeprecationWarning ) + from .handshake_legacy import check_request - if not any(value.lower() == "upgrade" for value in connection): - raise InvalidUpgrade("Connection", " ".join(connection)) + return check_request(headers) - upgrade: List[UpgradeProtocol] = sum( - [parse_upgrade(value) for value in headers.get_all("Upgrade")], [] - ) - # For compatibility with non-strict implementations, ignore case when - # checking the Upgrade header. The RFC always uses "websocket", except - # in section 11.2. (IANA registration) where it uses "WebSocket". 
- if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"): - raise InvalidUpgrade("Upgrade", ", ".join(upgrade)) +def build_response(headers: Headers, key: str) -> None: # pragma: no cover + warnings.warn( + "websockets.handshake.build_response is deprecated", DeprecationWarning + ) + from .handshake_legacy import build_response - try: - s_w_accept = headers["Sec-WebSocket-Accept"] - except KeyError: - raise InvalidHeader("Sec-WebSocket-Accept") - except MultipleValuesError: - raise InvalidHeader( - "Sec-WebSocket-Accept", "more than one Sec-WebSocket-Accept header found" - ) + return build_response(headers, key) - if s_w_accept != accept(key): - raise InvalidHeaderValue("Sec-WebSocket-Accept", s_w_accept) +def check_response(headers: Headers, key: str) -> None: # pragma: no cover + warnings.warn( + "websockets.handshake.check_response is deprecated", DeprecationWarning + ) + from .handshake_legacy import check_response -def accept(key: str) -> str: - sha1 = hashlib.sha1((key + GUID).encode()).digest() - return base64.b64encode(sha1).decode() + return check_response(headers, key) diff --git a/src/websockets/handshake_legacy.py b/src/websockets/handshake_legacy.py new file mode 100644 index 000000000..3fca45545 --- /dev/null +++ b/src/websockets/handshake_legacy.py @@ -0,0 +1,186 @@ +""" +:mod:`websockets.handshake` provides helpers for the WebSocket handshake. + +See `section 4 of RFC 6455`_. + +.. _section 4 of RFC 6455: http://tools.ietf.org/html/rfc6455#section-4 + +Some checks cannot be performed because they depend too much on the +context; instead, they're documented below. + +To accept a connection, a server must: + +- Read the request, check that the method is GET, and check the headers with + :func:`check_request`, +- Send a 101 response to the client with the headers created by + :func:`build_response` if the request is valid; otherwise, send an + appropriate HTTP error code. + +To open a connection, a client must: + +- Send a GET request to the server with the headers created by + :func:`build_request`, +- Read the response, check that the status code is 101, and check the headers + with :func:`check_response`. + +""" + +import base64 +import binascii +import hashlib +import random +from typing import List + +from .datastructures import Headers, MultipleValuesError +from .exceptions import InvalidHeader, InvalidHeaderValue, InvalidUpgrade +from .handshake import GUID +from .headers import ConnectionOption, UpgradeProtocol, parse_connection, parse_upgrade + + +__all__ = ["build_request", "check_request", "build_response", "check_response"] + + +def build_request(headers: Headers) -> str: + """ + Build a handshake request to send to the server. + + Update request headers passed in argument. + + :param headers: request headers + :returns: ``key`` which must be passed to :func:`check_response` + + """ + raw_key = bytes(random.getrandbits(8) for _ in range(16)) + key = base64.b64encode(raw_key).decode() + headers["Upgrade"] = "websocket" + headers["Connection"] = "Upgrade" + headers["Sec-WebSocket-Key"] = key + headers["Sec-WebSocket-Version"] = "13" + return key + + +def check_request(headers: Headers) -> str: + """ + Check a handshake request received from the client. + + This function doesn't verify that the request is an HTTP/1.1 or higher GET + request and doesn't perform ``Host`` and ``Origin`` checks. These controls + are usually performed earlier in the HTTP request handling code. They're + the responsibility of the caller. 
+ + :param headers: request headers + :returns: ``key`` which must be passed to :func:`build_response` + :raises ~websockets.exceptions.InvalidHandshake: if the handshake request + is invalid; then the server must return 400 Bad Request error + + """ + connection: List[ConnectionOption] = sum( + [parse_connection(value) for value in headers.get_all("Connection")], [] + ) + + if not any(value.lower() == "upgrade" for value in connection): + raise InvalidUpgrade("Connection", ", ".join(connection)) + + upgrade: List[UpgradeProtocol] = sum( + [parse_upgrade(value) for value in headers.get_all("Upgrade")], [] + ) + + # For compatibility with non-strict implementations, ignore case when + # checking the Upgrade header. The RFC always uses "websocket", except + # in section 11.2. (IANA registration) where it uses "WebSocket". + if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"): + raise InvalidUpgrade("Upgrade", ", ".join(upgrade)) + + try: + s_w_key = headers["Sec-WebSocket-Key"] + except KeyError: + raise InvalidHeader("Sec-WebSocket-Key") + except MultipleValuesError: + raise InvalidHeader( + "Sec-WebSocket-Key", "more than one Sec-WebSocket-Key header found" + ) + + try: + raw_key = base64.b64decode(s_w_key.encode(), validate=True) + except binascii.Error: + raise InvalidHeaderValue("Sec-WebSocket-Key", s_w_key) + if len(raw_key) != 16: + raise InvalidHeaderValue("Sec-WebSocket-Key", s_w_key) + + try: + s_w_version = headers["Sec-WebSocket-Version"] + except KeyError: + raise InvalidHeader("Sec-WebSocket-Version") + except MultipleValuesError: + raise InvalidHeader( + "Sec-WebSocket-Version", "more than one Sec-WebSocket-Version header found" + ) + + if s_w_version != "13": + raise InvalidHeaderValue("Sec-WebSocket-Version", s_w_version) + + return s_w_key + + +def build_response(headers: Headers, key: str) -> None: + """ + Build a handshake response to send to the client. + + Update response headers passed in argument. + + :param headers: response headers + :param key: comes from :func:`check_request` + + """ + headers["Upgrade"] = "websocket" + headers["Connection"] = "Upgrade" + headers["Sec-WebSocket-Accept"] = accept(key) + + +def check_response(headers: Headers, key: str) -> None: + """ + Check a handshake response received from the server. + + This function doesn't verify that the response is an HTTP/1.1 or higher + response with a 101 status code. These controls are the responsibility of + the caller. + + :param headers: response headers + :param key: comes from :func:`build_request` + :raises ~websockets.exceptions.InvalidHandshake: if the handshake response + is invalid + + """ + connection: List[ConnectionOption] = sum( + [parse_connection(value) for value in headers.get_all("Connection")], [] + ) + + if not any(value.lower() == "upgrade" for value in connection): + raise InvalidUpgrade("Connection", " ".join(connection)) + + upgrade: List[UpgradeProtocol] = sum( + [parse_upgrade(value) for value in headers.get_all("Upgrade")], [] + ) + + # For compatibility with non-strict implementations, ignore case when + # checking the Upgrade header. The RFC always uses "websocket", except + # in section 11.2. (IANA registration) where it uses "WebSocket". 
+ if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"): + raise InvalidUpgrade("Upgrade", ", ".join(upgrade)) + + try: + s_w_accept = headers["Sec-WebSocket-Accept"] + except KeyError: + raise InvalidHeader("Sec-WebSocket-Accept") + except MultipleValuesError: + raise InvalidHeader( + "Sec-WebSocket-Accept", "more than one Sec-WebSocket-Accept header found" + ) + + if s_w_accept != accept(key): + raise InvalidHeaderValue("Sec-WebSocket-Accept", s_w_accept) + + +def accept(key: str) -> str: + sha1 = hashlib.sha1((key + GUID).encode()).digest() + return base64.b64encode(sha1).decode() diff --git a/src/websockets/http.py b/src/websockets/http.py index ddb2afcfa..850b9beaa 100644 --- a/src/websockets/http.py +++ b/src/websockets/http.py @@ -1,208 +1,37 @@ -""" -:mod:`websockets.http` module provides basic HTTP/1.1 support. It is merely -:adequate for WebSocket handshake messages. - -These APIs cannot be imported from :mod:`websockets`. They must be imported -from :mod:`websockets.http`. - -""" - import asyncio -import re import sys +import warnings from typing import Tuple -# For backwards compatibility - should be deprecated +# For backwards compatibility: +# Headers and MultipleValuesError used to be defined in this module from .datastructures import Headers, MultipleValuesError # noqa -from .exceptions import SecurityError from .version import version as websockets_version -__all__ = ["read_request", "read_response", "USER_AGENT"] +__all__ = ["USER_AGENT"] -MAX_HEADERS = 256 -MAX_LINE = 4096 PYTHON_VERSION = "{}.{}".format(*sys.version_info) USER_AGENT = f"Python/{PYTHON_VERSION} websockets/{websockets_version}" -def d(value: bytes) -> str: - """ - Decode a bytestring for interpolating into an error message. - - """ - return value.decode(errors="backslashreplace") - - -# See https://tools.ietf.org/html/rfc7230#appendix-B. - -# Regex for validating header names. - -_token_re = re.compile(rb"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+") - -# Regex for validating header values. - -# We don't attempt to support obsolete line folding. - -# Include HTAB (\x09), SP (\x20), VCHAR (\x21-\x7e), obs-text (\x80-\xff). - -# The ABNF is complicated because it attempts to express that optional -# whitespace is ignored. We strip whitespace and don't revalidate that. - -# See also https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189 - -_value_re = re.compile(rb"[\x09\x20-\x7e\x80-\xff]*") - - -async def read_request(stream: asyncio.StreamReader) -> Tuple[str, Headers]: - """ - Read an HTTP/1.1 GET request and return ``(path, headers)``. - - ``path`` isn't URL-decoded or validated in any way. - - ``path`` and ``headers`` are expected to contain only ASCII characters. - Other characters are represented with surrogate escapes. - - :func:`read_request` doesn't attempt to read the request body because - WebSocket handshake requests don't have one. If the request contains a - body, it may be read from ``stream`` after this coroutine returns. - - :param stream: input to read the request from - :raises EOFError: if the connection is closed without a full HTTP request - :raises SecurityError: if the request exceeds a security limit - :raises ValueError: if the request isn't well formatted - - """ - # https://tools.ietf.org/html/rfc7230#section-3.1.1 - - # Parsing is simple because fixed values are expected for method and - # version and because path isn't checked. Since WebSocket software tends - # to implement HTTP/1.1 strictly, there's little need for lenient parsing. 
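# Illustration (not part of the patch): the request line is parsed with a single
# bounded split, so a well-formed line yields exactly three fields, e.g.
#
#     >>> b"GET /chat HTTP/1.1".split(b" ", 2)
#     [b'GET', b'/chat', b'HTTP/1.1']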
- - try: - request_line = await read_line(stream) - except EOFError as exc: - raise EOFError("connection closed while reading HTTP request line") from exc - - try: - method, raw_path, version = request_line.split(b" ", 2) - except ValueError: # not enough values to unpack (expected 3, got 1-2) - raise ValueError(f"invalid HTTP request line: {d(request_line)}") from None - - if method != b"GET": - raise ValueError(f"unsupported HTTP method: {d(method)}") - if version != b"HTTP/1.1": - raise ValueError(f"unsupported HTTP version: {d(version)}") - path = raw_path.decode("ascii", "surrogateescape") - - headers = await read_headers(stream) - - return path, headers - - -async def read_response(stream: asyncio.StreamReader) -> Tuple[int, str, Headers]: - """ - Read an HTTP/1.1 response and return ``(status_code, reason, headers)``. - - ``reason`` and ``headers`` are expected to contain only ASCII characters. - Other characters are represented with surrogate escapes. - - :func:`read_request` doesn't attempt to read the response body because - WebSocket handshake responses don't have one. If the response contains a - body, it may be read from ``stream`` after this coroutine returns. - - :param stream: input to read the response from - :raises EOFError: if the connection is closed without a full HTTP response - :raises SecurityError: if the response exceeds a security limit - :raises ValueError: if the response isn't well formatted - - """ - # https://tools.ietf.org/html/rfc7230#section-3.1.2 - - # As in read_request, parsing is simple because a fixed value is expected - # for version, status_code is a 3-digit number, and reason can be ignored. - - try: - status_line = await read_line(stream) - except EOFError as exc: - raise EOFError("connection closed while reading HTTP status line") from exc - - try: - version, raw_status_code, raw_reason = status_line.split(b" ", 2) - except ValueError: # not enough values to unpack (expected 3, got 1-2) - raise ValueError(f"invalid HTTP status line: {d(status_line)}") from None - - if version != b"HTTP/1.1": - raise ValueError(f"unsupported HTTP version: {d(version)}") - try: - status_code = int(raw_status_code) - except ValueError: # invalid literal for int() with base 10 - raise ValueError(f"invalid HTTP status code: {d(raw_status_code)}") from None - if not 100 <= status_code < 1000: - raise ValueError(f"unsupported HTTP status code: {d(raw_status_code)}") - if not _value_re.fullmatch(raw_reason): - raise ValueError(f"invalid HTTP reason phrase: {d(raw_reason)}") - reason = raw_reason.decode() - - headers = await read_headers(stream) - - return status_code, reason, headers - - -async def read_headers(stream: asyncio.StreamReader) -> Headers: - """ - Read HTTP headers from ``stream``. - - Non-ASCII characters are represented with surrogate escapes. - - """ - # https://tools.ietf.org/html/rfc7230#section-3.2 - - # We don't attempt to support obsolete line folding. 
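# Illustration (not part of the patch): each header line is split on the first
# colon and the value is stripped of optional surrounding whitespace, e.g.
#
#     >>> b"Server: websockets".split(b":", 1)
#     [b'Server', b' websockets']
#     >>> b" websockets".strip(b" \t")
#     b'websockets'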
- - headers = Headers() - for _ in range(MAX_HEADERS + 1): - try: - line = await read_line(stream) - except EOFError as exc: - raise EOFError("connection closed while reading HTTP headers") from exc - if line == b"": - break - - try: - raw_name, raw_value = line.split(b":", 1) - except ValueError: # not enough values to unpack (expected 2, got 1) - raise ValueError(f"invalid HTTP header line: {d(line)}") from None - if not _token_re.fullmatch(raw_name): - raise ValueError(f"invalid HTTP header name: {d(raw_name)}") - raw_value = raw_value.strip(b" \t") - if not _value_re.fullmatch(raw_value): - raise ValueError(f"invalid HTTP header value: {d(raw_value)}") - - name = raw_name.decode("ascii") # guaranteed to be ASCII at this point - value = raw_value.decode("ascii", "surrogateescape") - headers[name] = value +# Backwards compatibility with previously documented public APIs - else: - raise SecurityError("too many HTTP headers") - return headers +async def read_request( + stream: asyncio.StreamReader, +) -> Tuple[str, Headers]: # pragma: no cover + warnings.warn("websockets.http.read_request is deprecated", DeprecationWarning) + from .http_legacy import read_request + return await read_request(stream) -async def read_line(stream: asyncio.StreamReader) -> bytes: - """ - Read a single line from ``stream``. - CRLF is stripped from the return value. +async def read_response( + stream: asyncio.StreamReader, +) -> Tuple[int, str, Headers]: # pragma: no cover + warnings.warn("websockets.http.read_response is deprecated", DeprecationWarning) + from .http_legacy import read_response - """ - # Security: this is bounded by the StreamReader's limit (default = 32 KiB). - line = await stream.readline() - # Security: this guarantees header values are small (hard-coded = 4 KiB) - if len(line) > MAX_LINE: - raise SecurityError("line too long") - # Not mandatory but safe - https://tools.ietf.org/html/rfc7230#section-3.5 - if not line.endswith(b"\r\n"): - raise EOFError("line without CRLF") - return line[:-2] + return await read_response(stream) diff --git a/src/websockets/http_legacy.py b/src/websockets/http_legacy.py new file mode 100644 index 000000000..3630d3593 --- /dev/null +++ b/src/websockets/http_legacy.py @@ -0,0 +1,193 @@ +import asyncio +import re +from typing import Tuple + +from .datastructures import Headers +from .exceptions import SecurityError + + +__all__ = ["read_request", "read_response"] + +MAX_HEADERS = 256 +MAX_LINE = 4096 + + +def d(value: bytes) -> str: + """ + Decode a bytestring for interpolating into an error message. + + """ + return value.decode(errors="backslashreplace") + + +# See https://tools.ietf.org/html/rfc7230#appendix-B. + +# Regex for validating header names. + +_token_re = re.compile(rb"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+") + +# Regex for validating header values. + +# We don't attempt to support obsolete line folding. + +# Include HTAB (\x09), SP (\x20), VCHAR (\x21-\x7e), obs-text (\x80-\xff). + +# The ABNF is complicated because it attempts to express that optional +# whitespace is ignored. We strip whitespace and don't revalidate that. + +# See also https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189 + +_value_re = re.compile(rb"[\x09\x20-\x7e\x80-\xff]*") + + +async def read_request(stream: asyncio.StreamReader) -> Tuple[str, Headers]: + """ + Read an HTTP/1.1 GET request and return ``(path, headers)``. + + ``path`` isn't URL-decoded or validated in any way. + + ``path`` and ``headers`` are expected to contain only ASCII characters. 
+ Other characters are represented with surrogate escapes. + + :func:`read_request` doesn't attempt to read the request body because + WebSocket handshake requests don't have one. If the request contains a + body, it may be read from ``stream`` after this coroutine returns. + + :param stream: input to read the request from + :raises EOFError: if the connection is closed without a full HTTP request + :raises SecurityError: if the request exceeds a security limit + :raises ValueError: if the request isn't well formatted + + """ + # https://tools.ietf.org/html/rfc7230#section-3.1.1 + + # Parsing is simple because fixed values are expected for method and + # version and because path isn't checked. Since WebSocket software tends + # to implement HTTP/1.1 strictly, there's little need for lenient parsing. + + try: + request_line = await read_line(stream) + except EOFError as exc: + raise EOFError("connection closed while reading HTTP request line") from exc + + try: + method, raw_path, version = request_line.split(b" ", 2) + except ValueError: # not enough values to unpack (expected 3, got 1-2) + raise ValueError(f"invalid HTTP request line: {d(request_line)}") from None + + if method != b"GET": + raise ValueError(f"unsupported HTTP method: {d(method)}") + if version != b"HTTP/1.1": + raise ValueError(f"unsupported HTTP version: {d(version)}") + path = raw_path.decode("ascii", "surrogateescape") + + headers = await read_headers(stream) + + return path, headers + + +async def read_response(stream: asyncio.StreamReader) -> Tuple[int, str, Headers]: + """ + Read an HTTP/1.1 response and return ``(status_code, reason, headers)``. + + ``reason`` and ``headers`` are expected to contain only ASCII characters. + Other characters are represented with surrogate escapes. + + :func:`read_request` doesn't attempt to read the response body because + WebSocket handshake responses don't have one. If the response contains a + body, it may be read from ``stream`` after this coroutine returns. + + :param stream: input to read the response from + :raises EOFError: if the connection is closed without a full HTTP response + :raises SecurityError: if the response exceeds a security limit + :raises ValueError: if the response isn't well formatted + + """ + # https://tools.ietf.org/html/rfc7230#section-3.1.2 + + # As in read_request, parsing is simple because a fixed value is expected + # for version, status_code is a 3-digit number, and reason can be ignored. 
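# Illustration (not part of the patch): the status line is parsed the same way
# as the request line, with a single bounded split, e.g.
#
#     >>> b"HTTP/1.1 101 Switching Protocols".split(b" ", 2)
#     [b'HTTP/1.1', b'101', b'Switching Protocols']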
+ + try: + status_line = await read_line(stream) + except EOFError as exc: + raise EOFError("connection closed while reading HTTP status line") from exc + + try: + version, raw_status_code, raw_reason = status_line.split(b" ", 2) + except ValueError: # not enough values to unpack (expected 3, got 1-2) + raise ValueError(f"invalid HTTP status line: {d(status_line)}") from None + + if version != b"HTTP/1.1": + raise ValueError(f"unsupported HTTP version: {d(version)}") + try: + status_code = int(raw_status_code) + except ValueError: # invalid literal for int() with base 10 + raise ValueError(f"invalid HTTP status code: {d(raw_status_code)}") from None + if not 100 <= status_code < 1000: + raise ValueError(f"unsupported HTTP status code: {d(raw_status_code)}") + if not _value_re.fullmatch(raw_reason): + raise ValueError(f"invalid HTTP reason phrase: {d(raw_reason)}") + reason = raw_reason.decode() + + headers = await read_headers(stream) + + return status_code, reason, headers + + +async def read_headers(stream: asyncio.StreamReader) -> Headers: + """ + Read HTTP headers from ``stream``. + + Non-ASCII characters are represented with surrogate escapes. + + """ + # https://tools.ietf.org/html/rfc7230#section-3.2 + + # We don't attempt to support obsolete line folding. + + headers = Headers() + for _ in range(MAX_HEADERS + 1): + try: + line = await read_line(stream) + except EOFError as exc: + raise EOFError("connection closed while reading HTTP headers") from exc + if line == b"": + break + + try: + raw_name, raw_value = line.split(b":", 1) + except ValueError: # not enough values to unpack (expected 2, got 1) + raise ValueError(f"invalid HTTP header line: {d(line)}") from None + if not _token_re.fullmatch(raw_name): + raise ValueError(f"invalid HTTP header name: {d(raw_name)}") + raw_value = raw_value.strip(b" \t") + if not _value_re.fullmatch(raw_value): + raise ValueError(f"invalid HTTP header value: {d(raw_value)}") + + name = raw_name.decode("ascii") # guaranteed to be ASCII at this point + value = raw_value.decode("ascii", "surrogateescape") + headers[name] = value + + else: + raise SecurityError("too many HTTP headers") + + return headers + + +async def read_line(stream: asyncio.StreamReader) -> bytes: + """ + Read a single line from ``stream``. + + CRLF is stripped from the return value. + + """ + # Security: this is bounded by the StreamReader's limit (default = 32 KiB). 
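# Note (not part of the patch): asyncio.StreamReader.readline() raises
# ValueError once a line exceeds the reader's configured limit, so a peer
# cannot force an unbounded buffer even before the MAX_LINE check below runs.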
+ line = await stream.readline() + # Security: this guarantees header values are small (hard-coded = 4 KiB) + if len(line) > MAX_LINE: + raise SecurityError("line too long") + # Not mandatory but safe - https://tools.ietf.org/html/rfc7230#section-3.5 + if not line.endswith(b"\r\n"): + raise EOFError("line without CRLF") + return line[:-2] diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 60235643e..cc4416ba8 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -41,7 +41,7 @@ ) from .extensions.base import Extension from .framing import * -from .handshake import * +from .handshake_legacy import * from .typing import Data diff --git a/src/websockets/server.py b/src/websockets/server.py index da98cac05..522c76114 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -40,9 +40,10 @@ ) from .extensions.base import Extension, ServerExtensionFactory from .extensions.permessage_deflate import ServerPerMessageDeflateFactory -from .handshake import build_response, check_request +from .handshake_legacy import build_response, check_request from .headers import build_extension, parse_extension, parse_subprotocol -from .http import USER_AGENT, read_request +from .http import USER_AGENT +from .http_legacy import read_request from .protocol import WebSocketCommonProtocol from .typing import ExtensionHeader, Origin, Subprotocol diff --git a/tests/test_client_server.py b/tests/test_client_server.py index ba0984c80..db26d6583 100644 --- a/tests/test_client_server.py +++ b/tests/test_client_server.py @@ -27,8 +27,9 @@ PerMessageDeflate, ServerPerMessageDeflateFactory, ) -from websockets.handshake import build_response -from websockets.http import USER_AGENT, read_response +from websockets.handshake_legacy import build_response +from websockets.http import USER_AGENT +from websockets.http_legacy import read_response from websockets.protocol import State from websockets.server import * from websockets.uri import parse_uri diff --git a/tests/test_handshake.py b/tests/test_handshake.py index 6850fec9a..8c35c9714 100644 --- a/tests/test_handshake.py +++ b/tests/test_handshake.py @@ -1,190 +1,2 @@ -import contextlib -import unittest - -from websockets.datastructures import Headers -from websockets.exceptions import ( - InvalidHandshake, - InvalidHeader, - InvalidHeaderValue, - InvalidUpgrade, -) -from websockets.handshake import * -from websockets.handshake import accept # private API - - -class HandshakeTests(unittest.TestCase): - def test_accept(self): - # Test vector from RFC 6455 - key = "dGhlIHNhbXBsZSBub25jZQ==" - acc = "s3pPLMBiTxaQ9kYGzzhZRbK+xOo=" - self.assertEqual(accept(key), acc) - - def test_round_trip(self): - request_headers = Headers() - request_key = build_request(request_headers) - response_key = check_request(request_headers) - self.assertEqual(request_key, response_key) - response_headers = Headers() - build_response(response_headers, response_key) - check_response(response_headers, request_key) - - @contextlib.contextmanager - def assertValidRequestHeaders(self): - """ - Provide request headers for modification. - - Assert that the transformation kept them valid. - - """ - headers = Headers() - build_request(headers) - yield headers - check_request(headers) - - @contextlib.contextmanager - def assertInvalidRequestHeaders(self, exc_type): - """ - Provide request headers for modification. - - Assert that the transformation made them invalid. 
- - """ - headers = Headers() - build_request(headers) - yield headers - assert issubclass(exc_type, InvalidHandshake) - with self.assertRaises(exc_type): - check_request(headers) - - def test_request_invalid_connection(self): - with self.assertInvalidRequestHeaders(InvalidUpgrade) as headers: - del headers["Connection"] - headers["Connection"] = "Downgrade" - - def test_request_missing_connection(self): - with self.assertInvalidRequestHeaders(InvalidUpgrade) as headers: - del headers["Connection"] - - def test_request_additional_connection(self): - with self.assertValidRequestHeaders() as headers: - headers["Connection"] = "close" - - def test_request_invalid_upgrade(self): - with self.assertInvalidRequestHeaders(InvalidUpgrade) as headers: - del headers["Upgrade"] - headers["Upgrade"] = "socketweb" - - def test_request_missing_upgrade(self): - with self.assertInvalidRequestHeaders(InvalidUpgrade) as headers: - del headers["Upgrade"] - - def test_request_additional_upgrade(self): - with self.assertInvalidRequestHeaders(InvalidUpgrade) as headers: - headers["Upgrade"] = "socketweb" - - def test_request_invalid_key_not_base64(self): - with self.assertInvalidRequestHeaders(InvalidHeaderValue) as headers: - del headers["Sec-WebSocket-Key"] - headers["Sec-WebSocket-Key"] = "!@#$%^&*()" - - def test_request_invalid_key_not_well_padded(self): - with self.assertInvalidRequestHeaders(InvalidHeaderValue) as headers: - del headers["Sec-WebSocket-Key"] - headers["Sec-WebSocket-Key"] = "CSIRmL8dWYxeAdr/XpEHRw" - - def test_request_invalid_key_not_16_bytes_long(self): - with self.assertInvalidRequestHeaders(InvalidHeaderValue) as headers: - del headers["Sec-WebSocket-Key"] - headers["Sec-WebSocket-Key"] = "ZLpprpvK4PE=" - - def test_request_missing_key(self): - with self.assertInvalidRequestHeaders(InvalidHeader) as headers: - del headers["Sec-WebSocket-Key"] - - def test_request_additional_key(self): - with self.assertInvalidRequestHeaders(InvalidHeader) as headers: - # This duplicates the Sec-WebSocket-Key header. - headers["Sec-WebSocket-Key"] = headers["Sec-WebSocket-Key"] - - def test_request_invalid_version(self): - with self.assertInvalidRequestHeaders(InvalidHeaderValue) as headers: - del headers["Sec-WebSocket-Version"] - headers["Sec-WebSocket-Version"] = "42" - - def test_request_missing_version(self): - with self.assertInvalidRequestHeaders(InvalidHeader) as headers: - del headers["Sec-WebSocket-Version"] - - def test_request_additional_version(self): - with self.assertInvalidRequestHeaders(InvalidHeader) as headers: - # This duplicates the Sec-WebSocket-Version header. - headers["Sec-WebSocket-Version"] = headers["Sec-WebSocket-Version"] - - @contextlib.contextmanager - def assertValidResponseHeaders(self, key="CSIRmL8dWYxeAdr/XpEHRw=="): - """ - Provide response headers for modification. - - Assert that the transformation kept them valid. - - """ - headers = Headers() - build_response(headers, key) - yield headers - check_response(headers, key) - - @contextlib.contextmanager - def assertInvalidResponseHeaders(self, exc_type, key="CSIRmL8dWYxeAdr/XpEHRw=="): - """ - Provide response headers for modification. - - Assert that the transformation made them invalid. 
- - """ - headers = Headers() - build_response(headers, key) - yield headers - assert issubclass(exc_type, InvalidHandshake) - with self.assertRaises(exc_type): - check_response(headers, key) - - def test_response_invalid_connection(self): - with self.assertInvalidResponseHeaders(InvalidUpgrade) as headers: - del headers["Connection"] - headers["Connection"] = "Downgrade" - - def test_response_missing_connection(self): - with self.assertInvalidResponseHeaders(InvalidUpgrade) as headers: - del headers["Connection"] - - def test_response_additional_connection(self): - with self.assertValidResponseHeaders() as headers: - headers["Connection"] = "close" - - def test_response_invalid_upgrade(self): - with self.assertInvalidResponseHeaders(InvalidUpgrade) as headers: - del headers["Upgrade"] - headers["Upgrade"] = "socketweb" - - def test_response_missing_upgrade(self): - with self.assertInvalidResponseHeaders(InvalidUpgrade) as headers: - del headers["Upgrade"] - - def test_response_additional_upgrade(self): - with self.assertInvalidResponseHeaders(InvalidUpgrade) as headers: - headers["Upgrade"] = "socketweb" - - def test_response_invalid_accept(self): - with self.assertInvalidResponseHeaders(InvalidHeaderValue) as headers: - del headers["Sec-WebSocket-Accept"] - other_key = "1Eq4UDEFQYg3YspNgqxv5g==" - headers["Sec-WebSocket-Accept"] = accept(other_key) - - def test_response_missing_accept(self): - with self.assertInvalidResponseHeaders(InvalidHeader) as headers: - del headers["Sec-WebSocket-Accept"] - - def test_response_additional_accept(self): - with self.assertInvalidResponseHeaders(InvalidHeader) as headers: - # This duplicates the Sec-WebSocket-Accept header. - headers["Sec-WebSocket-Accept"] = headers["Sec-WebSocket-Accept"] +# Check that the legacy handshake module imports without an exception. +from websockets.handshake import * # noqa diff --git a/tests/test_handshake_legacy.py b/tests/test_handshake_legacy.py new file mode 100644 index 000000000..361410d3f --- /dev/null +++ b/tests/test_handshake_legacy.py @@ -0,0 +1,190 @@ +import contextlib +import unittest + +from websockets.datastructures import Headers +from websockets.exceptions import ( + InvalidHandshake, + InvalidHeader, + InvalidHeaderValue, + InvalidUpgrade, +) +from websockets.handshake_legacy import * +from websockets.handshake_legacy import accept # private API + + +class HandshakeTests(unittest.TestCase): + def test_accept(self): + # Test vector from RFC 6455 + key = "dGhlIHNhbXBsZSBub25jZQ==" + acc = "s3pPLMBiTxaQ9kYGzzhZRbK+xOo=" + self.assertEqual(accept(key), acc) + + def test_round_trip(self): + request_headers = Headers() + request_key = build_request(request_headers) + response_key = check_request(request_headers) + self.assertEqual(request_key, response_key) + response_headers = Headers() + build_response(response_headers, response_key) + check_response(response_headers, request_key) + + @contextlib.contextmanager + def assertValidRequestHeaders(self): + """ + Provide request headers for modification. + + Assert that the transformation kept them valid. + + """ + headers = Headers() + build_request(headers) + yield headers + check_request(headers) + + @contextlib.contextmanager + def assertInvalidRequestHeaders(self, exc_type): + """ + Provide request headers for modification. + + Assert that the transformation made them invalid. 
+ + """ + headers = Headers() + build_request(headers) + yield headers + assert issubclass(exc_type, InvalidHandshake) + with self.assertRaises(exc_type): + check_request(headers) + + def test_request_invalid_connection(self): + with self.assertInvalidRequestHeaders(InvalidUpgrade) as headers: + del headers["Connection"] + headers["Connection"] = "Downgrade" + + def test_request_missing_connection(self): + with self.assertInvalidRequestHeaders(InvalidUpgrade) as headers: + del headers["Connection"] + + def test_request_additional_connection(self): + with self.assertValidRequestHeaders() as headers: + headers["Connection"] = "close" + + def test_request_invalid_upgrade(self): + with self.assertInvalidRequestHeaders(InvalidUpgrade) as headers: + del headers["Upgrade"] + headers["Upgrade"] = "socketweb" + + def test_request_missing_upgrade(self): + with self.assertInvalidRequestHeaders(InvalidUpgrade) as headers: + del headers["Upgrade"] + + def test_request_additional_upgrade(self): + with self.assertInvalidRequestHeaders(InvalidUpgrade) as headers: + headers["Upgrade"] = "socketweb" + + def test_request_invalid_key_not_base64(self): + with self.assertInvalidRequestHeaders(InvalidHeaderValue) as headers: + del headers["Sec-WebSocket-Key"] + headers["Sec-WebSocket-Key"] = "!@#$%^&*()" + + def test_request_invalid_key_not_well_padded(self): + with self.assertInvalidRequestHeaders(InvalidHeaderValue) as headers: + del headers["Sec-WebSocket-Key"] + headers["Sec-WebSocket-Key"] = "CSIRmL8dWYxeAdr/XpEHRw" + + def test_request_invalid_key_not_16_bytes_long(self): + with self.assertInvalidRequestHeaders(InvalidHeaderValue) as headers: + del headers["Sec-WebSocket-Key"] + headers["Sec-WebSocket-Key"] = "ZLpprpvK4PE=" + + def test_request_missing_key(self): + with self.assertInvalidRequestHeaders(InvalidHeader) as headers: + del headers["Sec-WebSocket-Key"] + + def test_request_additional_key(self): + with self.assertInvalidRequestHeaders(InvalidHeader) as headers: + # This duplicates the Sec-WebSocket-Key header. + headers["Sec-WebSocket-Key"] = headers["Sec-WebSocket-Key"] + + def test_request_invalid_version(self): + with self.assertInvalidRequestHeaders(InvalidHeaderValue) as headers: + del headers["Sec-WebSocket-Version"] + headers["Sec-WebSocket-Version"] = "42" + + def test_request_missing_version(self): + with self.assertInvalidRequestHeaders(InvalidHeader) as headers: + del headers["Sec-WebSocket-Version"] + + def test_request_additional_version(self): + with self.assertInvalidRequestHeaders(InvalidHeader) as headers: + # This duplicates the Sec-WebSocket-Version header. + headers["Sec-WebSocket-Version"] = headers["Sec-WebSocket-Version"] + + @contextlib.contextmanager + def assertValidResponseHeaders(self, key="CSIRmL8dWYxeAdr/XpEHRw=="): + """ + Provide response headers for modification. + + Assert that the transformation kept them valid. + + """ + headers = Headers() + build_response(headers, key) + yield headers + check_response(headers, key) + + @contextlib.contextmanager + def assertInvalidResponseHeaders(self, exc_type, key="CSIRmL8dWYxeAdr/XpEHRw=="): + """ + Provide response headers for modification. + + Assert that the transformation made them invalid. 
+ + """ + headers = Headers() + build_response(headers, key) + yield headers + assert issubclass(exc_type, InvalidHandshake) + with self.assertRaises(exc_type): + check_response(headers, key) + + def test_response_invalid_connection(self): + with self.assertInvalidResponseHeaders(InvalidUpgrade) as headers: + del headers["Connection"] + headers["Connection"] = "Downgrade" + + def test_response_missing_connection(self): + with self.assertInvalidResponseHeaders(InvalidUpgrade) as headers: + del headers["Connection"] + + def test_response_additional_connection(self): + with self.assertValidResponseHeaders() as headers: + headers["Connection"] = "close" + + def test_response_invalid_upgrade(self): + with self.assertInvalidResponseHeaders(InvalidUpgrade) as headers: + del headers["Upgrade"] + headers["Upgrade"] = "socketweb" + + def test_response_missing_upgrade(self): + with self.assertInvalidResponseHeaders(InvalidUpgrade) as headers: + del headers["Upgrade"] + + def test_response_additional_upgrade(self): + with self.assertInvalidResponseHeaders(InvalidUpgrade) as headers: + headers["Upgrade"] = "socketweb" + + def test_response_invalid_accept(self): + with self.assertInvalidResponseHeaders(InvalidHeaderValue) as headers: + del headers["Sec-WebSocket-Accept"] + other_key = "1Eq4UDEFQYg3YspNgqxv5g==" + headers["Sec-WebSocket-Accept"] = accept(other_key) + + def test_response_missing_accept(self): + with self.assertInvalidResponseHeaders(InvalidHeader) as headers: + del headers["Sec-WebSocket-Accept"] + + def test_response_additional_accept(self): + with self.assertInvalidResponseHeaders(InvalidHeader) as headers: + # This duplicates the Sec-WebSocket-Accept header. + headers["Sec-WebSocket-Accept"] = headers["Sec-WebSocket-Accept"] diff --git a/tests/test_http.py b/tests/test_http.py index b09247c3e..322650354 100644 --- a/tests/test_http.py +++ b/tests/test_http.py @@ -1,135 +1,2 @@ -import asyncio - -from websockets.exceptions import SecurityError -from websockets.http import * -from websockets.http import read_headers - -from .utils import AsyncioTestCase - - -class HTTPAsyncTests(AsyncioTestCase): - def setUp(self): - super().setUp() - self.stream = asyncio.StreamReader(loop=self.loop) - - async def test_read_request(self): - # Example from the protocol overview in RFC 6455 - self.stream.feed_data( - b"GET /chat HTTP/1.1\r\n" - b"Host: server.example.com\r\n" - b"Upgrade: websocket\r\n" - b"Connection: Upgrade\r\n" - b"Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" - b"Origin: http://example.com\r\n" - b"Sec-WebSocket-Protocol: chat, superchat\r\n" - b"Sec-WebSocket-Version: 13\r\n" - b"\r\n" - ) - path, headers = await read_request(self.stream) - self.assertEqual(path, "/chat") - self.assertEqual(headers["Upgrade"], "websocket") - - async def test_read_request_empty(self): - self.stream.feed_eof() - with self.assertRaisesRegex( - EOFError, "connection closed while reading HTTP request line" - ): - await read_request(self.stream) - - async def test_read_request_invalid_request_line(self): - self.stream.feed_data(b"GET /\r\n\r\n") - with self.assertRaisesRegex(ValueError, "invalid HTTP request line: GET /"): - await read_request(self.stream) - - async def test_read_request_unsupported_method(self): - self.stream.feed_data(b"OPTIONS * HTTP/1.1\r\n\r\n") - with self.assertRaisesRegex(ValueError, "unsupported HTTP method: OPTIONS"): - await read_request(self.stream) - - async def test_read_request_unsupported_version(self): - self.stream.feed_data(b"GET /chat HTTP/1.0\r\n\r\n") - with 
self.assertRaisesRegex(ValueError, "unsupported HTTP version: HTTP/1.0"): - await read_request(self.stream) - - async def test_read_request_invalid_header(self): - self.stream.feed_data(b"GET /chat HTTP/1.1\r\nOops\r\n") - with self.assertRaisesRegex(ValueError, "invalid HTTP header line: Oops"): - await read_request(self.stream) - - async def test_read_response(self): - # Example from the protocol overview in RFC 6455 - self.stream.feed_data( - b"HTTP/1.1 101 Switching Protocols\r\n" - b"Upgrade: websocket\r\n" - b"Connection: Upgrade\r\n" - b"Sec-WebSocket-Accept: s3pPLMBiTxaQ9kYGzzhZRbK+xOo=\r\n" - b"Sec-WebSocket-Protocol: chat\r\n" - b"\r\n" - ) - status_code, reason, headers = await read_response(self.stream) - self.assertEqual(status_code, 101) - self.assertEqual(reason, "Switching Protocols") - self.assertEqual(headers["Upgrade"], "websocket") - - async def test_read_response_empty(self): - self.stream.feed_eof() - with self.assertRaisesRegex( - EOFError, "connection closed while reading HTTP status line" - ): - await read_response(self.stream) - - async def test_read_request_invalid_status_line(self): - self.stream.feed_data(b"Hello!\r\n") - with self.assertRaisesRegex(ValueError, "invalid HTTP status line: Hello!"): - await read_response(self.stream) - - async def test_read_response_unsupported_version(self): - self.stream.feed_data(b"HTTP/1.0 400 Bad Request\r\n\r\n") - with self.assertRaisesRegex(ValueError, "unsupported HTTP version: HTTP/1.0"): - await read_response(self.stream) - - async def test_read_response_invalid_status(self): - self.stream.feed_data(b"HTTP/1.1 OMG WTF\r\n\r\n") - with self.assertRaisesRegex(ValueError, "invalid HTTP status code: OMG"): - await read_response(self.stream) - - async def test_read_response_unsupported_status(self): - self.stream.feed_data(b"HTTP/1.1 007 My name is Bond\r\n\r\n") - with self.assertRaisesRegex(ValueError, "unsupported HTTP status code: 007"): - await read_response(self.stream) - - async def test_read_response_invalid_reason(self): - self.stream.feed_data(b"HTTP/1.1 200 \x7f\r\n\r\n") - with self.assertRaisesRegex(ValueError, "invalid HTTP reason phrase: \\x7f"): - await read_response(self.stream) - - async def test_read_response_invalid_header(self): - self.stream.feed_data(b"HTTP/1.1 500 Internal Server Error\r\nOops\r\n") - with self.assertRaisesRegex(ValueError, "invalid HTTP header line: Oops"): - await read_response(self.stream) - - async def test_header_name(self): - self.stream.feed_data(b"foo bar: baz qux\r\n\r\n") - with self.assertRaises(ValueError): - await read_headers(self.stream) - - async def test_header_value(self): - self.stream.feed_data(b"foo: \x00\x00\x0f\r\n\r\n") - with self.assertRaises(ValueError): - await read_headers(self.stream) - - async def test_headers_limit(self): - self.stream.feed_data(b"foo: bar\r\n" * 257 + b"\r\n") - with self.assertRaises(SecurityError): - await read_headers(self.stream) - - async def test_line_limit(self): - # Header line contains 5 + 4090 + 2 = 4097 bytes. - self.stream.feed_data(b"foo: " + b"a" * 4090 + b"\r\n\r\n") - with self.assertRaises(SecurityError): - await read_headers(self.stream) - - async def test_line_ending(self): - self.stream.feed_data(b"foo: bar\n\n") - with self.assertRaises(EOFError): - await read_headers(self.stream) +# Check that the legacy http module imports without an exception. 
+from websockets.http import * # noqa diff --git a/tests/test_http_legacy.py b/tests/test_http_legacy.py new file mode 100644 index 000000000..3b43a6274 --- /dev/null +++ b/tests/test_http_legacy.py @@ -0,0 +1,135 @@ +import asyncio + +from websockets.exceptions import SecurityError +from websockets.http_legacy import * +from websockets.http_legacy import read_headers + +from .utils import AsyncioTestCase + + +class HTTPAsyncTests(AsyncioTestCase): + def setUp(self): + super().setUp() + self.stream = asyncio.StreamReader(loop=self.loop) + + async def test_read_request(self): + # Example from the protocol overview in RFC 6455 + self.stream.feed_data( + b"GET /chat HTTP/1.1\r\n" + b"Host: server.example.com\r\n" + b"Upgrade: websocket\r\n" + b"Connection: Upgrade\r\n" + b"Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" + b"Origin: http://example.com\r\n" + b"Sec-WebSocket-Protocol: chat, superchat\r\n" + b"Sec-WebSocket-Version: 13\r\n" + b"\r\n" + ) + path, headers = await read_request(self.stream) + self.assertEqual(path, "/chat") + self.assertEqual(headers["Upgrade"], "websocket") + + async def test_read_request_empty(self): + self.stream.feed_eof() + with self.assertRaisesRegex( + EOFError, "connection closed while reading HTTP request line" + ): + await read_request(self.stream) + + async def test_read_request_invalid_request_line(self): + self.stream.feed_data(b"GET /\r\n\r\n") + with self.assertRaisesRegex(ValueError, "invalid HTTP request line: GET /"): + await read_request(self.stream) + + async def test_read_request_unsupported_method(self): + self.stream.feed_data(b"OPTIONS * HTTP/1.1\r\n\r\n") + with self.assertRaisesRegex(ValueError, "unsupported HTTP method: OPTIONS"): + await read_request(self.stream) + + async def test_read_request_unsupported_version(self): + self.stream.feed_data(b"GET /chat HTTP/1.0\r\n\r\n") + with self.assertRaisesRegex(ValueError, "unsupported HTTP version: HTTP/1.0"): + await read_request(self.stream) + + async def test_read_request_invalid_header(self): + self.stream.feed_data(b"GET /chat HTTP/1.1\r\nOops\r\n") + with self.assertRaisesRegex(ValueError, "invalid HTTP header line: Oops"): + await read_request(self.stream) + + async def test_read_response(self): + # Example from the protocol overview in RFC 6455 + self.stream.feed_data( + b"HTTP/1.1 101 Switching Protocols\r\n" + b"Upgrade: websocket\r\n" + b"Connection: Upgrade\r\n" + b"Sec-WebSocket-Accept: s3pPLMBiTxaQ9kYGzzhZRbK+xOo=\r\n" + b"Sec-WebSocket-Protocol: chat\r\n" + b"\r\n" + ) + status_code, reason, headers = await read_response(self.stream) + self.assertEqual(status_code, 101) + self.assertEqual(reason, "Switching Protocols") + self.assertEqual(headers["Upgrade"], "websocket") + + async def test_read_response_empty(self): + self.stream.feed_eof() + with self.assertRaisesRegex( + EOFError, "connection closed while reading HTTP status line" + ): + await read_response(self.stream) + + async def test_read_request_invalid_status_line(self): + self.stream.feed_data(b"Hello!\r\n") + with self.assertRaisesRegex(ValueError, "invalid HTTP status line: Hello!"): + await read_response(self.stream) + + async def test_read_response_unsupported_version(self): + self.stream.feed_data(b"HTTP/1.0 400 Bad Request\r\n\r\n") + with self.assertRaisesRegex(ValueError, "unsupported HTTP version: HTTP/1.0"): + await read_response(self.stream) + + async def test_read_response_invalid_status(self): + self.stream.feed_data(b"HTTP/1.1 OMG WTF\r\n\r\n") + with self.assertRaisesRegex(ValueError, "invalid HTTP status 
code: OMG"): + await read_response(self.stream) + + async def test_read_response_unsupported_status(self): + self.stream.feed_data(b"HTTP/1.1 007 My name is Bond\r\n\r\n") + with self.assertRaisesRegex(ValueError, "unsupported HTTP status code: 007"): + await read_response(self.stream) + + async def test_read_response_invalid_reason(self): + self.stream.feed_data(b"HTTP/1.1 200 \x7f\r\n\r\n") + with self.assertRaisesRegex(ValueError, "invalid HTTP reason phrase: \\x7f"): + await read_response(self.stream) + + async def test_read_response_invalid_header(self): + self.stream.feed_data(b"HTTP/1.1 500 Internal Server Error\r\nOops\r\n") + with self.assertRaisesRegex(ValueError, "invalid HTTP header line: Oops"): + await read_response(self.stream) + + async def test_header_name(self): + self.stream.feed_data(b"foo bar: baz qux\r\n\r\n") + with self.assertRaises(ValueError): + await read_headers(self.stream) + + async def test_header_value(self): + self.stream.feed_data(b"foo: \x00\x00\x0f\r\n\r\n") + with self.assertRaises(ValueError): + await read_headers(self.stream) + + async def test_headers_limit(self): + self.stream.feed_data(b"foo: bar\r\n" * 257 + b"\r\n") + with self.assertRaises(SecurityError): + await read_headers(self.stream) + + async def test_line_limit(self): + # Header line contains 5 + 4090 + 2 = 4097 bytes. + self.stream.feed_data(b"foo: " + b"a" * 4090 + b"\r\n\r\n") + with self.assertRaises(SecurityError): + await read_headers(self.stream) + + async def test_line_ending(self): + self.stream.feed_data(b"foo: bar\n\n") + with self.assertRaises(EOFError): + await read_headers(self.stream) From 1c99e5b9fabd3b431c5697a90193ef8e1cd17d58 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 6 Oct 2019 12:34:07 +0200 Subject: [PATCH 198/281] Move all type definitions to the typing module. 
--- src/websockets/handshake_legacy.py | 3 ++- src/websockets/headers.py | 14 +++++++++----- src/websockets/typing.py | 14 ++++++++++---- tests/test_typing.py | 1 + 4 files changed, 22 insertions(+), 10 deletions(-) create mode 100644 tests/test_typing.py diff --git a/src/websockets/handshake_legacy.py b/src/websockets/handshake_legacy.py index 3fca45545..9683e8556 100644 --- a/src/websockets/handshake_legacy.py +++ b/src/websockets/handshake_legacy.py @@ -34,7 +34,8 @@ from .datastructures import Headers, MultipleValuesError from .exceptions import InvalidHeader, InvalidHeaderValue, InvalidUpgrade from .handshake import GUID -from .headers import ConnectionOption, UpgradeProtocol, parse_connection, parse_upgrade +from .headers import parse_connection, parse_upgrade +from .typing import ConnectionOption, UpgradeProtocol __all__ = ["build_request", "check_request", "build_response", "check_response"] diff --git a/src/websockets/headers.py b/src/websockets/headers.py index f33c94c04..256c66bb1 100644 --- a/src/websockets/headers.py +++ b/src/websockets/headers.py @@ -10,10 +10,17 @@ import base64 import binascii import re -from typing import Callable, List, NewType, Optional, Sequence, Tuple, TypeVar, cast +from typing import Callable, List, Optional, Sequence, Tuple, TypeVar, cast from .exceptions import InvalidHeaderFormat, InvalidHeaderValue -from .typing import ExtensionHeader, ExtensionName, ExtensionParameter, Subprotocol +from .typing import ( + ConnectionOption, + ExtensionHeader, + ExtensionName, + ExtensionParameter, + Subprotocol, + UpgradeProtocol, +) __all__ = [ @@ -31,9 +38,6 @@ T = TypeVar("T") -ConnectionOption = NewType("ConnectionOption", str) -UpgradeProtocol = NewType("UpgradeProtocol", str) - # To avoid a dependency on a parsing library, we implement manually the ABNF # described in https://tools.ietf.org/html/rfc6455#section-9.1 with the diff --git a/src/websockets/typing.py b/src/websockets/typing.py index a5062bc4b..ca66a8c54 100644 --- a/src/websockets/typing.py +++ b/src/websockets/typing.py @@ -28,7 +28,6 @@ ExtensionParameter = Tuple[str, Optional[str]] - ExtensionParameter__doc__ = """Parameter of a WebSocket extension""" try: ExtensionParameter.__doc__ = ExtensionParameter__doc__ @@ -37,8 +36,7 @@ ExtensionHeader = Tuple[ExtensionName, List[ExtensionParameter]] - -ExtensionHeader__doc__ = """Item parsed in a Sec-WebSocket-Extensions header""" +ExtensionHeader__doc__ = """Extension in a Sec-WebSocket-Extensions header""" try: ExtensionHeader.__doc__ = ExtensionHeader__doc__ except AttributeError: # pragma: no cover @@ -46,4 +44,12 @@ Subprotocol = NewType("Subprotocol", str) -Subprotocol.__doc__ = """Items parsed in a Sec-WebSocket-Protocol header""" +Subprotocol.__doc__ = """Subprotocol value in a Sec-WebSocket-Protocol header""" + + +ConnectionOption = NewType("ConnectionOption", str) +ConnectionOption.__doc__ = """Connection option in a Connection header""" + + +UpgradeProtocol = NewType("UpgradeProtocol", str) +UpgradeProtocol.__doc__ = """Upgrade protocol in an Upgrade header""" diff --git a/tests/test_typing.py b/tests/test_typing.py new file mode 100644 index 000000000..6eb1fe6c5 --- /dev/null +++ b/tests/test_typing.py @@ -0,0 +1 @@ +from websockets.typing import * # noqa From 80aea12a584b504f77e5a186c4c6b26444233297 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 25 Jan 2020 19:37:12 +0100 Subject: [PATCH 199/281] Add a StreamReader based on generator coroutines. 
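StreamReader never blocks: read_line(), read_exact(), and read_to_eof() are
generator-based coroutines that yield whenever they need more data and return
their result through StopIteration. A minimal sketch, not part of the patch,
of how a caller might drive one by hand:

    from websockets.streams import StreamReader

    reader = StreamReader()
    gen = reader.read_line()

    next(gen)                   # buffer is empty, so the coroutine yields
    reader.feed_data(b"spam\n")
    try:
        next(gen)               # resume now that a full line is buffered
    except StopIteration as exc:
        line = exc.value        # b"spam\n" (the LF is included)

    reader.feed_eof()

The idea is that the I/O layer feeds bytes with feed_data() and feed_eof()
and resumes whichever read coroutine is currently pending.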
--- src/websockets/streams.py | 115 ++++++++++++++++++++++++++++++ tests/test_streams.py | 146 ++++++++++++++++++++++++++++++++++++++ tests/utils.py | 18 +++++ 3 files changed, 279 insertions(+) create mode 100644 src/websockets/streams.py create mode 100644 tests/test_streams.py diff --git a/src/websockets/streams.py b/src/websockets/streams.py new file mode 100644 index 000000000..6f3163034 --- /dev/null +++ b/src/websockets/streams.py @@ -0,0 +1,115 @@ +from typing import Generator + + +class StreamReader: + """ + Generator-based stream reader. + + This class doesn't support concurrent calls to :meth:`read_line()`, + :meth:`read_exact()`, or :meth:`read_to_eof()`. Make sure calls are + serialized. + + """ + + def __init__(self) -> None: + self.buffer = bytearray() + self.eof = False + + def read_line(self) -> Generator[None, None, bytes]: + """ + Read a LF-terminated line from the stream. + + The return value includes the LF character. + + This is a generator-based coroutine. + + :raises EOFError: if the stream ends without a LF + + """ + n = 0 # number of bytes to read + p = 0 # number of bytes without a newline + while True: + n = self.buffer.find(b"\n", p) + 1 + if n > 0: + break + p = len(self.buffer) + if self.eof: + raise EOFError(f"stream ends after {p} bytes, before end of line") + yield + r = self.buffer[:n] + del self.buffer[:n] + return r + + def read_exact(self, n: int) -> Generator[None, None, bytes]: + """ + Read ``n`` bytes from the stream. + + This is a generator-based coroutine. + + :raises EOFError: if the stream ends in less than ``n`` bytes + + """ + assert n >= 0 + while len(self.buffer) < n: + if self.eof: + p = len(self.buffer) + raise EOFError(f"stream ends after {p} bytes, expected {n} bytes") + yield + r = self.buffer[:n] + del self.buffer[:n] + return r + + def read_to_eof(self) -> Generator[None, None, bytes]: + """ + Read all bytes from the stream. + + This is a generator-based coroutine. + + """ + while not self.eof: + yield + r = self.buffer[:] + del self.buffer[:] + return r + + def at_eof(self) -> Generator[None, None, bool]: + """ + Tell whether the stream has ended and all data was read. + + This is a generator-based coroutine. + + """ + while True: + if self.buffer: + return False + if self.eof: + return True + # When all data was read but the stream hasn't ended, we can't + # tell if until either feed_data() or feed_eof() is called. + yield + + def feed_data(self, data: bytes) -> None: + """ + Write ``data`` to the stream. + + :meth:`feed_data()` cannot be called after :meth:`feed_eof()`. + + :raises EOFError: if the stream has ended + + """ + if self.eof: + raise EOFError("stream ended") + self.buffer += data + + def feed_eof(self) -> None: + """ + End the stream. + + :meth:`feed_eof()` must be called at must once. 
+ + :raises EOFError: if the stream has ended + + """ + if self.eof: + raise EOFError("stream ended") + self.eof = True diff --git a/tests/test_streams.py b/tests/test_streams.py new file mode 100644 index 000000000..566deb2db --- /dev/null +++ b/tests/test_streams.py @@ -0,0 +1,146 @@ +from websockets.streams import StreamReader + +from .utils import GeneratorTestCase + + +class StreamReaderTests(GeneratorTestCase): + def setUp(self): + self.reader = StreamReader() + + def test_read_line(self): + self.reader.feed_data(b"spam\neggs\n") + + gen = self.reader.read_line() + line = self.assertGeneratorReturns(gen) + self.assertEqual(line, b"spam\n") + + gen = self.reader.read_line() + line = self.assertGeneratorReturns(gen) + self.assertEqual(line, b"eggs\n") + + def test_read_line_need_more_data(self): + self.reader.feed_data(b"spa") + + gen = self.reader.read_line() + self.assertGeneratorRunning(gen) + self.reader.feed_data(b"m\neg") + line = self.assertGeneratorReturns(gen) + self.assertEqual(line, b"spam\n") + + gen = self.reader.read_line() + self.assertGeneratorRunning(gen) + self.reader.feed_data(b"gs\n") + line = self.assertGeneratorReturns(gen) + self.assertEqual(line, b"eggs\n") + + def test_read_line_not_enough_data(self): + self.reader.feed_data(b"spa") + self.reader.feed_eof() + + gen = self.reader.read_line() + with self.assertRaises(EOFError) as raised: + next(gen) + self.assertEqual( + str(raised.exception), "stream ends after 3 bytes, before end of line" + ) + + def test_read_exact(self): + self.reader.feed_data(b"spameggs") + + gen = self.reader.read_exact(4) + data = self.assertGeneratorReturns(gen) + self.assertEqual(data, b"spam") + + gen = self.reader.read_exact(4) + data = self.assertGeneratorReturns(gen) + self.assertEqual(data, b"eggs") + + def test_read_exact_need_more_data(self): + self.reader.feed_data(b"spa") + + gen = self.reader.read_exact(4) + self.assertGeneratorRunning(gen) + self.reader.feed_data(b"meg") + data = self.assertGeneratorReturns(gen) + self.assertEqual(data, b"spam") + + gen = self.reader.read_exact(4) + self.assertGeneratorRunning(gen) + self.reader.feed_data(b"gs") + data = self.assertGeneratorReturns(gen) + self.assertEqual(data, b"eggs") + + def test_read_exact_not_enough_data(self): + self.reader.feed_data(b"spa") + self.reader.feed_eof() + + gen = self.reader.read_exact(4) + with self.assertRaises(EOFError) as raised: + next(gen) + self.assertEqual( + str(raised.exception), "stream ends after 3 bytes, expected 4 bytes" + ) + + def test_read_to_eof(self): + gen = self.reader.read_to_eof() + + self.reader.feed_data(b"spam") + self.assertGeneratorRunning(gen) + + self.reader.feed_eof() + data = self.assertGeneratorReturns(gen) + self.assertEqual(data, b"spam") + + def test_read_to_eof_at_eof(self): + self.reader.feed_eof() + + gen = self.reader.read_to_eof() + data = self.assertGeneratorReturns(gen) + self.assertEqual(data, b"") + + def test_at_eof_after_feed_data(self): + gen = self.reader.at_eof() + self.assertGeneratorRunning(gen) + self.reader.feed_data(b"spam") + eof = self.assertGeneratorReturns(gen) + self.assertFalse(eof) + + def test_at_eof_after_feed_eof(self): + gen = self.reader.at_eof() + self.assertGeneratorRunning(gen) + self.reader.feed_eof() + eof = self.assertGeneratorReturns(gen) + self.assertTrue(eof) + + def test_feed_data_after_feed_data(self): + self.reader.feed_data(b"spam") + self.reader.feed_data(b"eggs") + + gen = self.reader.read_exact(8) + data = self.assertGeneratorReturns(gen) + self.assertEqual(data, b"spameggs") 
+ gen = self.reader.at_eof() + self.assertGeneratorRunning(gen) + + def test_feed_eof_after_feed_data(self): + self.reader.feed_data(b"spam") + self.reader.feed_eof() + + gen = self.reader.read_exact(4) + data = self.assertGeneratorReturns(gen) + self.assertEqual(data, b"spam") + gen = self.reader.at_eof() + eof = self.assertGeneratorReturns(gen) + self.assertTrue(eof) + + def test_feed_data_after_feed_eof(self): + self.reader.feed_eof() + with self.assertRaises(EOFError) as raised: + self.reader.feed_data(b"spam") + self.assertEqual(str(raised.exception), "stream ended") + + def test_feed_eof_after_feed_eof(self): + self.reader.feed_eof() + with self.assertRaises(EOFError) as raised: + self.reader.feed_eof() + self.assertEqual(str(raised.exception), "stream ended") diff --git a/tests/utils.py b/tests/utils.py index 983a91edf..bbffa8649 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -7,6 +7,24 @@ import unittest +class GeneratorTestCase(unittest.TestCase): + def assertGeneratorRunning(self, gen): + """ + Check that a generator-based coroutine hasn't completed yet. + + """ + next(gen) + + def assertGeneratorReturns(self, gen): + """ + Check that a generator-based coroutine completes and return its value. + + """ + with self.assertRaises(StopIteration) as raised: + next(gen) + return raised.exception.value + + class AsyncioTestCase(unittest.TestCase): """ Base class for tests that sets up an isolated event loop for each test. From 624b9d20061c78df81f659af2c87557c764ebb19 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 6 Oct 2019 21:27:06 +0200 Subject: [PATCH 200/281] Add a sans-I/O compatible framing implementation. --- docs/changelog.rst | 2 + src/websockets/extensions/base.py | 2 +- .../extensions/permessage_deflate.py | 2 +- src/websockets/frames.py | 322 ++++++++++++++++++ src/websockets/framing.py | 233 +------------ src/websockets/protocol.py | 19 +- tests/__init__.py | 10 + tests/extensions/test_permessage_deflate.py | 2 +- tests/test_frames.py | 232 +++++++++++++ tests/test_framing.py | 103 +----- tests/test_protocol.py | 11 +- 11 files changed, 624 insertions(+), 314 deletions(-) create mode 100644 src/websockets/frames.py create mode 100644 tests/test_frames.py diff --git a/docs/changelog.rst b/docs/changelog.rst index 3cda4919f..68ec6f80c 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -20,6 +20,8 @@ Changelog * :mod:`websockets.http` is deprecated. + * :mod:`websocket.framing` is deprecated. + Aliases provide backwards compatibility for all previously public APIs. 
8.1 diff --git a/src/websockets/extensions/base.py b/src/websockets/extensions/base.py index aa52a7adb..cfc090799 100644 --- a/src/websockets/extensions/base.py +++ b/src/websockets/extensions/base.py @@ -10,7 +10,7 @@ from typing import List, Optional, Sequence, Tuple -from ..framing import Frame +from ..frames import Frame from ..typing import ExtensionName, ExtensionParameter diff --git a/src/websockets/extensions/permessage_deflate.py b/src/websockets/extensions/permessage_deflate.py index e38d9edab..f1adf8bb6 100644 --- a/src/websockets/extensions/permessage_deflate.py +++ b/src/websockets/extensions/permessage_deflate.py @@ -14,7 +14,7 @@ NegotiationError, PayloadTooBig, ) -from ..framing import CTRL_OPCODES, OP_CONT, Frame +from ..frames import CTRL_OPCODES, OP_CONT, Frame from ..typing import ExtensionName, ExtensionParameter from .base import ClientExtensionFactory, Extension, ServerExtensionFactory diff --git a/src/websockets/frames.py b/src/websockets/frames.py new file mode 100644 index 000000000..5ed8e483f --- /dev/null +++ b/src/websockets/frames.py @@ -0,0 +1,322 @@ +""" +Parse and serialize WebSocket frames. + +""" + +import io +import random +import struct +from typing import Callable, Generator, NamedTuple, Optional, Sequence, Tuple + +from .exceptions import PayloadTooBig, ProtocolError +from .typing import Data + + +try: + from .speedups import apply_mask +except ImportError: # pragma: no cover + from .utils import apply_mask + + +__all__ = [ + "DATA_OPCODES", + "CTRL_OPCODES", + "OP_CONT", + "OP_TEXT", + "OP_BINARY", + "OP_CLOSE", + "OP_PING", + "OP_PONG", + "Frame", + "prepare_data", + "prepare_ctrl", + "parse_close", + "serialize_close", +] + +DATA_OPCODES = OP_CONT, OP_TEXT, OP_BINARY = 0x00, 0x01, 0x02 +CTRL_OPCODES = OP_CLOSE, OP_PING, OP_PONG = 0x08, 0x09, 0x0A + +# Close code that are allowed in a close frame. +# Using a list optimizes `code in EXTERNAL_CLOSE_CODES`. +EXTERNAL_CLOSE_CODES = [1000, 1001, 1002, 1003, 1007, 1008, 1009, 1010, 1011] + + +# Consider converting to a dataclass when dropping support for Python < 3.7. + + +class Frame(NamedTuple): + """ + WebSocket frame. + + :param bool fin: FIN bit + :param bool rsv1: RSV1 bit + :param bool rsv2: RSV2 bit + :param bool rsv3: RSV3 bit + :param int opcode: opcode + :param bytes data: payload data + + Only these fields are needed. The MASK bit, payload length and masking-key + are handled on the fly by :func:`parse_frame` and :meth:`serialize_frame`. + + """ + + fin: bool + opcode: int + data: bytes + rsv1: bool = False + rsv2: bool = False + rsv3: bool = False + + @classmethod + def parse( + cls, + read_exact: Callable[[int], Generator[None, None, bytes]], + *, + mask: bool, + max_size: Optional[int] = None, + extensions: Optional[Sequence["websockets.extensions.base.Extension"]] = None, + ) -> Generator[None, None, "Frame"]: + """ + Read a WebSocket frame. + + :param read_exact: generator-based coroutine that reads the requested + number of bytes or raises an exception if there isn't enough data + :param mask: whether the frame should be masked i.e. whether the read + happens on the server side + :param max_size: maximum payload size in bytes + :param extensions: list of classes with a ``decode()`` method that + transforms the frame and return a new frame; extensions are applied + in reverse order + :raises ~websockets.exceptions.PayloadTooBig: if the frame exceeds + ``max_size`` + :raises ~websockets.exceptions.ProtocolError: if the frame + contains incorrect values + + """ + # Read the header. 
+ data = yield from read_exact(2) + head1, head2 = struct.unpack("!BB", data) + + # While not Pythonic, this is marginally faster than calling bool(). + fin = True if head1 & 0b10000000 else False + rsv1 = True if head1 & 0b01000000 else False + rsv2 = True if head1 & 0b00100000 else False + rsv3 = True if head1 & 0b00010000 else False + opcode = head1 & 0b00001111 + + if (True if head2 & 0b10000000 else False) != mask: + raise ProtocolError("incorrect masking") + + length = head2 & 0b01111111 + if length == 126: + data = yield from read_exact(2) + (length,) = struct.unpack("!H", data) + elif length == 127: + data = yield from read_exact(8) + (length,) = struct.unpack("!Q", data) + if max_size is not None and length > max_size: + raise PayloadTooBig( + f"payload length exceeds size limit ({length} > {max_size} bytes)" + ) + if mask: + mask_bits = yield from read_exact(4) + + # Read the data. + data = yield from read_exact(length) + if mask: + data = apply_mask(data, mask_bits) + + frame = cls(fin, opcode, data, rsv1, rsv2, rsv3) + + if extensions is None: + extensions = [] + for extension in reversed(extensions): + frame = extension.decode(frame, max_size=max_size) + + frame.check() + + return frame + + def serialize( + self, + *, + mask: bool, + extensions: Optional[Sequence["websockets.extensions.base.Extension"]] = None, + ) -> bytes: + """ + Write a WebSocket frame. + + :param frame: frame to write + :param mask: whether the frame should be masked i.e. whether the write + happens on the client side + :param extensions: list of classes with an ``encode()`` method that + transform the frame and return a new frame; extensions are applied + in order + :raises ~websockets.exceptions.ProtocolError: if the frame + contains incorrect values + + """ + self.check() + + if extensions is None: + extensions = [] + for extension in extensions: + self = extension.encode(self) + + output = io.BytesIO() + + # Prepare the header. + head1 = ( + (0b10000000 if self.fin else 0) + | (0b01000000 if self.rsv1 else 0) + | (0b00100000 if self.rsv2 else 0) + | (0b00010000 if self.rsv3 else 0) + | self.opcode + ) + + head2 = 0b10000000 if mask else 0 + + length = len(self.data) + if length < 126: + output.write(struct.pack("!BB", head1, head2 | length)) + elif length < 65536: + output.write(struct.pack("!BBH", head1, head2 | 126, length)) + else: + output.write(struct.pack("!BBQ", head1, head2 | 127, length)) + + if mask: + mask_bits = struct.pack("!I", random.getrandbits(32)) + output.write(mask_bits) + + # Prepare the data. + if mask: + data = apply_mask(self.data, mask_bits) + else: + data = self.data + output.write(data) + + return output.getvalue() + + def check(self) -> None: + """ + Check that reserved bits and opcode have acceptable values. + + :raises ~websockets.exceptions.ProtocolError: if a reserved + bit or the opcode is invalid + + """ + if self.rsv1 or self.rsv2 or self.rsv3: + raise ProtocolError("reserved bits must be 0") + + if self.opcode in DATA_OPCODES: + return + elif self.opcode in CTRL_OPCODES: + if len(self.data) > 125: + raise ProtocolError("control frame too long") + if not self.fin: + raise ProtocolError("fragmented control frame") + else: + raise ProtocolError(f"invalid opcode: {self.opcode}") + + +def prepare_data(data: Data) -> Tuple[int, bytes]: + """ + Convert a string or byte-like object to an opcode and a bytes-like object. + + This function is designed for data frames. 
+ + If ``data`` is a :class:`str`, return ``OP_TEXT`` and a :class:`bytes` + object encoding ``data`` in UTF-8. + + If ``data`` is a bytes-like object, return ``OP_BINARY`` and a bytes-like + object. + + :raises TypeError: if ``data`` doesn't have a supported type + + """ + if isinstance(data, str): + return OP_TEXT, data.encode("utf-8") + elif isinstance(data, (bytes, bytearray)): + return OP_BINARY, data + elif isinstance(data, memoryview): + if data.c_contiguous: + return OP_BINARY, data + else: + return OP_BINARY, data.tobytes() + else: + raise TypeError("data must be bytes-like or str") + + +def prepare_ctrl(data: Data) -> bytes: + """ + Convert a string or byte-like object to bytes. + + This function is designed for ping and pong frames. + + If ``data`` is a :class:`str`, return a :class:`bytes` object encoding + ``data`` in UTF-8. + + If ``data`` is a bytes-like object, return a :class:`bytes` object. + + :raises TypeError: if ``data`` doesn't have a supported type + + """ + if isinstance(data, str): + return data.encode("utf-8") + elif isinstance(data, (bytes, bytearray)): + return bytes(data) + elif isinstance(data, memoryview): + return data.tobytes() + else: + raise TypeError("data must be bytes-like or str") + + +def parse_close(data: bytes) -> Tuple[int, str]: + """ + Parse the payload from a close frame. + + Return ``(code, reason)``. + + :raises ~websockets.exceptions.ProtocolError: if data is ill-formed + :raises UnicodeDecodeError: if the reason isn't valid UTF-8 + + """ + length = len(data) + if length >= 2: + (code,) = struct.unpack("!H", data[:2]) + check_close(code) + reason = data[2:].decode("utf-8") + return code, reason + elif length == 0: + return 1005, "" + else: + assert length == 1 + raise ProtocolError("close frame too short") + + +def serialize_close(code: int, reason: str) -> bytes: + """ + Serialize the payload for a close frame. + + This is the reverse of :func:`parse_close`. + + """ + check_close(code) + return struct.pack("!H", code) + reason.encode("utf-8") + + +def check_close(code: int) -> None: + """ + Check that the close code has an acceptable value for a close frame. + + :raises ~websockets.exceptions.ProtocolError: if the close code + is invalid + + """ + if not (code in EXTERNAL_CLOSE_CODES or 3000 <= code < 5000): + raise ProtocolError("invalid status code") + + +# at the bottom to allow circular import, because Extension depends on Frame +import websockets.extensions.base # isort:skip # noqa diff --git a/src/websockets/framing.py b/src/websockets/framing.py index 26e58cdbf..221afad6f 100644 --- a/src/websockets/framing.py +++ b/src/websockets/framing.py @@ -10,13 +10,12 @@ """ -import io -import random import struct -from typing import Any, Awaitable, Callable, NamedTuple, Optional, Sequence, Tuple +import warnings +from typing import Any, Awaitable, Callable, Optional, Sequence from .exceptions import PayloadTooBig, ProtocolError -from .typing import Data +from .frames import Frame as NewFrame try: @@ -25,56 +24,10 @@ from .utils import apply_mask -__all__ = [ - "DATA_OPCODES", - "CTRL_OPCODES", - "OP_CONT", - "OP_TEXT", - "OP_BINARY", - "OP_CLOSE", - "OP_PING", - "OP_PONG", - "Frame", - "prepare_data", - "encode_data", - "parse_close", - "serialize_close", -] +warnings.warn("websockets.framing is deprecated", DeprecationWarning) -DATA_OPCODES = OP_CONT, OP_TEXT, OP_BINARY = 0x00, 0x01, 0x02 -CTRL_OPCODES = OP_CLOSE, OP_PING, OP_PONG = 0x08, 0x09, 0x0A - -# Close code that are allowed in a close frame. 
-# Using a list optimizes `code in EXTERNAL_CLOSE_CODES`. -EXTERNAL_CLOSE_CODES = [1000, 1001, 1002, 1003, 1007, 1008, 1009, 1010, 1011] - - -# Consider converting to a dataclass when dropping support for Python < 3.7. - - -class Frame(NamedTuple): - """ - WebSocket frame. - - :param bool fin: FIN bit - :param bool rsv1: RSV1 bit - :param bool rsv2: RSV2 bit - :param bool rsv3: RSV3 bit - :param int opcode: opcode - :param bytes data: payload data - - Only these fields are needed. The MASK bit, payload length and masking-key - are handled on the fly by :meth:`read` and :meth:`write`. - - """ - - fin: bool - opcode: int - data: bytes - rsv1: bool = False - rsv2: bool = False - rsv3: bool = False +class Frame(NewFrame): @classmethod async def read( cls, @@ -101,6 +54,7 @@ async def read( contains incorrect values """ + # Read the header. data = await reader(2) head1, head2 = struct.unpack("!BB", data) @@ -139,14 +93,14 @@ async def read( if extensions is None: extensions = [] for extension in reversed(extensions): - frame = extension.decode(frame, max_size=max_size) + frame = cls(*extension.decode(frame, max_size=max_size)) frame.check() return frame def write( - frame, + self, write: Callable[[bytes], Any], *, mask: bool, @@ -166,176 +120,17 @@ def write( contains incorrect values """ - # The first parameter is called `frame` rather than `self`, - # but it's the instance of class to which this method is bound. - - frame.check() - - if extensions is None: - extensions = [] - for extension in extensions: - frame = extension.encode(frame) - - output = io.BytesIO() - - # Prepare the header. - head1 = ( - (0b10000000 if frame.fin else 0) - | (0b01000000 if frame.rsv1 else 0) - | (0b00100000 if frame.rsv2 else 0) - | (0b00010000 if frame.rsv3 else 0) - | frame.opcode - ) - - head2 = 0b10000000 if mask else 0 - - length = len(frame.data) - if length < 126: - output.write(struct.pack("!BB", head1, head2 | length)) - elif length < 65536: - output.write(struct.pack("!BBH", head1, head2 | 126, length)) - else: - output.write(struct.pack("!BBQ", head1, head2 | 127, length)) - - if mask: - mask_bits = struct.pack("!I", random.getrandbits(32)) - output.write(mask_bits) - - # Prepare the data. - if mask: - data = apply_mask(frame.data, mask_bits) - else: - data = frame.data - output.write(data) - - # Send the frame. - # The frame is written in a single call to write in order to prevent # TCP fragmentation. See #68 for details. This also makes it safe to # send frames concurrently from multiple coroutines. - write(output.getvalue()) - - def check(frame) -> None: - """ - Check that reserved bits and opcode have acceptable values. - - :raises ~websockets.exceptions.ProtocolError: if a reserved - bit or the opcode is invalid - - """ - # The first parameter is called `frame` rather than `self`, - # but it's the instance of class to which this method is bound. - - if frame.rsv1 or frame.rsv2 or frame.rsv3: - raise ProtocolError("reserved bits must be 0") - - if frame.opcode in DATA_OPCODES: - return - elif frame.opcode in CTRL_OPCODES: - if len(frame.data) > 125: - raise ProtocolError("control frame too long") - if not frame.fin: - raise ProtocolError("fragmented control frame") - else: - raise ProtocolError(f"invalid opcode: {frame.opcode}") - - -def prepare_data(data: Data) -> Tuple[int, bytes]: - """ - Convert a string or byte-like object to an opcode and a bytes-like object. - - This function is designed for data frames. 
- - If ``data`` is a :class:`str`, return ``OP_TEXT`` and a :class:`bytes` - object encoding ``data`` in UTF-8. - - If ``data`` is a bytes-like object, return ``OP_BINARY`` and a bytes-like - object. - - :raises TypeError: if ``data`` doesn't have a supported type - - """ - if isinstance(data, str): - return OP_TEXT, data.encode("utf-8") - elif isinstance(data, (bytes, bytearray)): - return OP_BINARY, data - elif isinstance(data, memoryview): - if data.c_contiguous: - return OP_BINARY, data - else: - return OP_BINARY, data.tobytes() - else: - raise TypeError("data must be bytes-like or str") - - -def encode_data(data: Data) -> bytes: - """ - Convert a string or byte-like object to bytes. - - This function is designed for ping and pong frames. - - If ``data`` is a :class:`str`, return a :class:`bytes` object encoding - ``data`` in UTF-8. - - If ``data`` is a bytes-like object, return a :class:`bytes` object. - - :raises TypeError: if ``data`` doesn't have a supported type - - """ - if isinstance(data, str): - return data.encode("utf-8") - elif isinstance(data, (bytes, bytearray)): - return bytes(data) - elif isinstance(data, memoryview): - return data.tobytes() - else: - raise TypeError("data must be bytes-like or str") - - -def parse_close(data: bytes) -> Tuple[int, str]: - """ - Parse the payload from a close frame. - - Return ``(code, reason)``. - - :raises ~websockets.exceptions.ProtocolError: if data is ill-formed - :raises UnicodeDecodeError: if the reason isn't valid UTF-8 - - """ - length = len(data) - if length >= 2: - (code,) = struct.unpack("!H", data[:2]) - check_close(code) - reason = data[2:].decode("utf-8") - return code, reason - elif length == 0: - return 1005, "" - else: - assert length == 1 - raise ProtocolError("close frame too short") - - -def serialize_close(code: int, reason: str) -> bytes: - """ - Serialize the payload for a close frame. - - This is the reverse of :func:`parse_close`. - - """ - check_close(code) - return struct.pack("!H", code) + reason.encode("utf-8") - - -def check_close(code: int) -> None: - """ - Check that the close code has an acceptable value for a close frame. + write(self.serialize(mask=mask, extensions=extensions)) - :raises ~websockets.exceptions.ProtocolError: if the close code - is invalid - """ - if not (code in EXTERNAL_CLOSE_CODES or 3000 <= code < 5000): - raise ProtocolError("invalid status code") +# Backwards compatibility with previously documented public APIs +from .frames import parse_close # isort:skip # noqa +from .frames import prepare_ctrl as encode_data # isort:skip # noqa +from .frames import prepare_data # isort:skip # noqa +from .frames import serialize_close # isort:skip # noqa # at the bottom to allow circular import, because Extension depends on Frame diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index cc4416ba8..748c1ae66 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -40,8 +40,19 @@ ProtocolError, ) from .extensions.base import Extension -from .framing import * -from .handshake_legacy import * +from .frames import ( + OP_BINARY, + OP_CLOSE, + OP_CONT, + OP_PING, + OP_PONG, + OP_TEXT, + parse_close, + prepare_ctrl, + prepare_data, + serialize_close, +) +from .framing import Frame from .typing import Data @@ -732,7 +743,7 @@ async def ping(self, data: Optional[Data] = None) -> Awaitable[None]: await self.ensure_open() if data is not None: - data = encode_data(data) + data = prepare_ctrl(data) # Protect against duplicates if a payload is explicitly set. 
if data in self.pings: @@ -763,7 +774,7 @@ async def pong(self, data: Data = b"") -> None: """ await self.ensure_open() - data = encode_data(data) + data = prepare_ctrl(data) await self.write_frame(True, OP_PONG, data) diff --git a/tests/__init__.py b/tests/__init__.py index dd78609f5..76c869f50 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,5 +1,15 @@ import logging +import warnings # Avoid displaying stack traces at the ERROR logging level. logging.basicConfig(level=logging.CRITICAL) + + +# Ignore deprecation warnings while refactoring is in progress +warnings.filterwarnings( + action="ignore", + message=r"websockets\.framing is deprecated", + category=DeprecationWarning, + module="websockets.framing", +) diff --git a/tests/extensions/test_permessage_deflate.py b/tests/extensions/test_permessage_deflate.py index 0ec49c6c0..e1193e672 100644 --- a/tests/extensions/test_permessage_deflate.py +++ b/tests/extensions/test_permessage_deflate.py @@ -9,7 +9,7 @@ PayloadTooBig, ) from websockets.extensions.permessage_deflate import * -from websockets.framing import ( +from websockets.frames import ( OP_BINARY, OP_CLOSE, OP_CONT, diff --git a/tests/test_frames.py b/tests/test_frames.py new file mode 100644 index 000000000..39d4055a8 --- /dev/null +++ b/tests/test_frames.py @@ -0,0 +1,232 @@ +import codecs +import struct +import unittest +import unittest.mock + +from websockets.exceptions import PayloadTooBig, ProtocolError +from websockets.frames import * +from websockets.streams import StreamReader + +from .utils import GeneratorTestCase + + +class FrameTests(GeneratorTestCase): + def parse(self, data, mask=False, max_size=None, extensions=None): + reader = StreamReader() + reader.feed_data(data) + reader.feed_eof() + parser = Frame.parse( + reader.read_exact, mask=mask, max_size=max_size, extensions=extensions, + ) + return self.assertGeneratorReturns(parser) + + def round_trip(self, data, frame, mask=False, extensions=None): + parsed = self.parse(data, mask=mask, extensions=extensions) + self.assertEqual(parsed, frame) + + # Make masking deterministic by reusing the same "random" mask. + # This has an effect only when mask is True. 
+ randbits = struct.unpack("!I", data[2:6])[0] if mask else 0 + with unittest.mock.patch("random.getrandbits", return_value=randbits): + serialized = parsed.serialize(mask=mask, extensions=extensions) + self.assertEqual(serialized, data) + + def test_text(self): + self.round_trip(b"\x81\x04Spam", Frame(True, OP_TEXT, b"Spam")) + + def test_text_masked(self): + self.round_trip( + b"\x81\x84\x5b\xfb\xe1\xa8\x08\x8b\x80\xc5", + Frame(True, OP_TEXT, b"Spam"), + mask=True, + ) + + def test_binary(self): + self.round_trip(b"\x82\x04Eggs", Frame(True, OP_BINARY, b"Eggs")) + + def test_binary_masked(self): + self.round_trip( + b"\x82\x84\x53\xcd\xe2\x89\x16\xaa\x85\xfa", + Frame(True, OP_BINARY, b"Eggs"), + mask=True, + ) + + def test_non_ascii_text(self): + self.round_trip( + b"\x81\x05caf\xc3\xa9", Frame(True, OP_TEXT, "café".encode("utf-8")) + ) + + def test_non_ascii_text_masked(self): + self.round_trip( + b"\x81\x85\x64\xbe\xee\x7e\x07\xdf\x88\xbd\xcd", + Frame(True, OP_TEXT, "café".encode("utf-8")), + mask=True, + ) + + def test_close(self): + self.round_trip(b"\x88\x00", Frame(True, OP_CLOSE, b"")) + + def test_ping(self): + self.round_trip(b"\x89\x04ping", Frame(True, OP_PING, b"ping")) + + def test_pong(self): + self.round_trip(b"\x8a\x04pong", Frame(True, OP_PONG, b"pong")) + + def test_long(self): + self.round_trip( + b"\x82\x7e\x00\x7e" + 126 * b"a", Frame(True, OP_BINARY, 126 * b"a") + ) + + def test_very_long(self): + self.round_trip( + b"\x82\x7f\x00\x00\x00\x00\x00\x01\x00\x00" + 65536 * b"a", + Frame(True, OP_BINARY, 65536 * b"a"), + ) + + def test_payload_too_big(self): + with self.assertRaises(PayloadTooBig): + self.parse(b"\x82\x7e\x04\x01" + 1025 * b"a", max_size=1024) + + def test_bad_reserved_bits(self): + for data in [b"\xc0\x00", b"\xa0\x00", b"\x90\x00"]: + with self.subTest(data=data): + with self.assertRaises(ProtocolError): + self.parse(data) + + def test_good_opcode(self): + for opcode in list(range(0x00, 0x03)) + list(range(0x08, 0x0B)): + data = bytes([0x80 | opcode, 0]) + with self.subTest(data=data): + self.parse(data) # does not raise an exception + + def test_bad_opcode(self): + for opcode in list(range(0x03, 0x08)) + list(range(0x0B, 0x10)): + data = bytes([0x80 | opcode, 0]) + with self.subTest(data=data): + with self.assertRaises(ProtocolError): + self.parse(data) + + def test_mask_flag(self): + # Mask flag correctly set. + self.parse(b"\x80\x80\x00\x00\x00\x00", mask=True) + # Mask flag incorrectly unset. + with self.assertRaises(ProtocolError): + self.parse(b"\x80\x80\x00\x00\x00\x00") + # Mask flag correctly unset. + self.parse(b"\x80\x00") + # Mask flag incorrectly set. + with self.assertRaises(ProtocolError): + self.parse(b"\x80\x00", mask=True) + + def test_control_frame_max_length(self): + # At maximum allowed length. + self.parse(b"\x88\x7e\x00\x7d" + 125 * b"a") + # Above maximum allowed length. + with self.assertRaises(ProtocolError): + self.parse(b"\x88\x7e\x00\x7e" + 126 * b"a") + + def test_fragmented_control_frame(self): + # Fin bit correctly set. + self.parse(b"\x88\x00") + # Fin bit incorrectly unset. + with self.assertRaises(ProtocolError): + self.parse(b"\x08\x00") + + def test_extensions(self): + class Rot13: + @staticmethod + def encode(frame): + assert frame.opcode == OP_TEXT + text = frame.data.decode() + data = codecs.encode(text, "rot13").encode() + return frame._replace(data=data) + + # This extensions is symmetrical. 
+ @staticmethod + def decode(frame, *, max_size=None): + return Rot13.encode(frame) + + self.round_trip( + b"\x81\x05uryyb", Frame(True, OP_TEXT, b"hello"), extensions=[Rot13()] + ) + + +class PrepareDataTests(unittest.TestCase): + def test_prepare_data_str(self): + self.assertEqual(prepare_data("café"), (OP_TEXT, b"caf\xc3\xa9")) + + def test_prepare_data_bytes(self): + self.assertEqual(prepare_data(b"tea"), (OP_BINARY, b"tea")) + + def test_prepare_data_bytearray(self): + self.assertEqual( + prepare_data(bytearray(b"tea")), (OP_BINARY, bytearray(b"tea")) + ) + + def test_prepare_data_memoryview(self): + self.assertEqual( + prepare_data(memoryview(b"tea")), (OP_BINARY, memoryview(b"tea")) + ) + + def test_prepare_data_non_contiguous_memoryview(self): + self.assertEqual(prepare_data(memoryview(b"tteeaa")[::2]), (OP_BINARY, b"tea")) + + def test_prepare_data_list(self): + with self.assertRaises(TypeError): + prepare_data([]) + + def test_prepare_data_none(self): + with self.assertRaises(TypeError): + prepare_data(None) + + +class PrepareCtrlTests(unittest.TestCase): + def test_prepare_ctrl_str(self): + self.assertEqual(prepare_ctrl("café"), b"caf\xc3\xa9") + + def test_prepare_ctrl_bytes(self): + self.assertEqual(prepare_ctrl(b"tea"), b"tea") + + def test_prepare_ctrl_bytearray(self): + self.assertEqual(prepare_ctrl(bytearray(b"tea")), b"tea") + + def test_prepare_ctrl_memoryview(self): + self.assertEqual(prepare_ctrl(memoryview(b"tea")), b"tea") + + def test_prepare_ctrl_non_contiguous_memoryview(self): + self.assertEqual(prepare_ctrl(memoryview(b"tteeaa")[::2]), b"tea") + + def test_prepare_ctrl_list(self): + with self.assertRaises(TypeError): + prepare_ctrl([]) + + def test_prepare_ctrl_none(self): + with self.assertRaises(TypeError): + prepare_ctrl(None) + + +class ParseAndSerializeCloseTests(unittest.TestCase): + def round_trip(self, data, code, reason): + parsed = parse_close(data) + self.assertEqual(parsed, (code, reason)) + serialized = serialize_close(code, reason) + self.assertEqual(serialized, data) + + def test_parse_close_and_serialize_close(self): + self.round_trip(b"\x03\xe8", 1000, "") + self.round_trip(b"\x03\xe8OK", 1000, "OK") + + def test_parse_close_empty(self): + self.assertEqual(parse_close(b""), (1005, "")) + + def test_parse_close_errors(self): + with self.assertRaises(ProtocolError): + parse_close(b"\x03") + with self.assertRaises(ProtocolError): + parse_close(b"\x03\xe7") + with self.assertRaises(UnicodeDecodeError): + parse_close(b"\x03\xe8\xff\xff") + + def test_serialize_close_errors(self): + with self.assertRaises(ProtocolError): + serialize_close(999, "") diff --git a/tests/test_framing.py b/tests/test_framing.py index 5def415d2..231cbf718 100644 --- a/tests/test_framing.py +++ b/tests/test_framing.py @@ -2,8 +2,10 @@ import codecs import unittest import unittest.mock +import warnings from websockets.exceptions import PayloadTooBig, ProtocolError +from websockets.frames import OP_BINARY, OP_CLOSE, OP_PING, OP_PONG, OP_TEXT from websockets.framing import * from .utils import AsyncioTestCase @@ -11,24 +13,26 @@ class FramingTests(AsyncioTestCase): def decode(self, message, mask=False, max_size=None, extensions=None): - self.stream = asyncio.StreamReader(loop=self.loop) - self.stream.feed_data(message) - self.stream.feed_eof() - frame = self.loop.run_until_complete( - Frame.read( - self.stream.readexactly, - mask=mask, - max_size=max_size, - extensions=extensions, + stream = asyncio.StreamReader(loop=self.loop) + stream.feed_data(message) + stream.feed_eof() + 
with warnings.catch_warnings(record=True): + frame = self.loop.run_until_complete( + Frame.read( + stream.readexactly, + mask=mask, + max_size=max_size, + extensions=extensions, + ) ) - ) # Make sure all the data was consumed. - self.assertTrue(self.stream.at_eof()) + self.assertTrue(stream.at_eof()) return frame def encode(self, frame, mask=False, extensions=None): write = unittest.mock.Mock() - frame.write(write, mask=mask, extensions=extensions) + with warnings.catch_warnings(record=True): + frame.write(write, mask=mask, extensions=extensions) # Ensure the entire frame is sent with a single call to write(). # Multiple calls cause TCP fragmentation and degrade performance. self.assertEqual(write.call_count, 1) @@ -47,12 +51,6 @@ def round_trip(self, message, expected, mask=False, extensions=None): else: # deterministic encoding self.assertEqual(encoded, message) - def round_trip_close(self, data, code, reason): - parsed = parse_close(data) - self.assertEqual(parsed, (code, reason)) - serialized = serialize_close(code, reason) - self.assertEqual(serialized, data) - def test_text(self): self.round_trip(b"\x81\x04Spam", Frame(True, OP_TEXT, b"Spam")) @@ -147,56 +145,6 @@ def test_control_frame_max_length(self): with self.assertRaises(ProtocolError): self.decode(b"\x88\x7e\x00\x7e" + 126 * b"a") - def test_prepare_data_str(self): - self.assertEqual(prepare_data("café"), (OP_TEXT, b"caf\xc3\xa9")) - - def test_prepare_data_bytes(self): - self.assertEqual(prepare_data(b"tea"), (OP_BINARY, b"tea")) - - def test_prepare_data_bytearray(self): - self.assertEqual( - prepare_data(bytearray(b"tea")), (OP_BINARY, bytearray(b"tea")) - ) - - def test_prepare_data_memoryview(self): - self.assertEqual( - prepare_data(memoryview(b"tea")), (OP_BINARY, memoryview(b"tea")) - ) - - def test_prepare_data_non_contiguous_memoryview(self): - self.assertEqual(prepare_data(memoryview(b"tteeaa")[::2]), (OP_BINARY, b"tea")) - - def test_prepare_data_list(self): - with self.assertRaises(TypeError): - prepare_data([]) - - def test_prepare_data_none(self): - with self.assertRaises(TypeError): - prepare_data(None) - - def test_encode_data_str(self): - self.assertEqual(encode_data("café"), b"caf\xc3\xa9") - - def test_encode_data_bytes(self): - self.assertEqual(encode_data(b"tea"), b"tea") - - def test_encode_data_bytearray(self): - self.assertEqual(encode_data(bytearray(b"tea")), b"tea") - - def test_encode_data_memoryview(self): - self.assertEqual(encode_data(memoryview(b"tea")), b"tea") - - def test_encode_data_non_contiguous_memoryview(self): - self.assertEqual(encode_data(memoryview(b"tteeaa")[::2]), b"tea") - - def test_encode_data_list(self): - with self.assertRaises(TypeError): - encode_data([]) - - def test_encode_data_none(self): - with self.assertRaises(TypeError): - encode_data(None) - def test_fragmented_control_frame(self): # Fin bit correctly set. 
self.decode(b"\x88\x00") @@ -204,25 +152,6 @@ def test_fragmented_control_frame(self): with self.assertRaises(ProtocolError): self.decode(b"\x08\x00") - def test_parse_close_and_serialize_close(self): - self.round_trip_close(b"\x03\xe8", 1000, "") - self.round_trip_close(b"\x03\xe8OK", 1000, "OK") - - def test_parse_close_empty(self): - self.assertEqual(parse_close(b""), (1005, "")) - - def test_parse_close_errors(self): - with self.assertRaises(ProtocolError): - parse_close(b"\x03") - with self.assertRaises(ProtocolError): - parse_close(b"\x03\xe7") - with self.assertRaises(UnicodeDecodeError): - parse_close(b"\x03\xe8\xff\xff") - - def test_serialize_close_errors(self): - with self.assertRaises(ProtocolError): - serialize_close(999, "") - def test_extensions(self): class Rot13: @staticmethod diff --git a/tests/test_protocol.py b/tests/test_protocol.py index d32c1f72e..91fb02a50 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -6,7 +6,16 @@ import warnings from websockets.exceptions import ConnectionClosed, InvalidState -from websockets.framing import * +from websockets.frames import ( + OP_BINARY, + OP_CLOSE, + OP_CONT, + OP_PING, + OP_PONG, + OP_TEXT, + serialize_close, +) +from websockets.framing import Frame from websockets.protocol import State, WebSocketCommonProtocol from .utils import MS, AsyncioTestCase From 7b67307ec9f324535cea7e141c6d1a43cb47f4ff Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Mon, 14 Oct 2019 21:55:06 +0200 Subject: [PATCH 201/281] Add a sans-I/O compatible HTTP/1.1 implementation. --- src/websockets/http11.py | 295 +++++++++++++++++++++++++++++++++++++++ tests/test_http11.py | 271 +++++++++++++++++++++++++++++++++++ 2 files changed, 566 insertions(+) create mode 100644 src/websockets/http11.py create mode 100644 tests/test_http11.py diff --git a/src/websockets/http11.py b/src/websockets/http11.py new file mode 100644 index 000000000..e1d004881 --- /dev/null +++ b/src/websockets/http11.py @@ -0,0 +1,295 @@ +import re +from typing import Callable, Generator, NamedTuple, Optional + +from .datastructures import Headers +from .exceptions import SecurityError + + +MAX_HEADERS = 256 +MAX_LINE = 4096 + + +def d(value: bytes) -> str: + """ + Decode a bytestring for interpolating into an error message. + + """ + return value.decode(errors="backslashreplace") + + +# See https://tools.ietf.org/html/rfc7230#appendix-B. + +# Regex for validating header names. + +_token_re = re.compile(rb"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+") + +# Regex for validating header values. + +# We don't attempt to support obsolete line folding. + +# Include HTAB (\x09), SP (\x20), VCHAR (\x21-\x7e), obs-text (\x80-\xff). + +# The ABNF is complicated because it attempts to express that optional +# whitespace is ignored. We strip whitespace and don't revalidate that. + +# See also https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189 + +_value_re = re.compile(rb"[\x09\x20-\x7e\x80-\xff]*") + + +# Consider converting to dataclasses when dropping support for Python < 3.7. + + +class Request(NamedTuple): + """ + WebSocket handshake request. + + :param path: path and optional query + :param headers: + """ + + path: str + headers: Headers + # body isn't useful is the context of this library + + @classmethod + def parse( + cls, read_line: Callable[[], Generator[None, None, bytes]] + ) -> Generator[None, None, "Request"]: + """ + Parse an HTTP/1.1 GET request and return ``(path, headers)``. + + ``path`` isn't URL-decoded or validated in any way. 
+ + ``path`` and ``headers`` are expected to contain only ASCII characters. + Other characters are represented with surrogate escapes. + + :func:`parse_request` doesn't attempt to read the request body because + WebSocket handshake requests don't have one. If the request contains a + body, it may be read from ``stream`` after this coroutine returns. + + :param read_line: generator-based coroutine that reads a LF-terminated + line or raises an exception if there isn't enough data + :raises EOFError: if the connection is closed without a full HTTP request + :raises SecurityError: if the request exceeds a security limit + :raises ValueError: if the request isn't well formatted + + """ + # https://tools.ietf.org/html/rfc7230#section-3.1.1 + + # Parsing is simple because fixed values are expected for method and + # version and because path isn't checked. Since WebSocket software tends + # to implement HTTP/1.1 strictly, there's little need for lenient parsing. + + try: + request_line = yield from parse_line(read_line) + except EOFError as exc: + raise EOFError("connection closed while reading HTTP request line") from exc + + try: + method, raw_path, version = request_line.split(b" ", 2) + except ValueError: # not enough values to unpack (expected 3, got 1-2) + raise ValueError(f"invalid HTTP request line: {d(request_line)}") from None + + if method != b"GET": + raise ValueError(f"unsupported HTTP method: {d(method)}") + if version != b"HTTP/1.1": + raise ValueError(f"unsupported HTTP version: {d(version)}") + path = raw_path.decode("ascii", "surrogateescape") + + headers = yield from parse_headers(read_line) + + return cls(path, headers) + + def serialize(self) -> bytes: + """ + Serialize an HTTP/1.1 GET request. + + """ + # Since the path and headers only contain ASCII characters, + # we can keep this simple. + request = f"GET {self.path} HTTP/1.1\r\n".encode() + request += self.headers.serialize() + return request + + +# Consider converting to dataclasses when dropping support for Python < 3.7. + + +class Response(NamedTuple): + """ + WebSocket handshake response. + + """ + + status_code: int + reason_phrase: str + headers: Headers + body: Optional[bytes] = None + + @classmethod + def parse( + cls, + read_line: Callable[[], Generator[None, None, bytes]], + read_exact: Callable[[int], Generator[None, None, bytes]], + read_to_eof: Callable[[], Generator[None, None, bytes]], + ) -> Generator[None, None, "Response"]: + """ + Parse an HTTP/1.1 response and return ``(status_code, reason, headers)``. + + ``reason`` and ``headers`` are expected to contain only ASCII characters. + Other characters are represented with surrogate escapes. + + :func:`parse_request` doesn't attempt to read the response body because + WebSocket handshake responses don't have one. If the response contains a + body, it may be read from ``stream`` after this coroutine returns. 
+ + :param read_line: generator-based coroutine that reads a LF-terminated + line or raises an exception if there isn't enough data + :param read_exact: generator-based coroutine that reads the requested + number of bytes or raises an exception if there isn't enough data + :raises EOFError: if the connection is closed without a full HTTP response + :raises SecurityError: if the response exceeds a security limit + :raises LookupError: if the response isn't well formatted + :raises ValueError: if the response isn't well formatted + + """ + # https://tools.ietf.org/html/rfc7230#section-3.1.2 + + # As in parse_request, parsing is simple because a fixed value is expected + # for version, status_code is a 3-digit number, and reason can be ignored. + + try: + status_line = yield from parse_line(read_line) + except EOFError as exc: + raise EOFError("connection closed while reading HTTP status line") from exc + + try: + version, raw_status_code, raw_reason = status_line.split(b" ", 2) + except ValueError: # not enough values to unpack (expected 3, got 1-2) + raise ValueError(f"invalid HTTP status line: {d(status_line)}") from None + + if version != b"HTTP/1.1": + raise ValueError(f"unsupported HTTP version: {d(version)}") + try: + status_code = int(raw_status_code) + except ValueError: # invalid literal for int() with base 10 + raise ValueError( + f"invalid HTTP status code: {d(raw_status_code)}" + ) from None + if not 100 <= status_code < 1000: + raise ValueError(f"unsupported HTTP status code: {d(raw_status_code)}") + if not _value_re.fullmatch(raw_reason): + raise ValueError(f"invalid HTTP reason phrase: {d(raw_reason)}") + reason = raw_reason.decode() + + headers = yield from parse_headers(read_line) + + # https://tools.ietf.org/html/rfc7230#section-3.3.3 + + if "Transfer-Encoding" in headers: + raise NotImplementedError("transfer codings aren't supported") + + # Since websockets only does GET requests (no HEAD, no CONNECT), all + # responses except 1xx, 204, and 304 include a message body. + if 100 <= status_code < 200 or status_code == 204 or status_code == 304: + body = None + else: + content_length: Optional[int] + try: + # MultipleValuesError is sufficiently unlikely that we don't + # attempt to handle it. Instead we document that its parent + # class, LookupError, may be raised. + raw_content_length = headers["Content-Length"] + except KeyError: + content_length = None + else: + content_length = int(raw_content_length) + + if content_length is None: + body = yield from read_to_eof() + else: + body = yield from read_exact(content_length) + + return cls(status_code, reason, headers, body) + + def serialize(self) -> bytes: + """ + Serialize an HTTP/1.1 GET response. + + """ + # Since the status line and headers only contain ASCII characters, + # we can keep this simple. + response = f"HTTP/1.1 {self.status_code} {self.reason_phrase}\r\n".encode() + response += self.headers.serialize() + if self.body is not None: + response += self.body + return response + + +def parse_headers( + read_line: Callable[[], Generator[None, None, bytes]] +) -> Generator[None, None, Headers]: + """ + Parse HTTP headers. + + Non-ASCII characters are represented with surrogate escapes. + + :param read_line: generator-based coroutine that reads a LF-terminated + line or raises an exception if there isn't enough data + + """ + # https://tools.ietf.org/html/rfc7230#section-3.2 + + # We don't attempt to support obsolete line folding. 
+ + headers = Headers() + for _ in range(MAX_HEADERS + 1): + try: + line = yield from parse_line(read_line) + except EOFError as exc: + raise EOFError("connection closed while reading HTTP headers") from exc + if line == b"": + break + + try: + raw_name, raw_value = line.split(b":", 1) + except ValueError: # not enough values to unpack (expected 2, got 1) + raise ValueError(f"invalid HTTP header line: {d(line)}") from None + if not _token_re.fullmatch(raw_name): + raise ValueError(f"invalid HTTP header name: {d(raw_name)}") + raw_value = raw_value.strip(b" \t") + if not _value_re.fullmatch(raw_value): + raise ValueError(f"invalid HTTP header value: {d(raw_value)}") + + name = raw_name.decode("ascii") # guaranteed to be ASCII at this point + value = raw_value.decode("ascii", "surrogateescape") + headers[name] = value + + else: + raise SecurityError("too many HTTP headers") + + return headers + + +def parse_line( + read_line: Callable[[], Generator[None, None, bytes]] +) -> Generator[None, None, bytes]: + """ + Parse a single line. + + CRLF is stripped from the return value. + + :param read_line: generator-based coroutine that reads a LF-terminated + line or raises an exception if there isn't enough data + + """ + # Security: TODO: add a limit here + line = yield from read_line() + # Security: this guarantees header values are small (hard-coded = 4 KiB) + if len(line) > MAX_LINE: + raise SecurityError("line too long") + # Not mandatory but safe - https://tools.ietf.org/html/rfc7230#section-3.5 + if not line.endswith(b"\r\n"): + raise EOFError("line without CRLF") + return line[:-2] diff --git a/tests/test_http11.py b/tests/test_http11.py new file mode 100644 index 000000000..bca874aee --- /dev/null +++ b/tests/test_http11.py @@ -0,0 +1,271 @@ +from websockets.datastructures import Headers +from websockets.exceptions import SecurityError +from websockets.http11 import * +from websockets.http11 import parse_headers +from websockets.streams import StreamReader + +from .utils import GeneratorTestCase + + +class RequestTests(GeneratorTestCase): + def setUp(self): + super().setUp() + self.reader = StreamReader() + + def parse(self): + return Request.parse(self.reader.read_line) + + def test_parse(self): + # Example from the protocol overview in RFC 6455 + self.reader.feed_data( + b"GET /chat HTTP/1.1\r\n" + b"Host: server.example.com\r\n" + b"Upgrade: websocket\r\n" + b"Connection: Upgrade\r\n" + b"Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" + b"Origin: http://example.com\r\n" + b"Sec-WebSocket-Protocol: chat, superchat\r\n" + b"Sec-WebSocket-Version: 13\r\n" + b"\r\n" + ) + request = self.assertGeneratorReturns(self.parse()) + self.assertEqual(request.path, "/chat") + self.assertEqual(request.headers["Upgrade"], "websocket") + + def test_parse_empty(self): + self.reader.feed_eof() + with self.assertRaises(EOFError) as raised: + next(self.parse()) + self.assertEqual( + str(raised.exception), "connection closed while reading HTTP request line" + ) + + def test_parse_invalid_request_line(self): + self.reader.feed_data(b"GET /\r\n\r\n") + with self.assertRaises(ValueError) as raised: + next(self.parse()) + self.assertEqual(str(raised.exception), "invalid HTTP request line: GET /") + + def test_parse_unsupported_method(self): + self.reader.feed_data(b"OPTIONS * HTTP/1.1\r\n\r\n") + with self.assertRaises(ValueError) as raised: + next(self.parse()) + self.assertEqual(str(raised.exception), "unsupported HTTP method: OPTIONS") + + def test_parse_unsupported_version(self): + 
self.reader.feed_data(b"GET /chat HTTP/1.0\r\n\r\n") + with self.assertRaises(ValueError) as raised: + next(self.parse()) + self.assertEqual(str(raised.exception), "unsupported HTTP version: HTTP/1.0") + + def test_parse_invalid_header(self): + self.reader.feed_data(b"GET /chat HTTP/1.1\r\nOops\r\n") + with self.assertRaises(ValueError) as raised: + next(self.parse()) + self.assertEqual(str(raised.exception), "invalid HTTP header line: Oops") + + def test_serialize(self): + # Example from the protocol overview in RFC 6455 + request = Request( + "/chat", + Headers( + [ + ("Host", "server.example.com"), + ("Upgrade", "websocket"), + ("Connection", "Upgrade"), + ("Sec-WebSocket-Key", "dGhlIHNhbXBsZSBub25jZQ=="), + ("Origin", "http://example.com"), + ("Sec-WebSocket-Protocol", "chat, superchat"), + ("Sec-WebSocket-Version", "13"), + ] + ), + ) + self.assertEqual( + request.serialize(), + b"GET /chat HTTP/1.1\r\n" + b"Host: server.example.com\r\n" + b"Upgrade: websocket\r\n" + b"Connection: Upgrade\r\n" + b"Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\r\n" + b"Origin: http://example.com\r\n" + b"Sec-WebSocket-Protocol: chat, superchat\r\n" + b"Sec-WebSocket-Version: 13\r\n" + b"\r\n", + ) + + +class ResponseTests(GeneratorTestCase): + def setUp(self): + super().setUp() + self.reader = StreamReader() + + def parse(self): + return Response.parse( + self.reader.read_line, self.reader.read_exact, self.reader.read_to_eof + ) + + def test_parse(self): + # Example from the protocol overview in RFC 6455 + self.reader.feed_data( + b"HTTP/1.1 101 Switching Protocols\r\n" + b"Upgrade: websocket\r\n" + b"Connection: Upgrade\r\n" + b"Sec-WebSocket-Accept: s3pPLMBiTxaQ9kYGzzhZRbK+xOo=\r\n" + b"Sec-WebSocket-Protocol: chat\r\n" + b"\r\n" + ) + response = self.assertGeneratorReturns(self.parse()) + self.assertEqual(response.status_code, 101) + self.assertEqual(response.reason_phrase, "Switching Protocols") + self.assertEqual(response.headers["Upgrade"], "websocket") + self.assertIsNone(response.body) + + def test_parse_empty(self): + self.reader.feed_eof() + with self.assertRaises(EOFError) as raised: + next(self.parse()) + self.assertEqual( + str(raised.exception), "connection closed while reading HTTP status line" + ) + + def test_parse_invalid_status_line(self): + self.reader.feed_data(b"Hello!\r\n") + with self.assertRaises(ValueError) as raised: + next(self.parse()) + self.assertEqual(str(raised.exception), "invalid HTTP status line: Hello!") + + def test_parse_unsupported_version(self): + self.reader.feed_data(b"HTTP/1.0 400 Bad Request\r\n\r\n") + with self.assertRaises(ValueError) as raised: + next(self.parse()) + self.assertEqual(str(raised.exception), "unsupported HTTP version: HTTP/1.0") + + def test_parse_invalid_status(self): + self.reader.feed_data(b"HTTP/1.1 OMG WTF\r\n\r\n") + with self.assertRaises(ValueError) as raised: + next(self.parse()) + self.assertEqual(str(raised.exception), "invalid HTTP status code: OMG") + + def test_parse_unsupported_status(self): + self.reader.feed_data(b"HTTP/1.1 007 My name is Bond\r\n\r\n") + with self.assertRaises(ValueError) as raised: + next(self.parse()) + self.assertEqual(str(raised.exception), "unsupported HTTP status code: 007") + + def test_parse_invalid_reason(self): + self.reader.feed_data(b"HTTP/1.1 200 \x7f\r\n\r\n") + with self.assertRaises(ValueError) as raised: + next(self.parse()) + self.assertEqual(str(raised.exception), "invalid HTTP reason phrase: \x7f") + + def test_parse_invalid_header(self): + self.reader.feed_data(b"HTTP/1.1 500 Internal 
Server Error\r\nOops\r\n") + with self.assertRaises(ValueError) as raised: + next(self.parse()) + self.assertEqual(str(raised.exception), "invalid HTTP header line: Oops") + + def test_parse_body_with_content_length(self): + self.reader.feed_data( + b"HTTP/1.1 200 OK\r\nContent-Length: 13\r\n\r\nHello world!\n" + ) + response = self.assertGeneratorReturns(self.parse()) + self.assertEqual(response.body, b"Hello world!\n") + + def test_parse_body_without_content_length(self): + self.reader.feed_data(b"HTTP/1.1 200 OK\r\n\r\nHello world!\n") + gen = self.parse() + self.assertGeneratorRunning(gen) + self.reader.feed_eof() + response = self.assertGeneratorReturns(gen) + self.assertEqual(response.body, b"Hello world!\n") + + def test_parse_body_with_transfer_encoding(self): + self.reader.feed_data(b"HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n") + with self.assertRaises(NotImplementedError) as raised: + next(self.parse()) + self.assertEqual(str(raised.exception), "transfer codings aren't supported") + + def test_parse_body_no_content(self): + self.reader.feed_data(b"HTTP/1.1 204 No Content\r\n\r\n") + response = self.assertGeneratorReturns(self.parse()) + self.assertIsNone(response.body) + + def test_parse_body_not_modified(self): + self.reader.feed_data(b"HTTP/1.1 304 Not Modified\r\n\r\n") + response = self.assertGeneratorReturns(self.parse()) + self.assertIsNone(response.body) + + def test_serialize(self): + # Example from the protocol overview in RFC 6455 + response = Response( + 101, + "Switching Protocols", + Headers( + [ + ("Upgrade", "websocket"), + ("Connection", "Upgrade"), + ("Sec-WebSocket-Accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="), + ("Sec-WebSocket-Protocol", "chat"), + ] + ), + ) + self.assertEqual( + response.serialize(), + b"HTTP/1.1 101 Switching Protocols\r\n" + b"Upgrade: websocket\r\n" + b"Connection: Upgrade\r\n" + b"Sec-WebSocket-Accept: s3pPLMBiTxaQ9kYGzzhZRbK+xOo=\r\n" + b"Sec-WebSocket-Protocol: chat\r\n" + b"\r\n", + ) + + def test_serialize_with_body(self): + response = Response( + 200, + "OK", + Headers([("Content-Length", "13"), ("Content-Type", "text/plain")]), + b"Hello world!\n", + ) + self.assertEqual( + response.serialize(), + b"HTTP/1.1 200 OK\r\n" + b"Content-Length: 13\r\n" + b"Content-Type: text/plain\r\n" + b"\r\n" + b"Hello world!\n", + ) + + +class HeadersTests(GeneratorTestCase): + def setUp(self): + super().setUp() + self.reader = StreamReader() + + def parse_headers(self): + return parse_headers(self.reader.read_line) + + def test_parse_invalid_name(self): + self.reader.feed_data(b"foo bar: baz qux\r\n\r\n") + with self.assertRaises(ValueError): + next(self.parse_headers()) + + def test_parse_invalid_value(self): + self.reader.feed_data(b"foo: \x00\x00\x0f\r\n\r\n") + with self.assertRaises(ValueError): + next(self.parse_headers()) + + def test_parse_too_long_value(self): + self.reader.feed_data(b"foo: bar\r\n" * 257 + b"\r\n") + with self.assertRaises(SecurityError): + next(self.parse_headers()) + + def test_parse_too_long_line(self): + # Header line contains 5 + 4090 + 2 = 4097 bytes. 
+ self.reader.feed_data(b"foo: " + b"a" * 4090 + b"\r\n\r\n") + with self.assertRaises(SecurityError): + next(self.parse_headers()) + + def test_parse_invalid_line_ending(self): + self.reader.feed_data(b"foo: bar\n\n") + with self.assertRaises(EOFError): + next(self.parse_headers()) From e4bc504a880110b0d3cd1dbc8e55b69a5f44ee7c Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 6 Oct 2019 20:00:10 +0200 Subject: [PATCH 202/281] Salvage accept() from the legacy handshake module. --- src/websockets/handshake.py | 3 --- src/websockets/handshake_legacy.py | 8 +------- src/websockets/utils.py | 18 +++++++++++++++++- tests/test_handshake_legacy.py | 10 ++-------- tests/test_utils.py | 16 +++++++++++++--- 5 files changed, 33 insertions(+), 22 deletions(-) diff --git a/src/websockets/handshake.py b/src/websockets/handshake.py index f27bd1b84..3ff6c005d 100644 --- a/src/websockets/handshake.py +++ b/src/websockets/handshake.py @@ -6,9 +6,6 @@ __all__ = ["build_request", "check_request", "build_response", "check_response"] -GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" - - # Backwards compatibility with previously documented public APIs diff --git a/src/websockets/handshake_legacy.py b/src/websockets/handshake_legacy.py index 9683e8556..1f6c58e1b 100644 --- a/src/websockets/handshake_legacy.py +++ b/src/websockets/handshake_legacy.py @@ -27,15 +27,14 @@ import base64 import binascii -import hashlib import random from typing import List from .datastructures import Headers, MultipleValuesError from .exceptions import InvalidHeader, InvalidHeaderValue, InvalidUpgrade -from .handshake import GUID from .headers import parse_connection, parse_upgrade from .typing import ConnectionOption, UpgradeProtocol +from .utils import accept_key as accept __all__ = ["build_request", "check_request", "build_response", "check_response"] @@ -180,8 +179,3 @@ def check_response(headers: Headers, key: str) -> None: if s_w_accept != accept(key): raise InvalidHeaderValue("Sec-WebSocket-Accept", s_w_accept) - - -def accept(key: str) -> str: - sha1 = hashlib.sha1((key + GUID).encode()).digest() - return base64.b64encode(sha1).decode() diff --git a/src/websockets/utils.py b/src/websockets/utils.py index 40ac8559f..f9d0ca763 100644 --- a/src/websockets/utils.py +++ b/src/websockets/utils.py @@ -1,7 +1,23 @@ +import base64 +import hashlib import itertools -__all__ = ["apply_mask"] +__all__ = ["accept_key", "apply_mask"] + + +GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" + + +def accept_key(key: str) -> str: + """ + Compute the value of the Sec-WebSocket-Accept header. 
+ + :param key: value of the Sec-WebSocket-Key header + + """ + sha1 = hashlib.sha1((key + GUID).encode()).digest() + return base64.b64encode(sha1).decode() def apply_mask(data: bytes, mask: bytes) -> bytes: diff --git a/tests/test_handshake_legacy.py b/tests/test_handshake_legacy.py index 361410d3f..c34b94e41 100644 --- a/tests/test_handshake_legacy.py +++ b/tests/test_handshake_legacy.py @@ -9,16 +9,10 @@ InvalidUpgrade, ) from websockets.handshake_legacy import * -from websockets.handshake_legacy import accept # private API +from websockets.utils import accept_key class HandshakeTests(unittest.TestCase): - def test_accept(self): - # Test vector from RFC 6455 - key = "dGhlIHNhbXBsZSBub25jZQ==" - acc = "s3pPLMBiTxaQ9kYGzzhZRbK+xOo=" - self.assertEqual(accept(key), acc) - def test_round_trip(self): request_headers = Headers() request_key = build_request(request_headers) @@ -178,7 +172,7 @@ def test_response_invalid_accept(self): with self.assertInvalidResponseHeaders(InvalidHeaderValue) as headers: del headers["Sec-WebSocket-Accept"] other_key = "1Eq4UDEFQYg3YspNgqxv5g==" - headers["Sec-WebSocket-Accept"] = accept(other_key) + headers["Sec-WebSocket-Accept"] = accept_key(other_key) def test_response_missing_accept(self): with self.assertInvalidResponseHeaders(InvalidHeader) as headers: diff --git a/tests/test_utils.py b/tests/test_utils.py index e5570f098..7d5417d79 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,10 +1,20 @@ import itertools import unittest -from websockets.utils import apply_mask as py_apply_mask +from websockets.utils import accept_key, apply_mask as py_apply_mask -class UtilsTests(unittest.TestCase): +# Test vector from RFC 6455 +KEY = "dGhlIHNhbXBsZSBub25jZQ==" +ACCEPT = "s3pPLMBiTxaQ9kYGzzhZRbK+xOo=" + + +class AcceptKeyTests(unittest.TestCase): + def test_accept_key(self): + self.assertEqual(accept_key(KEY), ACCEPT) + + +class ApplyMaskTests(unittest.TestCase): @staticmethod def apply_mask(*args, **kwargs): return py_apply_mask(*args, **kwargs) @@ -73,7 +83,7 @@ def test_apply_mask_check_mask_length(self): pass else: - class SpeedupsTests(UtilsTests): + class SpeedupsTests(ApplyMaskTests): @staticmethod def apply_mask(*args, **kwargs): return c_apply_mask(*args, **kwargs) From cf5af352200e6800f0152c8399af067a45053d76 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 25 Jan 2020 21:32:21 +0100 Subject: [PATCH 203/281] Extract generate_key() from the legacy handshake module. 
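Illustrative sketch (not the patched modules themselves) of how the
extracted helper pairs up with accept_key() from the previous commit;
the assertions mirror the checks in tests/test_utils.py, including the
test vector from RFC 6455:

    import base64
    import hashlib
    import random

    GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"

    def generate_key() -> str:
        # Sec-WebSocket-Key: 16 random bytes, base64-encoded.
        raw_key = bytes(random.getrandbits(8) for _ in range(16))
        return base64.b64encode(raw_key).decode()

    def accept_key(key: str) -> str:
        # Sec-WebSocket-Accept: SHA-1 of the key concatenated with the
        # fixed GUID, base64-encoded.
        sha1 = hashlib.sha1((key + GUID).encode()).digest()
        return base64.b64encode(sha1).decode()

    key = generate_key()
    assert len(base64.b64decode(key.encode())) == 16

    # Test vector from RFC 6455.
    assert accept_key("dGhlIHNhbXBsZSBub25jZQ==") == "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="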
--- src/websockets/handshake_legacy.py | 6 ++---- src/websockets/utils.py | 10 ++++++++++ tests/test_utils.py | 9 +++++++-- 3 files changed, 19 insertions(+), 6 deletions(-) diff --git a/src/websockets/handshake_legacy.py b/src/websockets/handshake_legacy.py index 1f6c58e1b..7e6acc77d 100644 --- a/src/websockets/handshake_legacy.py +++ b/src/websockets/handshake_legacy.py @@ -27,14 +27,13 @@ import base64 import binascii -import random from typing import List from .datastructures import Headers, MultipleValuesError from .exceptions import InvalidHeader, InvalidHeaderValue, InvalidUpgrade from .headers import parse_connection, parse_upgrade from .typing import ConnectionOption, UpgradeProtocol -from .utils import accept_key as accept +from .utils import accept_key as accept, generate_key __all__ = ["build_request", "check_request", "build_response", "check_response"] @@ -50,8 +49,7 @@ def build_request(headers: Headers) -> str: :returns: ``key`` which must be passed to :func:`check_response` """ - raw_key = bytes(random.getrandbits(8) for _ in range(16)) - key = base64.b64encode(raw_key).decode() + key = generate_key() headers["Upgrade"] = "websocket" headers["Connection"] = "Upgrade" headers["Sec-WebSocket-Key"] = key diff --git a/src/websockets/utils.py b/src/websockets/utils.py index f9d0ca763..a2fe8cc7f 100644 --- a/src/websockets/utils.py +++ b/src/websockets/utils.py @@ -1,6 +1,7 @@ import base64 import hashlib import itertools +import random __all__ = ["accept_key", "apply_mask"] @@ -9,6 +10,15 @@ GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" +def generate_key() -> str: + """ + Generate a random key for the Sec-WebSocket-Key header. + + """ + key = bytes(random.getrandbits(8) for _ in range(16)) + return base64.b64encode(key).decode() + + def accept_key(key: str) -> str: """ Compute the value of the Sec-WebSocket-Accept header. diff --git a/tests/test_utils.py b/tests/test_utils.py index 7d5417d79..b490c2409 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,7 +1,8 @@ +import base64 import itertools import unittest -from websockets.utils import accept_key, apply_mask as py_apply_mask +from websockets.utils import accept_key, apply_mask as py_apply_mask, generate_key # Test vector from RFC 6455 @@ -9,7 +10,11 @@ ACCEPT = "s3pPLMBiTxaQ9kYGzzhZRbK+xOo=" -class AcceptKeyTests(unittest.TestCase): +class UtilsTests(unittest.TestCase): + def test_generate_key(self): + key = generate_key() + self.assertEqual(len(base64.b64decode(key.encode())), 16) + def test_accept_key(self): self.assertEqual(accept_key(KEY), ACCEPT) From 1af2296159b0e5165bbcf4b636ed7a06520928ab Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 14 Jun 2020 10:27:30 +0200 Subject: [PATCH 204/281] Take advantage of the secrets module. Per RFC 6455, "the masking key MUST be derived from a strong source of entropy." There is no such requirement Sec-WebSocket-Key but it seems better anyway. 
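For the 4-byte frame mask, the change boils down to swapping a value
derived from the Mersenne Twister in the random module for bytes drawn
from the operating system's CSPRNG. A minimal sketch of the before and
after (not the actual frames.py code):

    import random
    import secrets
    import struct

    # Before: 32 pseudo-random bits packed into 4 bytes.
    mask_bytes = struct.pack("!I", random.getrandbits(32))

    # After: 4 bytes from a cryptographically strong source.
    mask_bytes = secrets.token_bytes(4)

    assert len(mask_bytes) == 4

Likewise, generate_key() now uses secrets.token_bytes(16) instead of
assembling 16 bytes from repeated calls to random.getrandbits(8).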
--- src/websockets/frames.py | 12 ++++++------ src/websockets/utils.py | 4 ++-- tests/test_frames.py | 5 ++--- 3 files changed, 10 insertions(+), 11 deletions(-) diff --git a/src/websockets/frames.py b/src/websockets/frames.py index 5ed8e483f..56dcf6171 100644 --- a/src/websockets/frames.py +++ b/src/websockets/frames.py @@ -4,7 +4,7 @@ """ import io -import random +import secrets import struct from typing import Callable, Generator, NamedTuple, Optional, Sequence, Tuple @@ -120,12 +120,12 @@ def parse( f"payload length exceeds size limit ({length} > {max_size} bytes)" ) if mask: - mask_bits = yield from read_exact(4) + mask_bytes = yield from read_exact(4) # Read the data. data = yield from read_exact(length) if mask: - data = apply_mask(data, mask_bits) + data = apply_mask(data, mask_bytes) frame = cls(fin, opcode, data, rsv1, rsv2, rsv3) @@ -186,12 +186,12 @@ def serialize( output.write(struct.pack("!BBQ", head1, head2 | 127, length)) if mask: - mask_bits = struct.pack("!I", random.getrandbits(32)) - output.write(mask_bits) + mask_bytes = secrets.token_bytes(4) + output.write(mask_bytes) # Prepare the data. if mask: - data = apply_mask(self.data, mask_bits) + data = apply_mask(self.data, mask_bytes) else: data = self.data output.write(data) diff --git a/src/websockets/utils.py b/src/websockets/utils.py index a2fe8cc7f..59210e438 100644 --- a/src/websockets/utils.py +++ b/src/websockets/utils.py @@ -1,7 +1,7 @@ import base64 import hashlib import itertools -import random +import secrets __all__ = ["accept_key", "apply_mask"] @@ -15,7 +15,7 @@ def generate_key() -> str: Generate a random key for the Sec-WebSocket-Key header. """ - key = bytes(random.getrandbits(8) for _ in range(16)) + key = secrets.token_bytes(16) return base64.b64encode(key).decode() diff --git a/tests/test_frames.py b/tests/test_frames.py index 39d4055a8..37a73b2df 100644 --- a/tests/test_frames.py +++ b/tests/test_frames.py @@ -1,5 +1,4 @@ import codecs -import struct import unittest import unittest.mock @@ -26,8 +25,8 @@ def round_trip(self, data, frame, mask=False, extensions=None): # Make masking deterministic by reusing the same "random" mask. # This has an effect only when mask is True. - randbits = struct.unpack("!I", data[2:6])[0] if mask else 0 - with unittest.mock.patch("random.getrandbits", return_value=randbits): + mask_bytes = data[2:6] if mask else b"" + with unittest.mock.patch("secrets.token_bytes", return_value=mask_bytes): serialized = parsed.serialize(mask=mask, extensions=extensions) self.assertEqual(serialized, data) From 6bce2489660daf09f1e6bdf121cabdea83128e4e Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 1 Feb 2020 09:40:33 +0100 Subject: [PATCH 205/281] Move asyncio client and server out of the way. 
--- src/websockets/asyncio_client.py | 588 ++++++++++ src/websockets/asyncio_server.py | 1004 ++++++++++++++++ src/websockets/auth.py | 2 +- src/websockets/client.py | 592 +--------- src/websockets/server.py | 1009 +---------------- ...erver.py => test_asyncio_client_server.py} | 28 +- tests/test_auth.py | 2 +- 7 files changed, 1623 insertions(+), 1602 deletions(-) create mode 100644 src/websockets/asyncio_client.py create mode 100644 src/websockets/asyncio_server.py rename tests/{test_client_server.py => test_asyncio_client_server.py} (98%) diff --git a/src/websockets/asyncio_client.py b/src/websockets/asyncio_client.py new file mode 100644 index 000000000..f95dae060 --- /dev/null +++ b/src/websockets/asyncio_client.py @@ -0,0 +1,588 @@ +""" +:mod:`websockets.client` defines the WebSocket client APIs. + +""" + +import asyncio +import collections.abc +import functools +import logging +import warnings +from types import TracebackType +from typing import Any, Generator, List, Optional, Sequence, Tuple, Type, cast + +from .datastructures import Headers, HeadersLike +from .exceptions import ( + InvalidHandshake, + InvalidHeader, + InvalidMessage, + InvalidStatusCode, + NegotiationError, + RedirectHandshake, + SecurityError, +) +from .extensions.base import ClientExtensionFactory, Extension +from .extensions.permessage_deflate import ClientPerMessageDeflateFactory +from .handshake_legacy import build_request, check_response +from .headers import ( + build_authorization_basic, + build_extension, + build_subprotocol, + parse_extension, + parse_subprotocol, +) +from .http import USER_AGENT +from .http_legacy import read_response +from .protocol import WebSocketCommonProtocol +from .typing import ExtensionHeader, Origin, Subprotocol +from .uri import WebSocketURI, parse_uri + + +__all__ = ["connect", "unix_connect", "WebSocketClientProtocol"] + +logger = logging.getLogger(__name__) + + +class WebSocketClientProtocol(WebSocketCommonProtocol): + """ + :class:`~asyncio.Protocol` subclass implementing a WebSocket client. + + This class inherits most of its methods from + :class:`~websockets.protocol.WebSocketCommonProtocol`. + + """ + + is_client = True + side = "client" + + def __init__( + self, + *, + origin: Optional[Origin] = None, + extensions: Optional[Sequence[ClientExtensionFactory]] = None, + subprotocols: Optional[Sequence[Subprotocol]] = None, + extra_headers: Optional[HeadersLike] = None, + **kwargs: Any, + ) -> None: + self.origin = origin + self.available_extensions = extensions + self.available_subprotocols = subprotocols + self.extra_headers = extra_headers + super().__init__(**kwargs) + + def write_http_request(self, path: str, headers: Headers) -> None: + """ + Write request line and headers to the HTTP request. + + """ + self.path = path + self.request_headers = headers + + logger.debug("%s > GET %s HTTP/1.1", self.side, path) + logger.debug("%s > %r", self.side, headers) + + # Since the path and headers only contain ASCII characters, + # we can keep this simple. + request = f"GET {path} HTTP/1.1\r\n" + request += str(headers) + + self.transport.write(request.encode()) + + async def read_http_response(self) -> Tuple[int, Headers]: + """ + Read status line and headers from the HTTP response. + + If the response contains a body, it may be read from ``self.reader`` + after this coroutine returns. 
+ + :raises ~websockets.exceptions.InvalidMessage: if the HTTP message is + malformed or isn't an HTTP/1.1 GET response + + """ + try: + status_code, reason, headers = await read_response(self.reader) + except asyncio.CancelledError: # pragma: no cover + raise + except Exception as exc: + raise InvalidMessage("did not receive a valid HTTP response") from exc + + logger.debug("%s < HTTP/1.1 %d %s", self.side, status_code, reason) + logger.debug("%s < %r", self.side, headers) + + self.response_headers = headers + + return status_code, self.response_headers + + @staticmethod + def process_extensions( + headers: Headers, + available_extensions: Optional[Sequence[ClientExtensionFactory]], + ) -> List[Extension]: + """ + Handle the Sec-WebSocket-Extensions HTTP response header. + + Check that each extension is supported, as well as its parameters. + + Return the list of accepted extensions. + + Raise :exc:`~websockets.exceptions.InvalidHandshake` to abort the + connection. + + :rfc:`6455` leaves the rules up to the specification of each + :extension. + + To provide this level of flexibility, for each extension accepted by + the server, we check for a match with each extension available in the + client configuration. If no match is found, an exception is raised. + + If several variants of the same extension are accepted by the server, + it may be configured several times, which won't make sense in general. + Extensions must implement their own requirements. For this purpose, + the list of previously accepted extensions is provided. + + Other requirements, for example related to mandatory extensions or the + order of extensions, may be implemented by overriding this method. + + """ + accepted_extensions: List[Extension] = [] + + header_values = headers.get_all("Sec-WebSocket-Extensions") + + if header_values: + + if available_extensions is None: + raise InvalidHandshake("no extensions supported") + + parsed_header_values: List[ExtensionHeader] = sum( + [parse_extension(header_value) for header_value in header_values], [] + ) + + for name, response_params in parsed_header_values: + + for extension_factory in available_extensions: + + # Skip non-matching extensions based on their name. + if extension_factory.name != name: + continue + + # Skip non-matching extensions based on their params. + try: + extension = extension_factory.process_response_params( + response_params, accepted_extensions + ) + except NegotiationError: + continue + + # Add matching extension to the final list. + accepted_extensions.append(extension) + + # Break out of the loop once we have a match. + break + + # If we didn't break from the loop, no extension in our list + # matched what the server sent. Fail the connection. + else: + raise NegotiationError( + f"Unsupported extension: " + f"name = {name}, params = {response_params}" + ) + + return accepted_extensions + + @staticmethod + def process_subprotocol( + headers: Headers, available_subprotocols: Optional[Sequence[Subprotocol]] + ) -> Optional[Subprotocol]: + """ + Handle the Sec-WebSocket-Protocol HTTP response header. + + Check that it contains exactly one supported subprotocol. + + Return the selected subprotocol. 
+ + """ + subprotocol: Optional[Subprotocol] = None + + header_values = headers.get_all("Sec-WebSocket-Protocol") + + if header_values: + + if available_subprotocols is None: + raise InvalidHandshake("no subprotocols supported") + + parsed_header_values: Sequence[Subprotocol] = sum( + [parse_subprotocol(header_value) for header_value in header_values], [] + ) + + if len(parsed_header_values) > 1: + subprotocols = ", ".join(parsed_header_values) + raise InvalidHandshake(f"multiple subprotocols: {subprotocols}") + + subprotocol = parsed_header_values[0] + + if subprotocol not in available_subprotocols: + raise NegotiationError(f"unsupported subprotocol: {subprotocol}") + + return subprotocol + + async def handshake( + self, + wsuri: WebSocketURI, + origin: Optional[Origin] = None, + available_extensions: Optional[Sequence[ClientExtensionFactory]] = None, + available_subprotocols: Optional[Sequence[Subprotocol]] = None, + extra_headers: Optional[HeadersLike] = None, + ) -> None: + """ + Perform the client side of the opening handshake. + + :param origin: sets the Origin HTTP header + :param available_extensions: list of supported extensions in the order + in which they should be used + :param available_subprotocols: list of supported subprotocols in order + of decreasing preference + :param extra_headers: sets additional HTTP request headers; it must be + a :class:`~websockets.http.Headers` instance, a + :class:`~collections.abc.Mapping`, or an iterable of ``(name, + value)`` pairs + :raises ~websockets.exceptions.InvalidHandshake: if the handshake + fails + + """ + request_headers = Headers() + + if wsuri.port == (443 if wsuri.secure else 80): # pragma: no cover + request_headers["Host"] = wsuri.host + else: + request_headers["Host"] = f"{wsuri.host}:{wsuri.port}" + + if wsuri.user_info: + request_headers["Authorization"] = build_authorization_basic( + *wsuri.user_info + ) + + if origin is not None: + request_headers["Origin"] = origin + + key = build_request(request_headers) + + if available_extensions is not None: + extensions_header = build_extension( + [ + (extension_factory.name, extension_factory.get_request_params()) + for extension_factory in available_extensions + ] + ) + request_headers["Sec-WebSocket-Extensions"] = extensions_header + + if available_subprotocols is not None: + protocol_header = build_subprotocol(available_subprotocols) + request_headers["Sec-WebSocket-Protocol"] = protocol_header + + if extra_headers is not None: + if isinstance(extra_headers, Headers): + extra_headers = extra_headers.raw_items() + elif isinstance(extra_headers, collections.abc.Mapping): + extra_headers = extra_headers.items() + for name, value in extra_headers: + request_headers[name] = value + + request_headers.setdefault("User-Agent", USER_AGENT) + + self.write_http_request(wsuri.resource_name, request_headers) + + status_code, response_headers = await self.read_http_response() + if status_code in (301, 302, 303, 307, 308): + if "Location" not in response_headers: + raise InvalidHeader("Location") + raise RedirectHandshake(response_headers["Location"]) + elif status_code != 101: + raise InvalidStatusCode(status_code) + + check_response(response_headers, key) + + self.extensions = self.process_extensions( + response_headers, available_extensions + ) + + self.subprotocol = self.process_subprotocol( + response_headers, available_subprotocols + ) + + self.connection_open() + + +class Connect: + """ + Connect to the WebSocket server at the given ``uri``. 
+ + Awaiting :func:`connect` yields a :class:`WebSocketClientProtocol` which + can then be used to send and receive messages. + + :func:`connect` can also be used as a asynchronous context manager. In + that case, the connection is closed when exiting the context. + + :func:`connect` is a wrapper around the event loop's + :meth:`~asyncio.loop.create_connection` method. Unknown keyword arguments + are passed to :meth:`~asyncio.loop.create_connection`. + + For example, you can set the ``ssl`` keyword argument to a + :class:`~ssl.SSLContext` to enforce some TLS settings. When connecting to + a ``wss://`` URI, if this argument isn't provided explicitly, + :func:`ssl.create_default_context` is called to create a context. + + You can connect to a different host and port from those found in ``uri`` + by setting ``host`` and ``port`` keyword arguments. This only changes the + destination of the TCP connection. The host name from ``uri`` is still + used in the TLS handshake for secure connections and in the ``Host`` HTTP + header. + + The ``create_protocol`` parameter allows customizing the + :class:`~asyncio.Protocol` that manages the connection. It should be a + callable or class accepting the same arguments as + :class:`WebSocketClientProtocol` and returning an instance of + :class:`WebSocketClientProtocol` or a subclass. It defaults to + :class:`WebSocketClientProtocol`. + + The behavior of ``ping_interval``, ``ping_timeout``, ``close_timeout``, + ``max_size``, ``max_queue``, ``read_limit``, and ``write_limit`` is + described in :class:`~websockets.protocol.WebSocketCommonProtocol`. + + :func:`connect` also accepts the following optional arguments: + + * ``compression`` is a shortcut to configure compression extensions; + by default it enables the "permessage-deflate" extension; set it to + ``None`` to disable compression + * ``origin`` sets the Origin HTTP header + * ``extensions`` is a list of supported extensions in order of + decreasing preference + * ``subprotocols`` is a list of supported subprotocols in order of + decreasing preference + * ``extra_headers`` sets additional HTTP request headers; it can be a + :class:`~websockets.http.Headers` instance, a + :class:`~collections.abc.Mapping`, or an iterable of ``(name, value)`` + pairs + + :raises ~websockets.uri.InvalidURI: if ``uri`` is invalid + :raises ~websockets.handshake.InvalidHandshake: if the opening handshake + fails + + """ + + MAX_REDIRECTS_ALLOWED = 10 + + def __init__( + self, + uri: str, + *, + path: Optional[str] = None, + create_protocol: Optional[Type[WebSocketClientProtocol]] = None, + ping_interval: Optional[float] = 20, + ping_timeout: Optional[float] = 20, + close_timeout: Optional[float] = None, + max_size: Optional[int] = 2 ** 20, + max_queue: Optional[int] = 2 ** 5, + read_limit: int = 2 ** 16, + write_limit: int = 2 ** 16, + loop: Optional[asyncio.AbstractEventLoop] = None, + legacy_recv: bool = False, + klass: Optional[Type[WebSocketClientProtocol]] = None, + timeout: Optional[float] = None, + compression: Optional[str] = "deflate", + origin: Optional[Origin] = None, + extensions: Optional[Sequence[ClientExtensionFactory]] = None, + subprotocols: Optional[Sequence[Subprotocol]] = None, + extra_headers: Optional[HeadersLike] = None, + **kwargs: Any, + ) -> None: + # Backwards compatibility: close_timeout used to be called timeout. + if timeout is None: + timeout = 10 + else: + warnings.warn("rename timeout to close_timeout", DeprecationWarning) + # If both are specified, timeout is ignored. 
+ if close_timeout is None: + close_timeout = timeout + + # Backwards compatibility: create_protocol used to be called klass. + if klass is None: + klass = WebSocketClientProtocol + else: + warnings.warn("rename klass to create_protocol", DeprecationWarning) + # If both are specified, klass is ignored. + if create_protocol is None: + create_protocol = klass + + if loop is None: + loop = asyncio.get_event_loop() + + wsuri = parse_uri(uri) + if wsuri.secure: + kwargs.setdefault("ssl", True) + elif kwargs.get("ssl") is not None: + raise ValueError( + "connect() received a ssl argument for a ws:// URI, " + "use a wss:// URI to enable TLS" + ) + + if compression == "deflate": + if extensions is None: + extensions = [] + if not any( + extension_factory.name == ClientPerMessageDeflateFactory.name + for extension_factory in extensions + ): + extensions = list(extensions) + [ + ClientPerMessageDeflateFactory(client_max_window_bits=True) + ] + elif compression is not None: + raise ValueError(f"unsupported compression: {compression}") + + factory = functools.partial( + create_protocol, + ping_interval=ping_interval, + ping_timeout=ping_timeout, + close_timeout=close_timeout, + max_size=max_size, + max_queue=max_queue, + read_limit=read_limit, + write_limit=write_limit, + loop=loop, + host=wsuri.host, + port=wsuri.port, + secure=wsuri.secure, + legacy_recv=legacy_recv, + origin=origin, + extensions=extensions, + subprotocols=subprotocols, + extra_headers=extra_headers, + ) + + if path is None: + host: Optional[str] + port: Optional[int] + if kwargs.get("sock") is None: + host, port = wsuri.host, wsuri.port + else: + # If sock is given, host and port shouldn't be specified. + host, port = None, None + # If host and port are given, override values from the URI. + host = kwargs.pop("host", host) + port = kwargs.pop("port", port) + create_connection = functools.partial( + loop.create_connection, factory, host, port, **kwargs + ) + else: + create_connection = functools.partial( + loop.create_unix_connection, factory, path, **kwargs + ) + + # This is a coroutine function. + self._create_connection = create_connection + self._wsuri = wsuri + + def handle_redirect(self, uri: str) -> None: + # Update the state of this instance to connect to a new URI. + old_wsuri = self._wsuri + new_wsuri = parse_uri(uri) + + # Forbid TLS downgrade. + if old_wsuri.secure and not new_wsuri.secure: + raise SecurityError("redirect from WSS to WS") + + same_origin = ( + old_wsuri.host == new_wsuri.host and old_wsuri.port == new_wsuri.port + ) + + # Rewrite the host and port arguments for cross-origin redirects. + # This preserves connection overrides with the host and port + # arguments if the redirect points to the same host and port. + if not same_origin: + # Replace the host and port argument passed to the protocol factory. + factory = self._create_connection.args[0] + factory = functools.partial( + factory.func, + *factory.args, + **dict(factory.keywords, host=new_wsuri.host, port=new_wsuri.port), + ) + # Replace the host and port argument passed to create_connection. + self._create_connection = functools.partial( + self._create_connection.func, + *(factory, new_wsuri.host, new_wsuri.port), + **self._create_connection.keywords, + ) + + # Set the new WebSocket URI. This suffices for same-origin redirects. + self._wsuri = new_wsuri + + # async with connect(...) 
+ + async def __aenter__(self) -> WebSocketClientProtocol: + return await self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: + await self.ws_client.close() + + # await connect(...) + + def __await__(self) -> Generator[Any, None, WebSocketClientProtocol]: + # Create a suitable iterator by calling __await__ on a coroutine. + return self.__await_impl__().__await__() + + async def __await_impl__(self) -> WebSocketClientProtocol: + for redirects in range(self.MAX_REDIRECTS_ALLOWED): + transport, protocol = await self._create_connection() + # https://github.com/python/typeshed/pull/2756 + transport = cast(asyncio.Transport, transport) + protocol = cast(WebSocketClientProtocol, protocol) + + try: + try: + await protocol.handshake( + self._wsuri, + origin=protocol.origin, + available_extensions=protocol.available_extensions, + available_subprotocols=protocol.available_subprotocols, + extra_headers=protocol.extra_headers, + ) + except Exception: + protocol.fail_connection() + await protocol.wait_closed() + raise + else: + self.ws_client = protocol + return protocol + except RedirectHandshake as exc: + self.handle_redirect(exc.uri) + else: + raise SecurityError("too many redirects") + + # yield from connect(...) + + __iter__ = __await__ + + +connect = Connect + + +def unix_connect(path: str, uri: str = "ws://localhost/", **kwargs: Any) -> Connect: + """ + Similar to :func:`connect`, but for connecting to a Unix socket. + + This function calls the event loop's + :meth:`~asyncio.loop.create_unix_connection` method. + + It is only available on Unix. + + It's mainly useful for debugging servers listening on Unix sockets. + + :param path: file system path to the Unix socket + :param uri: WebSocket URI + + """ + return connect(uri=uri, path=path, **kwargs) diff --git a/src/websockets/asyncio_server.py b/src/websockets/asyncio_server.py new file mode 100644 index 000000000..1eeddf0eb --- /dev/null +++ b/src/websockets/asyncio_server.py @@ -0,0 +1,1004 @@ +""" +:mod:`websockets.server` defines the WebSocket server APIs. 
+ +""" + +import asyncio +import collections.abc +import email.utils +import functools +import http +import logging +import socket +import sys +import warnings +from types import TracebackType +from typing import ( + Any, + Awaitable, + Callable, + Generator, + List, + Optional, + Sequence, + Set, + Tuple, + Type, + Union, + cast, +) + +from .datastructures import Headers, HeadersLike, MultipleValuesError +from .exceptions import ( + AbortHandshake, + InvalidHandshake, + InvalidHeader, + InvalidMessage, + InvalidOrigin, + InvalidUpgrade, + NegotiationError, +) +from .extensions.base import Extension, ServerExtensionFactory +from .extensions.permessage_deflate import ServerPerMessageDeflateFactory +from .handshake_legacy import build_response, check_request +from .headers import build_extension, parse_extension, parse_subprotocol +from .http import USER_AGENT +from .http_legacy import read_request +from .protocol import WebSocketCommonProtocol +from .typing import ExtensionHeader, Origin, Subprotocol + + +__all__ = ["serve", "unix_serve", "WebSocketServerProtocol", "WebSocketServer"] + +logger = logging.getLogger(__name__) + + +HeadersLikeOrCallable = Union[HeadersLike, Callable[[str, Headers], HeadersLike]] + +HTTPResponse = Tuple[http.HTTPStatus, HeadersLike, bytes] + + +class WebSocketServerProtocol(WebSocketCommonProtocol): + """ + :class:`~asyncio.Protocol` subclass implementing a WebSocket server. + + This class inherits most of its methods from + :class:`~websockets.protocol.WebSocketCommonProtocol`. + + For the sake of simplicity, it doesn't rely on a full HTTP implementation. + Its support for HTTP responses is very limited. + + """ + + is_client = False + side = "server" + + def __init__( + self, + ws_handler: Callable[["WebSocketServerProtocol", str], Awaitable[Any]], + ws_server: "WebSocketServer", + *, + origins: Optional[Sequence[Optional[Origin]]] = None, + extensions: Optional[Sequence[ServerExtensionFactory]] = None, + subprotocols: Optional[Sequence[Subprotocol]] = None, + extra_headers: Optional[HeadersLikeOrCallable] = None, + process_request: Optional[ + Callable[[str, Headers], Awaitable[Optional[HTTPResponse]]] + ] = None, + select_subprotocol: Optional[ + Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol] + ] = None, + **kwargs: Any, + ) -> None: + # For backwards compatibility with 6.0 or earlier. + if origins is not None and "" in origins: + warnings.warn("use None instead of '' in origins", DeprecationWarning) + origins = [None if origin == "" else origin for origin in origins] + self.ws_handler = ws_handler + self.ws_server = ws_server + self.origins = origins + self.available_extensions = extensions + self.available_subprotocols = subprotocols + self.extra_headers = extra_headers + self._process_request = process_request + self._select_subprotocol = select_subprotocol + super().__init__(**kwargs) + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + """ + Register connection and initialize a task to handle it. + + """ + super().connection_made(transport) + # Register the connection with the server before creating the handler + # task. Registering at the beginning of the handler coroutine would + # create a race condition between the creation of the task, which + # schedules its execution, and the moment the handler starts running. + self.ws_server.register(self) + self.handler_task = self.loop.create_task(self.handler()) + + async def handler(self) -> None: + """ + Handle the lifecycle of a WebSocket connection. 
+ + Since this method doesn't have a caller able to handle exceptions, it + attemps to log relevant ones and guarantees that the TCP connection is + closed before exiting. + + """ + try: + + try: + path = await self.handshake( + origins=self.origins, + available_extensions=self.available_extensions, + available_subprotocols=self.available_subprotocols, + extra_headers=self.extra_headers, + ) + except asyncio.CancelledError: # pragma: no cover + raise + except ConnectionError: + logger.debug("Connection error in opening handshake", exc_info=True) + raise + except Exception as exc: + if isinstance(exc, AbortHandshake): + status, headers, body = exc.status, exc.headers, exc.body + elif isinstance(exc, InvalidOrigin): + logger.debug("Invalid origin", exc_info=True) + status, headers, body = ( + http.HTTPStatus.FORBIDDEN, + Headers(), + f"Failed to open a WebSocket connection: {exc}.\n".encode(), + ) + elif isinstance(exc, InvalidUpgrade): + logger.debug("Invalid upgrade", exc_info=True) + status, headers, body = ( + http.HTTPStatus.UPGRADE_REQUIRED, + Headers([("Upgrade", "websocket")]), + ( + f"Failed to open a WebSocket connection: {exc}.\n" + f"\n" + f"You cannot access a WebSocket server directly " + f"with a browser. You need a WebSocket client.\n" + ).encode(), + ) + elif isinstance(exc, InvalidHandshake): + logger.debug("Invalid handshake", exc_info=True) + status, headers, body = ( + http.HTTPStatus.BAD_REQUEST, + Headers(), + f"Failed to open a WebSocket connection: {exc}.\n".encode(), + ) + else: + logger.warning("Error in opening handshake", exc_info=True) + status, headers, body = ( + http.HTTPStatus.INTERNAL_SERVER_ERROR, + Headers(), + ( + b"Failed to open a WebSocket connection.\n" + b"See server log for more information.\n" + ), + ) + + headers.setdefault("Date", email.utils.formatdate(usegmt=True)) + headers.setdefault("Server", USER_AGENT) + headers.setdefault("Content-Length", str(len(body))) + headers.setdefault("Content-Type", "text/plain") + headers.setdefault("Connection", "close") + + self.write_http_response(status, headers, body) + self.fail_connection() + await self.wait_closed() + return + + try: + await self.ws_handler(self, path) + except Exception: + logger.error("Error in connection handler", exc_info=True) + if not self.closed: + self.fail_connection(1011) + raise + + try: + await self.close() + except ConnectionError: + logger.debug("Connection error in closing handshake", exc_info=True) + raise + except Exception: + logger.warning("Error in closing handshake", exc_info=True) + raise + + except Exception: + # Last-ditch attempt to avoid leaking connections on errors. + try: + self.transport.close() + except Exception: # pragma: no cover + pass + + finally: + # Unregister the connection with the server when the handler task + # terminates. Registration is tied to the lifecycle of the handler + # task because the server waits for tasks attached to registered + # connections before terminating. + self.ws_server.unregister(self) + + async def read_http_request(self) -> Tuple[str, Headers]: + """ + Read request line and headers from the HTTP request. + + If the request contains a body, it may be read from ``self.reader`` + after this coroutine returns. 
+ + :raises ~websockets.exceptions.InvalidMessage: if the HTTP message is + malformed or isn't an HTTP/1.1 GET request + + """ + try: + path, headers = await read_request(self.reader) + except asyncio.CancelledError: # pragma: no cover + raise + except Exception as exc: + raise InvalidMessage("did not receive a valid HTTP request") from exc + + logger.debug("%s < GET %s HTTP/1.1", self.side, path) + logger.debug("%s < %r", self.side, headers) + + self.path = path + self.request_headers = headers + + return path, headers + + def write_http_response( + self, status: http.HTTPStatus, headers: Headers, body: Optional[bytes] = None + ) -> None: + """ + Write status line and headers to the HTTP response. + + This coroutine is also able to write a response body. + + """ + self.response_headers = headers + + logger.debug("%s > HTTP/1.1 %d %s", self.side, status.value, status.phrase) + logger.debug("%s > %r", self.side, headers) + + # Since the status line and headers only contain ASCII characters, + # we can keep this simple. + response = f"HTTP/1.1 {status.value} {status.phrase}\r\n" + response += str(headers) + + self.transport.write(response.encode()) + + if body is not None: + logger.debug("%s > body (%d bytes)", self.side, len(body)) + self.transport.write(body) + + async def process_request( + self, path: str, request_headers: Headers + ) -> Optional[HTTPResponse]: + """ + Intercept the HTTP request and return an HTTP response if appropriate. + + If ``process_request`` returns ``None``, the WebSocket handshake + continues. If it returns 3-uple containing a status code, response + headers and a response body, that HTTP response is sent and the + connection is closed. In that case: + + * The HTTP status must be a :class:`~http.HTTPStatus`. + * HTTP headers must be a :class:`~websockets.http.Headers` instance, a + :class:`~collections.abc.Mapping`, or an iterable of ``(name, + value)`` pairs. + * The HTTP response body must be :class:`bytes`. It may be empty. + + This coroutine may be overridden in a :class:`WebSocketServerProtocol` + subclass, for example: + + * to return a HTTP 200 OK response on a given path; then a load + balancer can use this path for a health check; + * to authenticate the request and return a HTTP 401 Unauthorized or a + HTTP 403 Forbidden when authentication fails. + + Instead of subclassing, it is possible to override this method by + passing a ``process_request`` argument to the :func:`serve` function + or the :class:`WebSocketServerProtocol` constructor. This is + equivalent, except ``process_request`` won't have access to the + protocol instance, so it can't store information for later use. + + ``process_request`` is expected to complete quickly. If it may run for + a long time, then it should await :meth:`wait_closed` and exit if + :meth:`wait_closed` completes, or else it could prevent the server + from shutting down. + + :param path: request path, including optional query string + :param request_headers: request headers + + """ + if self._process_request is not None: + response = self._process_request(path, request_headers) + if isinstance(response, Awaitable): + return await response + else: + # For backwards compatibility with 7.0. + warnings.warn( + "declare process_request as a coroutine", DeprecationWarning + ) + return response # type: ignore + return None + + @staticmethod + def process_origin( + headers: Headers, origins: Optional[Sequence[Optional[Origin]]] = None + ) -> Optional[Origin]: + """ + Handle the Origin HTTP request header. 
+ + :param headers: request headers + :param origins: optional list of acceptable origins + :raises ~websockets.exceptions.InvalidOrigin: if the origin isn't + acceptable + + """ + # "The user agent MUST NOT include more than one Origin header field" + # per https://tools.ietf.org/html/rfc6454#section-7.3. + try: + origin = cast(Origin, headers.get("Origin")) + except MultipleValuesError as exc: + raise InvalidHeader("Origin", "more than one Origin header found") from exc + if origins is not None: + if origin not in origins: + raise InvalidOrigin(origin) + return origin + + @staticmethod + def process_extensions( + headers: Headers, + available_extensions: Optional[Sequence[ServerExtensionFactory]], + ) -> Tuple[Optional[str], List[Extension]]: + """ + Handle the Sec-WebSocket-Extensions HTTP request header. + + Accept or reject each extension proposed in the client request. + Negotiate parameters for accepted extensions. + + Return the Sec-WebSocket-Extensions HTTP response header and the list + of accepted extensions. + + :rfc:`6455` leaves the rules up to the specification of each + :extension. + + To provide this level of flexibility, for each extension proposed by + the client, we check for a match with each extension available in the + server configuration. If no match is found, the extension is ignored. + + If several variants of the same extension are proposed by the client, + it may be accepted several times, which won't make sense in general. + Extensions must implement their own requirements. For this purpose, + the list of previously accepted extensions is provided. + + This process doesn't allow the server to reorder extensions. It can + only select a subset of the extensions proposed by the client. + + Other requirements, for example related to mandatory extensions or the + order of extensions, may be implemented by overriding this method. + + :param headers: request headers + :param extensions: optional list of supported extensions + :raises ~websockets.exceptions.InvalidHandshake: to abort the + handshake with an HTTP 400 error code + + """ + response_header_value: Optional[str] = None + + extension_headers: List[ExtensionHeader] = [] + accepted_extensions: List[Extension] = [] + + header_values = headers.get_all("Sec-WebSocket-Extensions") + + if header_values and available_extensions: + + parsed_header_values: List[ExtensionHeader] = sum( + [parse_extension(header_value) for header_value in header_values], [] + ) + + for name, request_params in parsed_header_values: + + for ext_factory in available_extensions: + + # Skip non-matching extensions based on their name. + if ext_factory.name != name: + continue + + # Skip non-matching extensions based on their params. + try: + response_params, extension = ext_factory.process_request_params( + request_params, accepted_extensions + ) + except NegotiationError: + continue + + # Add matching extension to the final list. + extension_headers.append((name, response_params)) + accepted_extensions.append(extension) + + # Break out of the loop once we have a match. + break + + # If we didn't break from the loop, no extension in our list + # matched what the client sent. The extension is declined. + + # Serialize extension header. 
+ if extension_headers: + response_header_value = build_extension(extension_headers) + + return response_header_value, accepted_extensions + + # Not @staticmethod because it calls self.select_subprotocol() + def process_subprotocol( + self, headers: Headers, available_subprotocols: Optional[Sequence[Subprotocol]] + ) -> Optional[Subprotocol]: + """ + Handle the Sec-WebSocket-Protocol HTTP request header. + + Return Sec-WebSocket-Protocol HTTP response header, which is the same + as the selected subprotocol. + + :param headers: request headers + :param available_subprotocols: optional list of supported subprotocols + :raises ~websockets.exceptions.InvalidHandshake: to abort the + handshake with an HTTP 400 error code + + """ + subprotocol: Optional[Subprotocol] = None + + header_values = headers.get_all("Sec-WebSocket-Protocol") + + if header_values and available_subprotocols: + + parsed_header_values: List[Subprotocol] = sum( + [parse_subprotocol(header_value) for header_value in header_values], [] + ) + + subprotocol = self.select_subprotocol( + parsed_header_values, available_subprotocols + ) + + return subprotocol + + def select_subprotocol( + self, + client_subprotocols: Sequence[Subprotocol], + server_subprotocols: Sequence[Subprotocol], + ) -> Optional[Subprotocol]: + """ + Pick a subprotocol among those offered by the client. + + If several subprotocols are supported by the client and the server, + the default implementation selects the preferred subprotocols by + giving equal value to the priorities of the client and the server. + + If no subprotocol is supported by the client and the server, it + proceeds without a subprotocol. + + This is unlikely to be the most useful implementation in practice, as + many servers providing a subprotocol will require that the client uses + that subprotocol. Such rules can be implemented in a subclass. + + Instead of subclassing, it is possible to override this method by + passing a ``select_subprotocol`` argument to the :func:`serve` + function or the :class:`WebSocketServerProtocol` constructor + + :param client_subprotocols: list of subprotocols offered by the client + :param server_subprotocols: list of subprotocols available on the server + + """ + if self._select_subprotocol is not None: + return self._select_subprotocol(client_subprotocols, server_subprotocols) + + subprotocols = set(client_subprotocols) & set(server_subprotocols) + if not subprotocols: + return None + priority = lambda p: ( + client_subprotocols.index(p) + server_subprotocols.index(p) + ) + return sorted(subprotocols, key=priority)[0] + + async def handshake( + self, + origins: Optional[Sequence[Optional[Origin]]] = None, + available_extensions: Optional[Sequence[ServerExtensionFactory]] = None, + available_subprotocols: Optional[Sequence[Subprotocol]] = None, + extra_headers: Optional[HeadersLikeOrCallable] = None, + ) -> str: + """ + Perform the server side of the opening handshake. + + Return the path of the URI of the request. 
+ + :param origins: list of acceptable values of the Origin HTTP header; + include ``None`` if the lack of an origin is acceptable + :param available_extensions: list of supported extensions in the order + in which they should be used + :param available_subprotocols: list of supported subprotocols in order + of decreasing preference + :param extra_headers: sets additional HTTP response headers when the + handshake succeeds; it can be a :class:`~websockets.http.Headers` + instance, a :class:`~collections.abc.Mapping`, an iterable of + ``(name, value)`` pairs, or a callable taking the request path and + headers in arguments and returning one of the above. + :raises ~websockets.exceptions.InvalidHandshake: if the handshake + fails + + """ + path, request_headers = await self.read_http_request() + + # Hook for customizing request handling, for example checking + # authentication or treating some paths as plain HTTP endpoints. + early_response_awaitable = self.process_request(path, request_headers) + if isinstance(early_response_awaitable, Awaitable): + early_response = await early_response_awaitable + else: + # For backwards compatibility with 7.0. + warnings.warn("declare process_request as a coroutine", DeprecationWarning) + early_response = early_response_awaitable # type: ignore + + # Change the response to a 503 error if the server is shutting down. + if not self.ws_server.is_serving(): + early_response = ( + http.HTTPStatus.SERVICE_UNAVAILABLE, + [], + b"Server is shutting down.\n", + ) + + if early_response is not None: + raise AbortHandshake(*early_response) + + key = check_request(request_headers) + + self.origin = self.process_origin(request_headers, origins) + + extensions_header, self.extensions = self.process_extensions( + request_headers, available_extensions + ) + + protocol_header = self.subprotocol = self.process_subprotocol( + request_headers, available_subprotocols + ) + + response_headers = Headers() + + build_response(response_headers, key) + + if extensions_header is not None: + response_headers["Sec-WebSocket-Extensions"] = extensions_header + + if protocol_header is not None: + response_headers["Sec-WebSocket-Protocol"] = protocol_header + + if callable(extra_headers): + extra_headers = extra_headers(path, self.request_headers) + if extra_headers is not None: + if isinstance(extra_headers, Headers): + extra_headers = extra_headers.raw_items() + elif isinstance(extra_headers, collections.abc.Mapping): + extra_headers = extra_headers.items() + for name, value in extra_headers: + response_headers[name] = value + + response_headers.setdefault("Date", email.utils.formatdate(usegmt=True)) + response_headers.setdefault("Server", USER_AGENT) + + self.write_http_response(http.HTTPStatus.SWITCHING_PROTOCOLS, response_headers) + + self.connection_open() + + return path + + +class WebSocketServer: + """ + WebSocket server returned by :func:`~websockets.server.serve`. + + This class provides the same interface as + :class:`~asyncio.AbstractServer`, namely the + :meth:`~asyncio.AbstractServer.close` and + :meth:`~asyncio.AbstractServer.wait_closed` methods. + + It keeps track of WebSocket connections in order to close them properly + when shutting down. + + Instances of this class store a reference to the :class:`~asyncio.Server` + object returned by :meth:`~asyncio.loop.create_server` rather than inherit + from :class:`~asyncio.Server` in part because + :meth:`~asyncio.loop.create_server` doesn't support passing a custom + :class:`~asyncio.Server` class. 
+ + """ + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + # Store a reference to loop to avoid relying on self.server._loop. + self.loop = loop + + # Keep track of active connections. + self.websockets: Set[WebSocketServerProtocol] = set() + + # Task responsible for closing the server and terminating connections. + self.close_task: Optional[asyncio.Task[None]] = None + + # Completed when the server is closed and connections are terminated. + self.closed_waiter: asyncio.Future[None] = loop.create_future() + + def wrap(self, server: asyncio.AbstractServer) -> None: + """ + Attach to a given :class:`~asyncio.Server`. + + Since :meth:`~asyncio.loop.create_server` doesn't support injecting a + custom ``Server`` class, the easiest solution that doesn't rely on + private :mod:`asyncio` APIs is to: + + - instantiate a :class:`WebSocketServer` + - give the protocol factory a reference to that instance + - call :meth:`~asyncio.loop.create_server` with the factory + - attach the resulting :class:`~asyncio.Server` with this method + + """ + self.server = server + + def register(self, protocol: WebSocketServerProtocol) -> None: + """ + Register a connection with this server. + + """ + self.websockets.add(protocol) + + def unregister(self, protocol: WebSocketServerProtocol) -> None: + """ + Unregister a connection with this server. + + """ + self.websockets.remove(protocol) + + def is_serving(self) -> bool: + """ + Tell whether the server is accepting new connections or shutting down. + + """ + try: + # Python ≥ 3.7 + return self.server.is_serving() + except AttributeError: # pragma: no cover + # Python < 3.7 + return self.server.sockets is not None + + def close(self) -> None: + """ + Close the server. + + This method: + + * closes the underlying :class:`~asyncio.Server`; + * rejects new WebSocket connections with an HTTP 503 (service + unavailable) error; this happens when the server accepted the TCP + connection but didn't complete the WebSocket opening handshake prior + to closing; + * closes open WebSocket connections with close code 1001 (going away). + + :meth:`close` is idempotent. + + """ + if self.close_task is None: + self.close_task = self.loop.create_task(self._close()) + + async def _close(self) -> None: + """ + Implementation of :meth:`close`. + + This calls :meth:`~asyncio.Server.close` on the underlying + :class:`~asyncio.Server` object to stop accepting new connections and + then closes open connections with close code 1001. + + """ + # Stop accepting new connections. + self.server.close() + + # Wait until self.server.close() completes. + await self.server.wait_closed() + + # Wait until all accepted connections reach connection_made() and call + # register(). See https://bugs.python.org/issue34852 for details. + await asyncio.sleep( + 0, loop=self.loop if sys.version_info[:2] < (3, 8) else None + ) + + # Close OPEN connections with status code 1001. Since the server was + # closed, handshake() closes OPENING conections with a HTTP 503 error. + # Wait until all connections are closed. + + # asyncio.wait doesn't accept an empty first argument + if self.websockets: + await asyncio.wait( + [ + asyncio.ensure_future(websocket.close(1001)) + for websocket in self.websockets + ], + loop=self.loop if sys.version_info[:2] < (3, 8) else None, + ) + + # Wait until all connection handlers are complete. + + # asyncio.wait doesn't accept an empty first argument. 
+ if self.websockets: + await asyncio.wait( + [websocket.handler_task for websocket in self.websockets], + loop=self.loop if sys.version_info[:2] < (3, 8) else None, + ) + + # Tell wait_closed() to return. + self.closed_waiter.set_result(None) + + async def wait_closed(self) -> None: + """ + Wait until the server is closed. + + When :meth:`wait_closed` returns, all TCP connections are closed and + all connection handlers have returned. + + """ + await asyncio.shield(self.closed_waiter) + + @property + def sockets(self) -> Optional[List[socket.socket]]: + """ + List of :class:`~socket.socket` objects the server is listening to. + + ``None`` if the server is closed. + + """ + return self.server.sockets + + +class Serve: + """ + + Create, start, and return a WebSocket server on ``host`` and ``port``. + + Whenever a client connects, the server accepts the connection, creates a + :class:`WebSocketServerProtocol`, performs the opening handshake, and + delegates to the connection handler defined by ``ws_handler``. Once the + handler completes, either normally or with an exception, the server + performs the closing handshake and closes the connection. + + Awaiting :func:`serve` yields a :class:`WebSocketServer`. This instance + provides :meth:`~websockets.server.WebSocketServer.close` and + :meth:`~websockets.server.WebSocketServer.wait_closed` methods for + terminating the server and cleaning up its resources. + + When a server is closed with :meth:`~WebSocketServer.close`, it closes all + connections with close code 1001 (going away). Connections handlers, which + are running the ``ws_handler`` coroutine, will receive a + :exc:`~websockets.exceptions.ConnectionClosedOK` exception on their + current or next interaction with the WebSocket connection. + + :func:`serve` can also be used as an asynchronous context manager. In + this case, the server is shut down when exiting the context. + + :func:`serve` is a wrapper around the event loop's + :meth:`~asyncio.loop.create_server` method. It creates and starts a + :class:`~asyncio.Server` with :meth:`~asyncio.loop.create_server`. Then it + wraps the :class:`~asyncio.Server` in a :class:`WebSocketServer` and + returns the :class:`WebSocketServer`. + + The ``ws_handler`` argument is the WebSocket handler. It must be a + coroutine accepting two arguments: a :class:`WebSocketServerProtocol` and + the request URI. + + The ``host`` and ``port`` arguments, as well as unrecognized keyword + arguments, are passed along to :meth:`~asyncio.loop.create_server`. + + For example, you can set the ``ssl`` keyword argument to a + :class:`~ssl.SSLContext` to enable TLS. + + The ``create_protocol`` parameter allows customizing the + :class:`~asyncio.Protocol` that manages the connection. It should be a + callable or class accepting the same arguments as + :class:`WebSocketServerProtocol` and returning an instance of + :class:`WebSocketServerProtocol` or a subclass. It defaults to + :class:`WebSocketServerProtocol`. + + The behavior of ``ping_interval``, ``ping_timeout``, ``close_timeout``, + ``max_size``, ``max_queue``, ``read_limit``, and ``write_limit`` is + described in :class:`~websockets.protocol.WebSocketCommonProtocol`. 
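
Since :meth:`close` and :meth:`wait_closed` are the documented way to stop such a server, a short shutdown sketch may help; the port and the sixty-second trigger are placeholders for whatever signal a real deployment uses::

    import asyncio
    import websockets

    async def handler(websocket, path):
        # Open connections are closed with code 1001 when the server stops.
        await websocket.wait_closed()

    async def run(stop):
        server = await websockets.serve(handler, "localhost", 8765)
        await stop                    # e.g. a future resolved on SIGTERM
        server.close()                # stop accepting new connections
        await server.wait_closed()    # wait for connection handlers to finish

    loop = asyncio.get_event_loop()
    stop = loop.create_future()
    loop.call_later(60, stop.set_result, None)  # placeholder trigger
    loop.run_until_complete(run(stop))
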
+ + :func:`serve` also accepts the following optional arguments: + + * ``compression`` is a shortcut to configure compression extensions; + by default it enables the "permessage-deflate" extension; set it to + ``None`` to disable compression + * ``origins`` defines acceptable Origin HTTP headers; include ``None`` if + the lack of an origin is acceptable + * ``extensions`` is a list of supported extensions in order of + decreasing preference + * ``subprotocols`` is a list of supported subprotocols in order of + decreasing preference + * ``extra_headers`` sets additional HTTP response headers when the + handshake succeeds; it can be a :class:`~websockets.http.Headers` + instance, a :class:`~collections.abc.Mapping`, an iterable of ``(name, + value)`` pairs, or a callable taking the request path and headers in + arguments and returning one of the above + * ``process_request`` allows intercepting the HTTP request; it must be a + coroutine taking the request path and headers in argument; see + :meth:`~WebSocketServerProtocol.process_request` for details + * ``select_subprotocol`` allows customizing the logic for selecting a + subprotocol; it must be a callable taking the subprotocols offered by + the client and available on the server in argument; see + :meth:`~WebSocketServerProtocol.select_subprotocol` for details + + Since there's no useful way to propagate exceptions triggered in handlers, + they're sent to the ``'websockets.asyncio_server'`` logger instead. + Debugging is much easier if you configure logging to print them:: + + import logging + logger = logging.getLogger("websockets.asyncio_server") + logger.setLevel(logging.ERROR) + logger.addHandler(logging.StreamHandler()) + + """ + + def __init__( + self, + ws_handler: Callable[[WebSocketServerProtocol, str], Awaitable[Any]], + host: Optional[Union[str, Sequence[str]]] = None, + port: Optional[int] = None, + *, + path: Optional[str] = None, + create_protocol: Optional[Type[WebSocketServerProtocol]] = None, + ping_interval: Optional[float] = 20, + ping_timeout: Optional[float] = 20, + close_timeout: Optional[float] = None, + max_size: Optional[int] = 2 ** 20, + max_queue: Optional[int] = 2 ** 5, + read_limit: int = 2 ** 16, + write_limit: int = 2 ** 16, + loop: Optional[asyncio.AbstractEventLoop] = None, + legacy_recv: bool = False, + klass: Optional[Type[WebSocketServerProtocol]] = None, + timeout: Optional[float] = None, + compression: Optional[str] = "deflate", + origins: Optional[Sequence[Optional[Origin]]] = None, + extensions: Optional[Sequence[ServerExtensionFactory]] = None, + subprotocols: Optional[Sequence[Subprotocol]] = None, + extra_headers: Optional[HeadersLikeOrCallable] = None, + process_request: Optional[ + Callable[[str, Headers], Awaitable[Optional[HTTPResponse]]] + ] = None, + select_subprotocol: Optional[ + Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol] + ] = None, + **kwargs: Any, + ) -> None: + # Backwards compatibility: close_timeout used to be called timeout. + if timeout is None: + timeout = 10 + else: + warnings.warn("rename timeout to close_timeout", DeprecationWarning) + # If both are specified, timeout is ignored. + if close_timeout is None: + close_timeout = timeout + + # Backwards compatibility: create_protocol used to be called klass. + if klass is None: + klass = WebSocketServerProtocol + else: + warnings.warn("rename klass to create_protocol", DeprecationWarning) + # If both are specified, klass is ignored. 
+ if create_protocol is None: + create_protocol = klass + + if loop is None: + loop = asyncio.get_event_loop() + + ws_server = WebSocketServer(loop) + + secure = kwargs.get("ssl") is not None + + if compression == "deflate": + if extensions is None: + extensions = [] + if not any( + ext_factory.name == ServerPerMessageDeflateFactory.name + for ext_factory in extensions + ): + extensions = list(extensions) + [ServerPerMessageDeflateFactory()] + elif compression is not None: + raise ValueError(f"unsupported compression: {compression}") + + factory = functools.partial( + create_protocol, + ws_handler, + ws_server, + host=host, + port=port, + secure=secure, + ping_interval=ping_interval, + ping_timeout=ping_timeout, + close_timeout=close_timeout, + max_size=max_size, + max_queue=max_queue, + read_limit=read_limit, + write_limit=write_limit, + loop=loop, + legacy_recv=legacy_recv, + origins=origins, + extensions=extensions, + subprotocols=subprotocols, + extra_headers=extra_headers, + process_request=process_request, + select_subprotocol=select_subprotocol, + ) + + if path is None: + create_server = functools.partial( + loop.create_server, factory, host, port, **kwargs + ) + else: + # unix_serve(path) must not specify host and port parameters. + assert host is None and port is None + create_server = functools.partial( + loop.create_unix_server, factory, path, **kwargs + ) + + # This is a coroutine function. + self._create_server = create_server + self.ws_server = ws_server + + # async with serve(...) + + async def __aenter__(self) -> WebSocketServer: + return await self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: + self.ws_server.close() + await self.ws_server.wait_closed() + + # await serve(...) + + def __await__(self) -> Generator[Any, None, WebSocketServer]: + # Create a suitable iterator by calling __await__ on a coroutine. + return self.__await_impl__().__await__() + + async def __await_impl__(self) -> WebSocketServer: + server = await self._create_server() + self.ws_server.wrap(server) + return self.ws_server + + # yield from serve(...) + + __iter__ = __await__ + + +serve = Serve + + +def unix_serve( + ws_handler: Callable[[WebSocketServerProtocol, str], Awaitable[Any]], + path: str, + **kwargs: Any, +) -> Serve: + """ + Similar to :func:`serve`, but for listening on Unix sockets. + + This function calls the event loop's + :meth:`~asyncio.loop.create_unix_server` method. + + It is only available on Unix. + + It's useful for deploying a server behind a reverse proxy such as nginx. 
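
A minimal sketch of that deployment style, assuming a placeholder socket path and that the top-level ``websockets.unix_serve`` alias is available; nginx (or another reverse proxy) would forward WebSocket traffic to the socket::

    import asyncio
    import websockets

    async def handler(websocket, path):
        await websocket.send("served over a Unix socket")

    # /tmp/websockets.sock is a placeholder; point the reverse proxy at it.
    start_server = websockets.unix_serve(handler, "/tmp/websockets.sock")

    asyncio.get_event_loop().run_until_complete(start_server)
    asyncio.get_event_loop().run_forever()
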
+ + :param path: file system path to the Unix socket + + """ + return serve(ws_handler, path=path, **kwargs) diff --git a/src/websockets/auth.py b/src/websockets/auth.py index 8198cd9d0..03e8536c5 100644 --- a/src/websockets/auth.py +++ b/src/websockets/auth.py @@ -9,10 +9,10 @@ import http from typing import Any, Awaitable, Callable, Iterable, Optional, Tuple, Type, Union +from .asyncio_server import HTTPResponse, WebSocketServerProtocol from .datastructures import Headers from .exceptions import InvalidHeader from .headers import build_www_authenticate_basic, parse_authorization_basic -from .server import HTTPResponse, WebSocketServerProtocol __all__ = ["BasicAuthWebSocketServerProtocol", "basic_auth_protocol_factory"] diff --git a/src/websockets/client.py b/src/websockets/client.py index f95dae060..c7d153f13 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -1,588 +1,8 @@ -""" -:mod:`websockets.client` defines the WebSocket client APIs. +from .asyncio_client import WebSocketClientProtocol, connect, unix_connect -""" -import asyncio -import collections.abc -import functools -import logging -import warnings -from types import TracebackType -from typing import Any, Generator, List, Optional, Sequence, Tuple, Type, cast - -from .datastructures import Headers, HeadersLike -from .exceptions import ( - InvalidHandshake, - InvalidHeader, - InvalidMessage, - InvalidStatusCode, - NegotiationError, - RedirectHandshake, - SecurityError, -) -from .extensions.base import ClientExtensionFactory, Extension -from .extensions.permessage_deflate import ClientPerMessageDeflateFactory -from .handshake_legacy import build_request, check_response -from .headers import ( - build_authorization_basic, - build_extension, - build_subprotocol, - parse_extension, - parse_subprotocol, -) -from .http import USER_AGENT -from .http_legacy import read_response -from .protocol import WebSocketCommonProtocol -from .typing import ExtensionHeader, Origin, Subprotocol -from .uri import WebSocketURI, parse_uri - - -__all__ = ["connect", "unix_connect", "WebSocketClientProtocol"] - -logger = logging.getLogger(__name__) - - -class WebSocketClientProtocol(WebSocketCommonProtocol): - """ - :class:`~asyncio.Protocol` subclass implementing a WebSocket client. - - This class inherits most of its methods from - :class:`~websockets.protocol.WebSocketCommonProtocol`. - - """ - - is_client = True - side = "client" - - def __init__( - self, - *, - origin: Optional[Origin] = None, - extensions: Optional[Sequence[ClientExtensionFactory]] = None, - subprotocols: Optional[Sequence[Subprotocol]] = None, - extra_headers: Optional[HeadersLike] = None, - **kwargs: Any, - ) -> None: - self.origin = origin - self.available_extensions = extensions - self.available_subprotocols = subprotocols - self.extra_headers = extra_headers - super().__init__(**kwargs) - - def write_http_request(self, path: str, headers: Headers) -> None: - """ - Write request line and headers to the HTTP request. - - """ - self.path = path - self.request_headers = headers - - logger.debug("%s > GET %s HTTP/1.1", self.side, path) - logger.debug("%s > %r", self.side, headers) - - # Since the path and headers only contain ASCII characters, - # we can keep this simple. - request = f"GET {path} HTTP/1.1\r\n" - request += str(headers) - - self.transport.write(request.encode()) - - async def read_http_response(self) -> Tuple[int, Headers]: - """ - Read status line and headers from the HTTP response. 
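
The ``auth`` module whose import is adjusted above exposes ``basic_auth_protocol_factory``. From the library's documented usage it is typically wired into :func:`serve` through ``create_protocol``; the realm, credentials, and port below are placeholders, and the exact signature should be treated as an assumption::

    import asyncio
    import websockets
    from websockets.auth import basic_auth_protocol_factory

    async def handler(websocket, path):
        await websocket.send("authenticated")

    start_server = websockets.serve(
        handler, "localhost", 8765,
        create_protocol=basic_auth_protocol_factory(
            realm="example realm",              # placeholder realm
            credentials=("user", "change-me"),  # placeholder credentials
        ),
    )

    asyncio.get_event_loop().run_until_complete(start_server)
    asyncio.get_event_loop().run_forever()
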
- - If the response contains a body, it may be read from ``self.reader`` - after this coroutine returns. - - :raises ~websockets.exceptions.InvalidMessage: if the HTTP message is - malformed or isn't an HTTP/1.1 GET response - - """ - try: - status_code, reason, headers = await read_response(self.reader) - except asyncio.CancelledError: # pragma: no cover - raise - except Exception as exc: - raise InvalidMessage("did not receive a valid HTTP response") from exc - - logger.debug("%s < HTTP/1.1 %d %s", self.side, status_code, reason) - logger.debug("%s < %r", self.side, headers) - - self.response_headers = headers - - return status_code, self.response_headers - - @staticmethod - def process_extensions( - headers: Headers, - available_extensions: Optional[Sequence[ClientExtensionFactory]], - ) -> List[Extension]: - """ - Handle the Sec-WebSocket-Extensions HTTP response header. - - Check that each extension is supported, as well as its parameters. - - Return the list of accepted extensions. - - Raise :exc:`~websockets.exceptions.InvalidHandshake` to abort the - connection. - - :rfc:`6455` leaves the rules up to the specification of each - :extension. - - To provide this level of flexibility, for each extension accepted by - the server, we check for a match with each extension available in the - client configuration. If no match is found, an exception is raised. - - If several variants of the same extension are accepted by the server, - it may be configured several times, which won't make sense in general. - Extensions must implement their own requirements. For this purpose, - the list of previously accepted extensions is provided. - - Other requirements, for example related to mandatory extensions or the - order of extensions, may be implemented by overriding this method. - - """ - accepted_extensions: List[Extension] = [] - - header_values = headers.get_all("Sec-WebSocket-Extensions") - - if header_values: - - if available_extensions is None: - raise InvalidHandshake("no extensions supported") - - parsed_header_values: List[ExtensionHeader] = sum( - [parse_extension(header_value) for header_value in header_values], [] - ) - - for name, response_params in parsed_header_values: - - for extension_factory in available_extensions: - - # Skip non-matching extensions based on their name. - if extension_factory.name != name: - continue - - # Skip non-matching extensions based on their params. - try: - extension = extension_factory.process_response_params( - response_params, accepted_extensions - ) - except NegotiationError: - continue - - # Add matching extension to the final list. - accepted_extensions.append(extension) - - # Break out of the loop once we have a match. - break - - # If we didn't break from the loop, no extension in our list - # matched what the server sent. Fail the connection. - else: - raise NegotiationError( - f"Unsupported extension: " - f"name = {name}, params = {response_params}" - ) - - return accepted_extensions - - @staticmethod - def process_subprotocol( - headers: Headers, available_subprotocols: Optional[Sequence[Subprotocol]] - ) -> Optional[Subprotocol]: - """ - Handle the Sec-WebSocket-Protocol HTTP response header. - - Check that it contains exactly one supported subprotocol. - - Return the selected subprotocol. 
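
On the client side, the offer checked by this method comes from the ``subprotocols`` argument of :func:`connect`; the subprotocol names and address below are placeholders::

    import asyncio
    import websockets

    async def negotiate(uri):
        # Offer two subprotocols; the server selects at most one of them.
        async with websockets.connect(
            uri, subprotocols=["chat.v2.example", "chat.v1.example"]
        ) as websocket:
            print("negotiated subprotocol:", websocket.subprotocol)

    asyncio.get_event_loop().run_until_complete(
        negotiate("ws://localhost:8765"))
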
- - """ - subprotocol: Optional[Subprotocol] = None - - header_values = headers.get_all("Sec-WebSocket-Protocol") - - if header_values: - - if available_subprotocols is None: - raise InvalidHandshake("no subprotocols supported") - - parsed_header_values: Sequence[Subprotocol] = sum( - [parse_subprotocol(header_value) for header_value in header_values], [] - ) - - if len(parsed_header_values) > 1: - subprotocols = ", ".join(parsed_header_values) - raise InvalidHandshake(f"multiple subprotocols: {subprotocols}") - - subprotocol = parsed_header_values[0] - - if subprotocol not in available_subprotocols: - raise NegotiationError(f"unsupported subprotocol: {subprotocol}") - - return subprotocol - - async def handshake( - self, - wsuri: WebSocketURI, - origin: Optional[Origin] = None, - available_extensions: Optional[Sequence[ClientExtensionFactory]] = None, - available_subprotocols: Optional[Sequence[Subprotocol]] = None, - extra_headers: Optional[HeadersLike] = None, - ) -> None: - """ - Perform the client side of the opening handshake. - - :param origin: sets the Origin HTTP header - :param available_extensions: list of supported extensions in the order - in which they should be used - :param available_subprotocols: list of supported subprotocols in order - of decreasing preference - :param extra_headers: sets additional HTTP request headers; it must be - a :class:`~websockets.http.Headers` instance, a - :class:`~collections.abc.Mapping`, or an iterable of ``(name, - value)`` pairs - :raises ~websockets.exceptions.InvalidHandshake: if the handshake - fails - - """ - request_headers = Headers() - - if wsuri.port == (443 if wsuri.secure else 80): # pragma: no cover - request_headers["Host"] = wsuri.host - else: - request_headers["Host"] = f"{wsuri.host}:{wsuri.port}" - - if wsuri.user_info: - request_headers["Authorization"] = build_authorization_basic( - *wsuri.user_info - ) - - if origin is not None: - request_headers["Origin"] = origin - - key = build_request(request_headers) - - if available_extensions is not None: - extensions_header = build_extension( - [ - (extension_factory.name, extension_factory.get_request_params()) - for extension_factory in available_extensions - ] - ) - request_headers["Sec-WebSocket-Extensions"] = extensions_header - - if available_subprotocols is not None: - protocol_header = build_subprotocol(available_subprotocols) - request_headers["Sec-WebSocket-Protocol"] = protocol_header - - if extra_headers is not None: - if isinstance(extra_headers, Headers): - extra_headers = extra_headers.raw_items() - elif isinstance(extra_headers, collections.abc.Mapping): - extra_headers = extra_headers.items() - for name, value in extra_headers: - request_headers[name] = value - - request_headers.setdefault("User-Agent", USER_AGENT) - - self.write_http_request(wsuri.resource_name, request_headers) - - status_code, response_headers = await self.read_http_response() - if status_code in (301, 302, 303, 307, 308): - if "Location" not in response_headers: - raise InvalidHeader("Location") - raise RedirectHandshake(response_headers["Location"]) - elif status_code != 101: - raise InvalidStatusCode(status_code) - - check_response(response_headers, key) - - self.extensions = self.process_extensions( - response_headers, available_extensions - ) - - self.subprotocol = self.process_subprotocol( - response_headers, available_subprotocols - ) - - self.connection_open() - - -class Connect: - """ - Connect to the WebSocket server at the given ``uri``. 
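
As ``handshake()`` above shows, credentials embedded in the URI are turned into an ``Authorization`` header with ``build_authorization_basic``. A small sketch with placeholder credentials and address; a real deployment would prefer ``wss://`` so the credentials travel over TLS::

    import asyncio
    import websockets

    async def authenticated_client(uri):
        # user:change-me in the URI becomes an HTTP Basic Authorization header.
        async with websockets.connect(uri) as websocket:
            await websocket.send("authenticated")

    asyncio.get_event_loop().run_until_complete(
        authenticated_client("ws://user:change-me@localhost:8765"))
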
- - Awaiting :func:`connect` yields a :class:`WebSocketClientProtocol` which - can then be used to send and receive messages. - - :func:`connect` can also be used as a asynchronous context manager. In - that case, the connection is closed when exiting the context. - - :func:`connect` is a wrapper around the event loop's - :meth:`~asyncio.loop.create_connection` method. Unknown keyword arguments - are passed to :meth:`~asyncio.loop.create_connection`. - - For example, you can set the ``ssl`` keyword argument to a - :class:`~ssl.SSLContext` to enforce some TLS settings. When connecting to - a ``wss://`` URI, if this argument isn't provided explicitly, - :func:`ssl.create_default_context` is called to create a context. - - You can connect to a different host and port from those found in ``uri`` - by setting ``host`` and ``port`` keyword arguments. This only changes the - destination of the TCP connection. The host name from ``uri`` is still - used in the TLS handshake for secure connections and in the ``Host`` HTTP - header. - - The ``create_protocol`` parameter allows customizing the - :class:`~asyncio.Protocol` that manages the connection. It should be a - callable or class accepting the same arguments as - :class:`WebSocketClientProtocol` and returning an instance of - :class:`WebSocketClientProtocol` or a subclass. It defaults to - :class:`WebSocketClientProtocol`. - - The behavior of ``ping_interval``, ``ping_timeout``, ``close_timeout``, - ``max_size``, ``max_queue``, ``read_limit``, and ``write_limit`` is - described in :class:`~websockets.protocol.WebSocketCommonProtocol`. - - :func:`connect` also accepts the following optional arguments: - - * ``compression`` is a shortcut to configure compression extensions; - by default it enables the "permessage-deflate" extension; set it to - ``None`` to disable compression - * ``origin`` sets the Origin HTTP header - * ``extensions`` is a list of supported extensions in order of - decreasing preference - * ``subprotocols`` is a list of supported subprotocols in order of - decreasing preference - * ``extra_headers`` sets additional HTTP request headers; it can be a - :class:`~websockets.http.Headers` instance, a - :class:`~collections.abc.Mapping`, or an iterable of ``(name, value)`` - pairs - - :raises ~websockets.uri.InvalidURI: if ``uri`` is invalid - :raises ~websockets.handshake.InvalidHandshake: if the opening handshake - fails - - """ - - MAX_REDIRECTS_ALLOWED = 10 - - def __init__( - self, - uri: str, - *, - path: Optional[str] = None, - create_protocol: Optional[Type[WebSocketClientProtocol]] = None, - ping_interval: Optional[float] = 20, - ping_timeout: Optional[float] = 20, - close_timeout: Optional[float] = None, - max_size: Optional[int] = 2 ** 20, - max_queue: Optional[int] = 2 ** 5, - read_limit: int = 2 ** 16, - write_limit: int = 2 ** 16, - loop: Optional[asyncio.AbstractEventLoop] = None, - legacy_recv: bool = False, - klass: Optional[Type[WebSocketClientProtocol]] = None, - timeout: Optional[float] = None, - compression: Optional[str] = "deflate", - origin: Optional[Origin] = None, - extensions: Optional[Sequence[ClientExtensionFactory]] = None, - subprotocols: Optional[Sequence[Subprotocol]] = None, - extra_headers: Optional[HeadersLike] = None, - **kwargs: Any, - ) -> None: - # Backwards compatibility: close_timeout used to be called timeout. - if timeout is None: - timeout = 10 - else: - warnings.warn("rename timeout to close_timeout", DeprecationWarning) - # If both are specified, timeout is ignored. 
- if close_timeout is None: - close_timeout = timeout - - # Backwards compatibility: create_protocol used to be called klass. - if klass is None: - klass = WebSocketClientProtocol - else: - warnings.warn("rename klass to create_protocol", DeprecationWarning) - # If both are specified, klass is ignored. - if create_protocol is None: - create_protocol = klass - - if loop is None: - loop = asyncio.get_event_loop() - - wsuri = parse_uri(uri) - if wsuri.secure: - kwargs.setdefault("ssl", True) - elif kwargs.get("ssl") is not None: - raise ValueError( - "connect() received a ssl argument for a ws:// URI, " - "use a wss:// URI to enable TLS" - ) - - if compression == "deflate": - if extensions is None: - extensions = [] - if not any( - extension_factory.name == ClientPerMessageDeflateFactory.name - for extension_factory in extensions - ): - extensions = list(extensions) + [ - ClientPerMessageDeflateFactory(client_max_window_bits=True) - ] - elif compression is not None: - raise ValueError(f"unsupported compression: {compression}") - - factory = functools.partial( - create_protocol, - ping_interval=ping_interval, - ping_timeout=ping_timeout, - close_timeout=close_timeout, - max_size=max_size, - max_queue=max_queue, - read_limit=read_limit, - write_limit=write_limit, - loop=loop, - host=wsuri.host, - port=wsuri.port, - secure=wsuri.secure, - legacy_recv=legacy_recv, - origin=origin, - extensions=extensions, - subprotocols=subprotocols, - extra_headers=extra_headers, - ) - - if path is None: - host: Optional[str] - port: Optional[int] - if kwargs.get("sock") is None: - host, port = wsuri.host, wsuri.port - else: - # If sock is given, host and port shouldn't be specified. - host, port = None, None - # If host and port are given, override values from the URI. - host = kwargs.pop("host", host) - port = kwargs.pop("port", port) - create_connection = functools.partial( - loop.create_connection, factory, host, port, **kwargs - ) - else: - create_connection = functools.partial( - loop.create_unix_connection, factory, path, **kwargs - ) - - # This is a coroutine function. - self._create_connection = create_connection - self._wsuri = wsuri - - def handle_redirect(self, uri: str) -> None: - # Update the state of this instance to connect to a new URI. - old_wsuri = self._wsuri - new_wsuri = parse_uri(uri) - - # Forbid TLS downgrade. - if old_wsuri.secure and not new_wsuri.secure: - raise SecurityError("redirect from WSS to WS") - - same_origin = ( - old_wsuri.host == new_wsuri.host and old_wsuri.port == new_wsuri.port - ) - - # Rewrite the host and port arguments for cross-origin redirects. - # This preserves connection overrides with the host and port - # arguments if the redirect points to the same host and port. - if not same_origin: - # Replace the host and port argument passed to the protocol factory. - factory = self._create_connection.args[0] - factory = functools.partial( - factory.func, - *factory.args, - **dict(factory.keywords, host=new_wsuri.host, port=new_wsuri.port), - ) - # Replace the host and port argument passed to create_connection. - self._create_connection = functools.partial( - self._create_connection.func, - *(factory, new_wsuri.host, new_wsuri.port), - **self._create_connection.keywords, - ) - - # Set the new WebSocket URI. This suffices for same-origin redirects. - self._wsuri = new_wsuri - - # async with connect(...) 
- - async def __aenter__(self) -> WebSocketClientProtocol: - return await self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], - ) -> None: - await self.ws_client.close() - - # await connect(...) - - def __await__(self) -> Generator[Any, None, WebSocketClientProtocol]: - # Create a suitable iterator by calling __await__ on a coroutine. - return self.__await_impl__().__await__() - - async def __await_impl__(self) -> WebSocketClientProtocol: - for redirects in range(self.MAX_REDIRECTS_ALLOWED): - transport, protocol = await self._create_connection() - # https://github.com/python/typeshed/pull/2756 - transport = cast(asyncio.Transport, transport) - protocol = cast(WebSocketClientProtocol, protocol) - - try: - try: - await protocol.handshake( - self._wsuri, - origin=protocol.origin, - available_extensions=protocol.available_extensions, - available_subprotocols=protocol.available_subprotocols, - extra_headers=protocol.extra_headers, - ) - except Exception: - protocol.fail_connection() - await protocol.wait_closed() - raise - else: - self.ws_client = protocol - return protocol - except RedirectHandshake as exc: - self.handle_redirect(exc.uri) - else: - raise SecurityError("too many redirects") - - # yield from connect(...) - - __iter__ = __await__ - - -connect = Connect - - -def unix_connect(path: str, uri: str = "ws://localhost/", **kwargs: Any) -> Connect: - """ - Similar to :func:`connect`, but for connecting to a Unix socket. - - This function calls the event loop's - :meth:`~asyncio.loop.create_unix_connection` method. - - It is only available on Unix. - - It's mainly useful for debugging servers listening on Unix sockets. - - :param path: file system path to the Unix socket - :param uri: WebSocket URI - - """ - return connect(uri=uri, path=path, **kwargs) +__all__ = [ + "connect", + "unix_connect", + "WebSocketClientProtocol", +] diff --git a/src/websockets/server.py b/src/websockets/server.py index 522c76114..ec94a2fbf 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -1,1004 +1,9 @@ -""" -:mod:`websockets.server` defines the WebSocket server APIs. 
+from .asyncio_server import WebSocketServer, WebSocketServerProtocol, serve, unix_serve -""" -import asyncio -import collections.abc -import email.utils -import functools -import http -import logging -import socket -import sys -import warnings -from types import TracebackType -from typing import ( - Any, - Awaitable, - Callable, - Generator, - List, - Optional, - Sequence, - Set, - Tuple, - Type, - Union, - cast, -) - -from .datastructures import Headers, HeadersLike, MultipleValuesError -from .exceptions import ( - AbortHandshake, - InvalidHandshake, - InvalidHeader, - InvalidMessage, - InvalidOrigin, - InvalidUpgrade, - NegotiationError, -) -from .extensions.base import Extension, ServerExtensionFactory -from .extensions.permessage_deflate import ServerPerMessageDeflateFactory -from .handshake_legacy import build_response, check_request -from .headers import build_extension, parse_extension, parse_subprotocol -from .http import USER_AGENT -from .http_legacy import read_request -from .protocol import WebSocketCommonProtocol -from .typing import ExtensionHeader, Origin, Subprotocol - - -__all__ = ["serve", "unix_serve", "WebSocketServerProtocol", "WebSocketServer"] - -logger = logging.getLogger(__name__) - - -HeadersLikeOrCallable = Union[HeadersLike, Callable[[str, Headers], HeadersLike]] - -HTTPResponse = Tuple[http.HTTPStatus, HeadersLike, bytes] - - -class WebSocketServerProtocol(WebSocketCommonProtocol): - """ - :class:`~asyncio.Protocol` subclass implementing a WebSocket server. - - This class inherits most of its methods from - :class:`~websockets.protocol.WebSocketCommonProtocol`. - - For the sake of simplicity, it doesn't rely on a full HTTP implementation. - Its support for HTTP responses is very limited. - - """ - - is_client = False - side = "server" - - def __init__( - self, - ws_handler: Callable[["WebSocketServerProtocol", str], Awaitable[Any]], - ws_server: "WebSocketServer", - *, - origins: Optional[Sequence[Optional[Origin]]] = None, - extensions: Optional[Sequence[ServerExtensionFactory]] = None, - subprotocols: Optional[Sequence[Subprotocol]] = None, - extra_headers: Optional[HeadersLikeOrCallable] = None, - process_request: Optional[ - Callable[[str, Headers], Awaitable[Optional[HTTPResponse]]] - ] = None, - select_subprotocol: Optional[ - Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol] - ] = None, - **kwargs: Any, - ) -> None: - # For backwards compatibility with 6.0 or earlier. - if origins is not None and "" in origins: - warnings.warn("use None instead of '' in origins", DeprecationWarning) - origins = [None if origin == "" else origin for origin in origins] - self.ws_handler = ws_handler - self.ws_server = ws_server - self.origins = origins - self.available_extensions = extensions - self.available_subprotocols = subprotocols - self.extra_headers = extra_headers - self._process_request = process_request - self._select_subprotocol = select_subprotocol - super().__init__(**kwargs) - - def connection_made(self, transport: asyncio.BaseTransport) -> None: - """ - Register connection and initialize a task to handle it. - - """ - super().connection_made(transport) - # Register the connection with the server before creating the handler - # task. Registering at the beginning of the handler coroutine would - # create a race condition between the creation of the task, which - # schedules its execution, and the moment the handler starts running. 
- self.ws_server.register(self) - self.handler_task = self.loop.create_task(self.handler()) - - async def handler(self) -> None: - """ - Handle the lifecycle of a WebSocket connection. - - Since this method doesn't have a caller able to handle exceptions, it - attemps to log relevant ones and guarantees that the TCP connection is - closed before exiting. - - """ - try: - - try: - path = await self.handshake( - origins=self.origins, - available_extensions=self.available_extensions, - available_subprotocols=self.available_subprotocols, - extra_headers=self.extra_headers, - ) - except asyncio.CancelledError: # pragma: no cover - raise - except ConnectionError: - logger.debug("Connection error in opening handshake", exc_info=True) - raise - except Exception as exc: - if isinstance(exc, AbortHandshake): - status, headers, body = exc.status, exc.headers, exc.body - elif isinstance(exc, InvalidOrigin): - logger.debug("Invalid origin", exc_info=True) - status, headers, body = ( - http.HTTPStatus.FORBIDDEN, - Headers(), - f"Failed to open a WebSocket connection: {exc}.\n".encode(), - ) - elif isinstance(exc, InvalidUpgrade): - logger.debug("Invalid upgrade", exc_info=True) - status, headers, body = ( - http.HTTPStatus.UPGRADE_REQUIRED, - Headers([("Upgrade", "websocket")]), - ( - f"Failed to open a WebSocket connection: {exc}.\n" - f"\n" - f"You cannot access a WebSocket server directly " - f"with a browser. You need a WebSocket client.\n" - ).encode(), - ) - elif isinstance(exc, InvalidHandshake): - logger.debug("Invalid handshake", exc_info=True) - status, headers, body = ( - http.HTTPStatus.BAD_REQUEST, - Headers(), - f"Failed to open a WebSocket connection: {exc}.\n".encode(), - ) - else: - logger.warning("Error in opening handshake", exc_info=True) - status, headers, body = ( - http.HTTPStatus.INTERNAL_SERVER_ERROR, - Headers(), - ( - b"Failed to open a WebSocket connection.\n" - b"See server log for more information.\n" - ), - ) - - headers.setdefault("Date", email.utils.formatdate(usegmt=True)) - headers.setdefault("Server", USER_AGENT) - headers.setdefault("Content-Length", str(len(body))) - headers.setdefault("Content-Type", "text/plain") - headers.setdefault("Connection", "close") - - self.write_http_response(status, headers, body) - self.fail_connection() - await self.wait_closed() - return - - try: - await self.ws_handler(self, path) - except Exception: - logger.error("Error in connection handler", exc_info=True) - if not self.closed: - self.fail_connection(1011) - raise - - try: - await self.close() - except ConnectionError: - logger.debug("Connection error in closing handshake", exc_info=True) - raise - except Exception: - logger.warning("Error in closing handshake", exc_info=True) - raise - - except Exception: - # Last-ditch attempt to avoid leaking connections on errors. - try: - self.transport.close() - except Exception: # pragma: no cover - pass - - finally: - # Unregister the connection with the server when the handler task - # terminates. Registration is tied to the lifecycle of the handler - # task because the server waits for tasks attached to registered - # connections before terminating. - self.ws_server.unregister(self) - - async def read_http_request(self) -> Tuple[str, Headers]: - """ - Read request line and headers from the HTTP request. - - If the request contains a body, it may be read from ``self.reader`` - after this coroutine returns. 
- - :raises ~websockets.exceptions.InvalidMessage: if the HTTP message is - malformed or isn't an HTTP/1.1 GET request - - """ - try: - path, headers = await read_request(self.reader) - except asyncio.CancelledError: # pragma: no cover - raise - except Exception as exc: - raise InvalidMessage("did not receive a valid HTTP request") from exc - - logger.debug("%s < GET %s HTTP/1.1", self.side, path) - logger.debug("%s < %r", self.side, headers) - - self.path = path - self.request_headers = headers - - return path, headers - - def write_http_response( - self, status: http.HTTPStatus, headers: Headers, body: Optional[bytes] = None - ) -> None: - """ - Write status line and headers to the HTTP response. - - This coroutine is also able to write a response body. - - """ - self.response_headers = headers - - logger.debug("%s > HTTP/1.1 %d %s", self.side, status.value, status.phrase) - logger.debug("%s > %r", self.side, headers) - - # Since the status line and headers only contain ASCII characters, - # we can keep this simple. - response = f"HTTP/1.1 {status.value} {status.phrase}\r\n" - response += str(headers) - - self.transport.write(response.encode()) - - if body is not None: - logger.debug("%s > body (%d bytes)", self.side, len(body)) - self.transport.write(body) - - async def process_request( - self, path: str, request_headers: Headers - ) -> Optional[HTTPResponse]: - """ - Intercept the HTTP request and return an HTTP response if appropriate. - - If ``process_request`` returns ``None``, the WebSocket handshake - continues. If it returns 3-uple containing a status code, response - headers and a response body, that HTTP response is sent and the - connection is closed. In that case: - - * The HTTP status must be a :class:`~http.HTTPStatus`. - * HTTP headers must be a :class:`~websockets.http.Headers` instance, a - :class:`~collections.abc.Mapping`, or an iterable of ``(name, - value)`` pairs. - * The HTTP response body must be :class:`bytes`. It may be empty. - - This coroutine may be overridden in a :class:`WebSocketServerProtocol` - subclass, for example: - - * to return a HTTP 200 OK response on a given path; then a load - balancer can use this path for a health check; - * to authenticate the request and return a HTTP 401 Unauthorized or a - HTTP 403 Forbidden when authentication fails. - - Instead of subclassing, it is possible to override this method by - passing a ``process_request`` argument to the :func:`serve` function - or the :class:`WebSocketServerProtocol` constructor. This is - equivalent, except ``process_request`` won't have access to the - protocol instance, so it can't store information for later use. - - ``process_request`` is expected to complete quickly. If it may run for - a long time, then it should await :meth:`wait_closed` and exit if - :meth:`wait_closed` completes, or else it could prevent the server - from shutting down. - - :param path: request path, including optional query string - :param request_headers: request headers - - """ - if self._process_request is not None: - response = self._process_request(path, request_headers) - if isinstance(response, Awaitable): - return await response - else: - # For backwards compatibility with 7.0. - warnings.warn( - "declare process_request as a coroutine", DeprecationWarning - ) - return response # type: ignore - return None - - @staticmethod - def process_origin( - headers: Headers, origins: Optional[Sequence[Optional[Origin]]] = None - ) -> Optional[Origin]: - """ - Handle the Origin HTTP request header. 
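
Configuring which origins pass this check is done through the ``origins`` argument of :func:`serve`; the origin value and port below are placeholders, and the ``None`` entry stands for requests that carry no Origin header at all::

    import asyncio
    import websockets

    async def handler(websocket, path):
        await websocket.send("origin accepted")

    # Accept pages served from one example origin plus non-browser clients.
    start_server = websockets.serve(
        handler, "localhost", 8765,
        origins=["https://example.com", None],
    )

    asyncio.get_event_loop().run_until_complete(start_server)
    asyncio.get_event_loop().run_forever()
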
- - :param headers: request headers - :param origins: optional list of acceptable origins - :raises ~websockets.exceptions.InvalidOrigin: if the origin isn't - acceptable - - """ - # "The user agent MUST NOT include more than one Origin header field" - # per https://tools.ietf.org/html/rfc6454#section-7.3. - try: - origin = cast(Origin, headers.get("Origin")) - except MultipleValuesError as exc: - raise InvalidHeader("Origin", "more than one Origin header found") from exc - if origins is not None: - if origin not in origins: - raise InvalidOrigin(origin) - return origin - - @staticmethod - def process_extensions( - headers: Headers, - available_extensions: Optional[Sequence[ServerExtensionFactory]], - ) -> Tuple[Optional[str], List[Extension]]: - """ - Handle the Sec-WebSocket-Extensions HTTP request header. - - Accept or reject each extension proposed in the client request. - Negotiate parameters for accepted extensions. - - Return the Sec-WebSocket-Extensions HTTP response header and the list - of accepted extensions. - - :rfc:`6455` leaves the rules up to the specification of each - :extension. - - To provide this level of flexibility, for each extension proposed by - the client, we check for a match with each extension available in the - server configuration. If no match is found, the extension is ignored. - - If several variants of the same extension are proposed by the client, - it may be accepted several times, which won't make sense in general. - Extensions must implement their own requirements. For this purpose, - the list of previously accepted extensions is provided. - - This process doesn't allow the server to reorder extensions. It can - only select a subset of the extensions proposed by the client. - - Other requirements, for example related to mandatory extensions or the - order of extensions, may be implemented by overriding this method. - - :param headers: request headers - :param extensions: optional list of supported extensions - :raises ~websockets.exceptions.InvalidHandshake: to abort the - handshake with an HTTP 400 error code - - """ - response_header_value: Optional[str] = None - - extension_headers: List[ExtensionHeader] = [] - accepted_extensions: List[Extension] = [] - - header_values = headers.get_all("Sec-WebSocket-Extensions") - - if header_values and available_extensions: - - parsed_header_values: List[ExtensionHeader] = sum( - [parse_extension(header_value) for header_value in header_values], [] - ) - - for name, request_params in parsed_header_values: - - for ext_factory in available_extensions: - - # Skip non-matching extensions based on their name. - if ext_factory.name != name: - continue - - # Skip non-matching extensions based on their params. - try: - response_params, extension = ext_factory.process_request_params( - request_params, accepted_extensions - ) - except NegotiationError: - continue - - # Add matching extension to the final list. - extension_headers.append((name, response_params)) - accepted_extensions.append(extension) - - # Break out of the loop once we have a match. - break - - # If we didn't break from the loop, no extension in our list - # matched what the client sent. The extension is declined. - - # Serialize extension header. 
- if extension_headers: - response_header_value = build_extension(extension_headers) - - return response_header_value, accepted_extensions - - # Not @staticmethod because it calls self.select_subprotocol() - def process_subprotocol( - self, headers: Headers, available_subprotocols: Optional[Sequence[Subprotocol]] - ) -> Optional[Subprotocol]: - """ - Handle the Sec-WebSocket-Protocol HTTP request header. - - Return Sec-WebSocket-Protocol HTTP response header, which is the same - as the selected subprotocol. - - :param headers: request headers - :param available_subprotocols: optional list of supported subprotocols - :raises ~websockets.exceptions.InvalidHandshake: to abort the - handshake with an HTTP 400 error code - - """ - subprotocol: Optional[Subprotocol] = None - - header_values = headers.get_all("Sec-WebSocket-Protocol") - - if header_values and available_subprotocols: - - parsed_header_values: List[Subprotocol] = sum( - [parse_subprotocol(header_value) for header_value in header_values], [] - ) - - subprotocol = self.select_subprotocol( - parsed_header_values, available_subprotocols - ) - - return subprotocol - - def select_subprotocol( - self, - client_subprotocols: Sequence[Subprotocol], - server_subprotocols: Sequence[Subprotocol], - ) -> Optional[Subprotocol]: - """ - Pick a subprotocol among those offered by the client. - - If several subprotocols are supported by the client and the server, - the default implementation selects the preferred subprotocols by - giving equal value to the priorities of the client and the server. - - If no subprotocol is supported by the client and the server, it - proceeds without a subprotocol. - - This is unlikely to be the most useful implementation in practice, as - many servers providing a subprotocol will require that the client uses - that subprotocol. Such rules can be implemented in a subclass. - - Instead of subclassing, it is possible to override this method by - passing a ``select_subprotocol`` argument to the :func:`serve` - function or the :class:`WebSocketServerProtocol` constructor - - :param client_subprotocols: list of subprotocols offered by the client - :param server_subprotocols: list of subprotocols available on the server - - """ - if self._select_subprotocol is not None: - return self._select_subprotocol(client_subprotocols, server_subprotocols) - - subprotocols = set(client_subprotocols) & set(server_subprotocols) - if not subprotocols: - return None - priority = lambda p: ( - client_subprotocols.index(p) + server_subprotocols.index(p) - ) - return sorted(subprotocols, key=priority)[0] - - async def handshake( - self, - origins: Optional[Sequence[Optional[Origin]]] = None, - available_extensions: Optional[Sequence[ServerExtensionFactory]] = None, - available_subprotocols: Optional[Sequence[Subprotocol]] = None, - extra_headers: Optional[HeadersLikeOrCallable] = None, - ) -> str: - """ - Perform the server side of the opening handshake. - - Return the path of the URI of the request. 
- - :param origins: list of acceptable values of the Origin HTTP header; - include ``None`` if the lack of an origin is acceptable - :param available_extensions: list of supported extensions in the order - in which they should be used - :param available_subprotocols: list of supported subprotocols in order - of decreasing preference - :param extra_headers: sets additional HTTP response headers when the - handshake succeeds; it can be a :class:`~websockets.http.Headers` - instance, a :class:`~collections.abc.Mapping`, an iterable of - ``(name, value)`` pairs, or a callable taking the request path and - headers in arguments and returning one of the above. - :raises ~websockets.exceptions.InvalidHandshake: if the handshake - fails - - """ - path, request_headers = await self.read_http_request() - - # Hook for customizing request handling, for example checking - # authentication or treating some paths as plain HTTP endpoints. - early_response_awaitable = self.process_request(path, request_headers) - if isinstance(early_response_awaitable, Awaitable): - early_response = await early_response_awaitable - else: - # For backwards compatibility with 7.0. - warnings.warn("declare process_request as a coroutine", DeprecationWarning) - early_response = early_response_awaitable # type: ignore - - # Change the response to a 503 error if the server is shutting down. - if not self.ws_server.is_serving(): - early_response = ( - http.HTTPStatus.SERVICE_UNAVAILABLE, - [], - b"Server is shutting down.\n", - ) - - if early_response is not None: - raise AbortHandshake(*early_response) - - key = check_request(request_headers) - - self.origin = self.process_origin(request_headers, origins) - - extensions_header, self.extensions = self.process_extensions( - request_headers, available_extensions - ) - - protocol_header = self.subprotocol = self.process_subprotocol( - request_headers, available_subprotocols - ) - - response_headers = Headers() - - build_response(response_headers, key) - - if extensions_header is not None: - response_headers["Sec-WebSocket-Extensions"] = extensions_header - - if protocol_header is not None: - response_headers["Sec-WebSocket-Protocol"] = protocol_header - - if callable(extra_headers): - extra_headers = extra_headers(path, self.request_headers) - if extra_headers is not None: - if isinstance(extra_headers, Headers): - extra_headers = extra_headers.raw_items() - elif isinstance(extra_headers, collections.abc.Mapping): - extra_headers = extra_headers.items() - for name, value in extra_headers: - response_headers[name] = value - - response_headers.setdefault("Date", email.utils.formatdate(usegmt=True)) - response_headers.setdefault("Server", USER_AGENT) - - self.write_http_response(http.HTTPStatus.SWITCHING_PROTOCOLS, response_headers) - - self.connection_open() - - return path - - -class WebSocketServer: - """ - WebSocket server returned by :func:`~websockets.server.serve`. - - This class provides the same interface as - :class:`~asyncio.AbstractServer`, namely the - :meth:`~asyncio.AbstractServer.close` and - :meth:`~asyncio.AbstractServer.wait_closed` methods. - - It keeps track of WebSocket connections in order to close them properly - when shutting down. - - Instances of this class store a reference to the :class:`~asyncio.Server` - object returned by :meth:`~asyncio.loop.create_server` rather than inherit - from :class:`~asyncio.Server` in part because - :meth:`~asyncio.loop.create_server` doesn't support passing a custom - :class:`~asyncio.Server` class. 
- - """ - - def __init__(self, loop: asyncio.AbstractEventLoop) -> None: - # Store a reference to loop to avoid relying on self.server._loop. - self.loop = loop - - # Keep track of active connections. - self.websockets: Set[WebSocketServerProtocol] = set() - - # Task responsible for closing the server and terminating connections. - self.close_task: Optional[asyncio.Task[None]] = None - - # Completed when the server is closed and connections are terminated. - self.closed_waiter: asyncio.Future[None] = loop.create_future() - - def wrap(self, server: asyncio.AbstractServer) -> None: - """ - Attach to a given :class:`~asyncio.Server`. - - Since :meth:`~asyncio.loop.create_server` doesn't support injecting a - custom ``Server`` class, the easiest solution that doesn't rely on - private :mod:`asyncio` APIs is to: - - - instantiate a :class:`WebSocketServer` - - give the protocol factory a reference to that instance - - call :meth:`~asyncio.loop.create_server` with the factory - - attach the resulting :class:`~asyncio.Server` with this method - - """ - self.server = server - - def register(self, protocol: WebSocketServerProtocol) -> None: - """ - Register a connection with this server. - - """ - self.websockets.add(protocol) - - def unregister(self, protocol: WebSocketServerProtocol) -> None: - """ - Unregister a connection with this server. - - """ - self.websockets.remove(protocol) - - def is_serving(self) -> bool: - """ - Tell whether the server is accepting new connections or shutting down. - - """ - try: - # Python ≥ 3.7 - return self.server.is_serving() - except AttributeError: # pragma: no cover - # Python < 3.7 - return self.server.sockets is not None - - def close(self) -> None: - """ - Close the server. - - This method: - - * closes the underlying :class:`~asyncio.Server`; - * rejects new WebSocket connections with an HTTP 503 (service - unavailable) error; this happens when the server accepted the TCP - connection but didn't complete the WebSocket opening handshake prior - to closing; - * closes open WebSocket connections with close code 1001 (going away). - - :meth:`close` is idempotent. - - """ - if self.close_task is None: - self.close_task = self.loop.create_task(self._close()) - - async def _close(self) -> None: - """ - Implementation of :meth:`close`. - - This calls :meth:`~asyncio.Server.close` on the underlying - :class:`~asyncio.Server` object to stop accepting new connections and - then closes open connections with close code 1001. - - """ - # Stop accepting new connections. - self.server.close() - - # Wait until self.server.close() completes. - await self.server.wait_closed() - - # Wait until all accepted connections reach connection_made() and call - # register(). See https://bugs.python.org/issue34852 for details. - await asyncio.sleep( - 0, loop=self.loop if sys.version_info[:2] < (3, 8) else None - ) - - # Close OPEN connections with status code 1001. Since the server was - # closed, handshake() closes OPENING conections with a HTTP 503 error. - # Wait until all connections are closed. - - # asyncio.wait doesn't accept an empty first argument - if self.websockets: - await asyncio.wait( - [ - asyncio.ensure_future(websocket.close(1001)) - for websocket in self.websockets - ], - loop=self.loop if sys.version_info[:2] < (3, 8) else None, - ) - - # Wait until all connection handlers are complete. - - # asyncio.wait doesn't accept an empty first argument. 
- if self.websockets: - await asyncio.wait( - [websocket.handler_task for websocket in self.websockets], - loop=self.loop if sys.version_info[:2] < (3, 8) else None, - ) - - # Tell wait_closed() to return. - self.closed_waiter.set_result(None) - - async def wait_closed(self) -> None: - """ - Wait until the server is closed. - - When :meth:`wait_closed` returns, all TCP connections are closed and - all connection handlers have returned. - - """ - await asyncio.shield(self.closed_waiter) - - @property - def sockets(self) -> Optional[List[socket.socket]]: - """ - List of :class:`~socket.socket` objects the server is listening to. - - ``None`` if the server is closed. - - """ - return self.server.sockets - - -class Serve: - """ - - Create, start, and return a WebSocket server on ``host`` and ``port``. - - Whenever a client connects, the server accepts the connection, creates a - :class:`WebSocketServerProtocol`, performs the opening handshake, and - delegates to the connection handler defined by ``ws_handler``. Once the - handler completes, either normally or with an exception, the server - performs the closing handshake and closes the connection. - - Awaiting :func:`serve` yields a :class:`WebSocketServer`. This instance - provides :meth:`~websockets.server.WebSocketServer.close` and - :meth:`~websockets.server.WebSocketServer.wait_closed` methods for - terminating the server and cleaning up its resources. - - When a server is closed with :meth:`~WebSocketServer.close`, it closes all - connections with close code 1001 (going away). Connections handlers, which - are running the ``ws_handler`` coroutine, will receive a - :exc:`~websockets.exceptions.ConnectionClosedOK` exception on their - current or next interaction with the WebSocket connection. - - :func:`serve` can also be used as an asynchronous context manager. In - this case, the server is shut down when exiting the context. - - :func:`serve` is a wrapper around the event loop's - :meth:`~asyncio.loop.create_server` method. It creates and starts a - :class:`~asyncio.Server` with :meth:`~asyncio.loop.create_server`. Then it - wraps the :class:`~asyncio.Server` in a :class:`WebSocketServer` and - returns the :class:`WebSocketServer`. - - The ``ws_handler`` argument is the WebSocket handler. It must be a - coroutine accepting two arguments: a :class:`WebSocketServerProtocol` and - the request URI. - - The ``host`` and ``port`` arguments, as well as unrecognized keyword - arguments, are passed along to :meth:`~asyncio.loop.create_server`. - - For example, you can set the ``ssl`` keyword argument to a - :class:`~ssl.SSLContext` to enable TLS. - - The ``create_protocol`` parameter allows customizing the - :class:`~asyncio.Protocol` that manages the connection. It should be a - callable or class accepting the same arguments as - :class:`WebSocketServerProtocol` and returning an instance of - :class:`WebSocketServerProtocol` or a subclass. It defaults to - :class:`WebSocketServerProtocol`. - - The behavior of ``ping_interval``, ``ping_timeout``, ``close_timeout``, - ``max_size``, ``max_queue``, ``read_limit``, and ``write_limit`` is - described in :class:`~websockets.protocol.WebSocketCommonProtocol`. 
- - :func:`serve` also accepts the following optional arguments: - - * ``compression`` is a shortcut to configure compression extensions; - by default it enables the "permessage-deflate" extension; set it to - ``None`` to disable compression - * ``origins`` defines acceptable Origin HTTP headers; include ``None`` if - the lack of an origin is acceptable - * ``extensions`` is a list of supported extensions in order of - decreasing preference - * ``subprotocols`` is a list of supported subprotocols in order of - decreasing preference - * ``extra_headers`` sets additional HTTP response headers when the - handshake succeeds; it can be a :class:`~websockets.http.Headers` - instance, a :class:`~collections.abc.Mapping`, an iterable of ``(name, - value)`` pairs, or a callable taking the request path and headers in - arguments and returning one of the above - * ``process_request`` allows intercepting the HTTP request; it must be a - coroutine taking the request path and headers in argument; see - :meth:`~WebSocketServerProtocol.process_request` for details - * ``select_subprotocol`` allows customizing the logic for selecting a - subprotocol; it must be a callable taking the subprotocols offered by - the client and available on the server in argument; see - :meth:`~WebSocketServerProtocol.select_subprotocol` for details - - Since there's no useful way to propagate exceptions triggered in handlers, - they're sent to the ``'websockets.server'`` logger instead. Debugging is - much easier if you configure logging to print them:: - - import logging - logger = logging.getLogger('websockets.server') - logger.setLevel(logging.ERROR) - logger.addHandler(logging.StreamHandler()) - - """ - - def __init__( - self, - ws_handler: Callable[[WebSocketServerProtocol, str], Awaitable[Any]], - host: Optional[Union[str, Sequence[str]]] = None, - port: Optional[int] = None, - *, - path: Optional[str] = None, - create_protocol: Optional[Type[WebSocketServerProtocol]] = None, - ping_interval: Optional[float] = 20, - ping_timeout: Optional[float] = 20, - close_timeout: Optional[float] = None, - max_size: Optional[int] = 2 ** 20, - max_queue: Optional[int] = 2 ** 5, - read_limit: int = 2 ** 16, - write_limit: int = 2 ** 16, - loop: Optional[asyncio.AbstractEventLoop] = None, - legacy_recv: bool = False, - klass: Optional[Type[WebSocketServerProtocol]] = None, - timeout: Optional[float] = None, - compression: Optional[str] = "deflate", - origins: Optional[Sequence[Optional[Origin]]] = None, - extensions: Optional[Sequence[ServerExtensionFactory]] = None, - subprotocols: Optional[Sequence[Subprotocol]] = None, - extra_headers: Optional[HeadersLikeOrCallable] = None, - process_request: Optional[ - Callable[[str, Headers], Awaitable[Optional[HTTPResponse]]] - ] = None, - select_subprotocol: Optional[ - Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol] - ] = None, - **kwargs: Any, - ) -> None: - # Backwards compatibility: close_timeout used to be called timeout. - if timeout is None: - timeout = 10 - else: - warnings.warn("rename timeout to close_timeout", DeprecationWarning) - # If both are specified, timeout is ignored. - if close_timeout is None: - close_timeout = timeout - - # Backwards compatibility: create_protocol used to be called klass. - if klass is None: - klass = WebSocketServerProtocol - else: - warnings.warn("rename klass to create_protocol", DeprecationWarning) - # If both are specified, klass is ignored. 
- if create_protocol is None: - create_protocol = klass - - if loop is None: - loop = asyncio.get_event_loop() - - ws_server = WebSocketServer(loop) - - secure = kwargs.get("ssl") is not None - - if compression == "deflate": - if extensions is None: - extensions = [] - if not any( - ext_factory.name == ServerPerMessageDeflateFactory.name - for ext_factory in extensions - ): - extensions = list(extensions) + [ServerPerMessageDeflateFactory()] - elif compression is not None: - raise ValueError(f"unsupported compression: {compression}") - - factory = functools.partial( - create_protocol, - ws_handler, - ws_server, - host=host, - port=port, - secure=secure, - ping_interval=ping_interval, - ping_timeout=ping_timeout, - close_timeout=close_timeout, - max_size=max_size, - max_queue=max_queue, - read_limit=read_limit, - write_limit=write_limit, - loop=loop, - legacy_recv=legacy_recv, - origins=origins, - extensions=extensions, - subprotocols=subprotocols, - extra_headers=extra_headers, - process_request=process_request, - select_subprotocol=select_subprotocol, - ) - - if path is None: - create_server = functools.partial( - loop.create_server, factory, host, port, **kwargs - ) - else: - # unix_serve(path) must not specify host and port parameters. - assert host is None and port is None - create_server = functools.partial( - loop.create_unix_server, factory, path, **kwargs - ) - - # This is a coroutine function. - self._create_server = create_server - self.ws_server = ws_server - - # async with serve(...) - - async def __aenter__(self) -> WebSocketServer: - return await self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], - ) -> None: - self.ws_server.close() - await self.ws_server.wait_closed() - - # await serve(...) - - def __await__(self) -> Generator[Any, None, WebSocketServer]: - # Create a suitable iterator by calling __await__ on a coroutine. - return self.__await_impl__().__await__() - - async def __await_impl__(self) -> WebSocketServer: - server = await self._create_server() - self.ws_server.wrap(server) - return self.ws_server - - # yield from serve(...) - - __iter__ = __await__ - - -serve = Serve - - -def unix_serve( - ws_handler: Callable[[WebSocketServerProtocol, str], Awaitable[Any]], - path: str, - **kwargs: Any, -) -> Serve: - """ - Similar to :func:`serve`, but for listening on Unix sockets. - - This function calls the event loop's - :meth:`~asyncio.loop.create_unix_server` method. - - It is only available on Unix. - - It's useful for deploying a server behind a reverse proxy such as nginx. 
- - :param path: file system path to the Unix socket - - """ - return serve(ws_handler, path=path, **kwargs) +__all__ = [ + "serve", + "unix_serve", + "WebSocketServerProtocol", + "WebSocketServer", +] diff --git a/tests/test_client_server.py b/tests/test_asyncio_client_server.py similarity index 98% rename from tests/test_client_server.py rename to tests/test_asyncio_client_server.py index db26d6583..cff76d1f2 100644 --- a/tests/test_client_server.py +++ b/tests/test_asyncio_client_server.py @@ -13,7 +13,8 @@ import urllib.request import warnings -from websockets.client import * +from websockets.asyncio_client import * +from websockets.asyncio_server import * from websockets.datastructures import Headers from websockets.exceptions import ( ConnectionClosed, @@ -31,7 +32,6 @@ from websockets.http import USER_AGENT from websockets.http_legacy import read_response from websockets.protocol import State -from websockets.server import * from websockets.uri import parse_uri from .test_protocol import MS @@ -1072,7 +1072,7 @@ def test_subprotocol_error_two_subprotocols(self, _process_subprotocol): self.run_loop_once() @with_server() - @unittest.mock.patch("websockets.server.read_request") + @unittest.mock.patch("websockets.asyncio_server.read_request") def test_server_receives_malformed_request(self, _read_request): _read_request.side_effect = ValueError("read_request failed") @@ -1080,7 +1080,7 @@ def test_server_receives_malformed_request(self, _read_request): self.start_client() @with_server() - @unittest.mock.patch("websockets.client.read_response") + @unittest.mock.patch("websockets.asyncio_client.read_response") def test_client_receives_malformed_response(self, _read_response): _read_response.side_effect = ValueError("read_response failed") @@ -1089,7 +1089,7 @@ def test_client_receives_malformed_response(self, _read_response): self.run_loop_once() @with_server() - @unittest.mock.patch("websockets.client.build_request") + @unittest.mock.patch("websockets.asyncio_client.build_request") def test_client_sends_invalid_handshake_request(self, _build_request): def wrong_build_request(headers): return "42" @@ -1100,7 +1100,7 @@ def wrong_build_request(headers): self.start_client() @with_server() - @unittest.mock.patch("websockets.server.build_response") + @unittest.mock.patch("websockets.asyncio_server.build_response") def test_server_sends_invalid_handshake_response(self, _build_response): def wrong_build_response(headers, key): return build_response(headers, "42") @@ -1111,7 +1111,7 @@ def wrong_build_response(headers, key): self.start_client() @with_server() - @unittest.mock.patch("websockets.client.read_response") + @unittest.mock.patch("websockets.asyncio_client.read_response") def test_server_does_not_switch_protocols(self, _read_response): async def wrong_read_response(stream): status_code, reason, headers = await read_response(stream) @@ -1124,7 +1124,9 @@ async def wrong_read_response(stream): self.run_loop_once() @with_server() - @unittest.mock.patch("websockets.server.WebSocketServerProtocol.process_request") + @unittest.mock.patch( + "websockets.asyncio_server.WebSocketServerProtocol.process_request" + ) def test_server_error_in_handshake(self, _process_request): _process_request.side_effect = Exception("process_request crashed") @@ -1132,7 +1134,7 @@ def test_server_error_in_handshake(self, _process_request): self.start_client() @with_server() - @unittest.mock.patch("websockets.server.WebSocketServerProtocol.send") + 
@unittest.mock.patch("websockets.asyncio_server.WebSocketServerProtocol.send") def test_server_handler_crashes(self, send): send.side_effect = ValueError("send failed") @@ -1145,7 +1147,7 @@ def test_server_handler_crashes(self, send): self.assertEqual(self.client.close_code, 1011) @with_server() - @unittest.mock.patch("websockets.server.WebSocketServerProtocol.close") + @unittest.mock.patch("websockets.asyncio_server.WebSocketServerProtocol.close") def test_server_close_crashes(self, close): close.side_effect = ValueError("close failed") @@ -1220,7 +1222,9 @@ def test_invalid_status_error_during_client_connect(self): @unittest.mock.patch( "websockets.server.WebSocketServerProtocol.write_http_response" ) - @unittest.mock.patch("websockets.server.WebSocketServerProtocol.read_http_request") + @unittest.mock.patch( + "websockets.asyncio_server.WebSocketServerProtocol.read_http_request" + ) def test_connection_error_during_opening_handshake( self, _read_http_request, _write_http_response ): @@ -1238,7 +1242,7 @@ def test_connection_error_during_opening_handshake( _write_http_response.assert_not_called() @with_server() - @unittest.mock.patch("websockets.server.WebSocketServerProtocol.close") + @unittest.mock.patch("websockets.asyncio_server.WebSocketServerProtocol.close") def test_connection_error_during_closing_handshake(self, close): close.side_effect = ConnectionError diff --git a/tests/test_auth.py b/tests/test_auth.py index 97a4485a0..c693c9f45 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -6,7 +6,7 @@ from websockets.exceptions import InvalidStatusCode from websockets.headers import build_authorization_basic -from .test_client_server import ClientServerTestsMixin, with_client, with_server +from .test_asyncio_client_server import ClientServerTestsMixin, with_client, with_server from .utils import AsyncioTestCase From 80a8ac8194a9b3591549c6c5bc023f14f1f2c168 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Tue, 18 Feb 2020 22:04:00 +0100 Subject: [PATCH 206/281] Implement sans-I/O handshake. 
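
The new ClientConnection and ServerConnection classes implement the
opening handshake without performing any I/O. They consume bytes and
produce events, and serialize events into bytes for the caller to
write to the network.

Here's a rough sketch of how the client side could be driven over a
plain socket. The socket plumbing is illustrative only and isn't part
of this change; it also assumes the whole handshake response arrives
in a single read:

    import socket

    from websockets.client import ClientConnection
    from websockets.events import Accept, Reject

    client = ClientConnection("ws://localhost:8765/")
    sock = socket.create_connection(("localhost", 8765))

    # connect() builds the Connect event; send() serializes it.
    sock.sendall(client.send(client.connect()))

    # Feed received bytes into the connection; it returns parsed
    # events (Accept or Reject here) and any bytes to send back.
    events, data_to_send = client.receive_data(sock.recv(4096))
    if isinstance(events[0], Accept):
        ...  # opening handshake succeeded

The server side mirrors this: feeding the request bytes into a
ServerConnection produces a Connect event, and accept() or reject()
builds the Accept or Reject event to serialize with send().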
--- src/websockets/__init__.py | 2 + src/websockets/asyncio_server.py | 2 +- src/websockets/client.py | 291 ++++++++++++++ src/websockets/connection.py | 88 +++++ src/websockets/events.py | 27 ++ src/websockets/server.py | 426 ++++++++++++++++++++ tests/extensions/utils.py | 76 ++++ tests/test_client.py | 545 ++++++++++++++++++++++++++ tests/test_http11.py | 2 +- tests/test_protocol.py | 10 +- tests/test_server.py | 649 +++++++++++++++++++++++++++++++ tests/utils.py | 4 + 12 files changed, 2115 insertions(+), 7 deletions(-) create mode 100644 src/websockets/connection.py create mode 100644 src/websockets/events.py create mode 100644 tests/extensions/utils.py create mode 100644 tests/test_client.py create mode 100644 tests/test_server.py diff --git a/src/websockets/__init__.py b/src/websockets/__init__.py index 89829235c..c4accaca1 100644 --- a/src/websockets/__init__.py +++ b/src/websockets/__init__.py @@ -15,6 +15,7 @@ "AbortHandshake", "basic_auth_protocol_factory", "BasicAuthWebSocketServerProtocol", + "ClientConnection", "connect", "ConnectionClosed", "ConnectionClosedError", @@ -43,6 +44,7 @@ "RedirectHandshake", "SecurityError", "serve", + "ServerConnection", "Subprotocol", "unix_connect", "unix_serve", diff --git a/src/websockets/asyncio_server.py b/src/websockets/asyncio_server.py index 1eeddf0eb..89ddf6c7d 100644 --- a/src/websockets/asyncio_server.py +++ b/src/websockets/asyncio_server.py @@ -341,7 +341,7 @@ def process_origin( # "The user agent MUST NOT include more than one Origin header field" # per https://tools.ietf.org/html/rfc6454#section-7.3. try: - origin = cast(Origin, headers.get("Origin")) + origin = cast(Optional[Origin], headers.get("Origin")) except MultipleValuesError as exc: raise InvalidHeader("Origin", "more than one Origin header found") from exc if origins is not None: diff --git a/src/websockets/client.py b/src/websockets/client.py index c7d153f13..ec4eb88f5 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -1,8 +1,299 @@ +import collections +import logging +from typing import Generator, List, Optional, Sequence + from .asyncio_client import WebSocketClientProtocol, connect, unix_connect +from .connection import CLIENT, CONNECTING, OPEN, Connection +from .datastructures import Headers, HeadersLike, MultipleValuesError +from .events import Accept, Connect, Event, Reject +from .exceptions import ( + InvalidHandshake, + InvalidHeader, + InvalidHeaderValue, + InvalidStatusCode, + InvalidUpgrade, + NegotiationError, +) +from .extensions.base import ClientExtensionFactory, Extension +from .headers import ( + build_authorization_basic, + build_extension, + build_subprotocol, + parse_connection, + parse_extension, + parse_subprotocol, + parse_upgrade, +) +from .http import USER_AGENT +from .http11 import Request, Response +from .typing import ( + ConnectionOption, + ExtensionHeader, + Origin, + Subprotocol, + UpgradeProtocol, +) +from .uri import parse_uri +from .utils import accept_key, generate_key __all__ = [ "connect", "unix_connect", + "ClientConnection", "WebSocketClientProtocol", ] + +logger = logging.getLogger(__name__) + + +class ClientConnection(Connection): + + side = CLIENT + + def __init__( + self, + uri: str, + origin: Optional[Origin] = None, + extensions: Optional[Sequence[ClientExtensionFactory]] = None, + subprotocols: Optional[Sequence[Subprotocol]] = None, + extra_headers: Optional[HeadersLike] = None, + ): + super().__init__(state=CONNECTING) + self.wsuri = parse_uri(uri) + self.origin = origin + self.available_extensions = 
extensions + self.available_subprotocols = subprotocols + self.extra_headers = extra_headers + self.key = generate_key() + + def connect(self) -> Connect: + """ + Create a Connect event to send to the server. + + """ + headers = Headers() + + if self.wsuri.port == (443 if self.wsuri.secure else 80): + headers["Host"] = self.wsuri.host + else: + headers["Host"] = f"{self.wsuri.host}:{self.wsuri.port}" + + if self.wsuri.user_info: + headers["Authorization"] = build_authorization_basic(*self.wsuri.user_info) + + if self.origin is not None: + headers["Origin"] = self.origin + + headers["Upgrade"] = "websocket" + headers["Connection"] = "Upgrade" + headers["Sec-WebSocket-Key"] = self.key + headers["Sec-WebSocket-Version"] = "13" + + if self.available_extensions is not None: + extensions_header = build_extension( + [ + (extension_factory.name, extension_factory.get_request_params()) + for extension_factory in self.available_extensions + ] + ) + headers["Sec-WebSocket-Extensions"] = extensions_header + + if self.available_subprotocols is not None: + protocol_header = build_subprotocol(self.available_subprotocols) + headers["Sec-WebSocket-Protocol"] = protocol_header + + if self.extra_headers is not None: + extra_headers = self.extra_headers + if isinstance(extra_headers, Headers): + extra_headers = extra_headers.raw_items() + elif isinstance(extra_headers, collections.abc.Mapping): + extra_headers = extra_headers.items() + for name, value in extra_headers: + headers[name] = value + + headers.setdefault("User-Agent", USER_AGENT) + + request = Request(self.wsuri.resource_name, headers) + return Connect(request) + + def process_response(self, response: Response) -> None: + """ + Check a handshake response received from the server. + + :param response: response + :param key: comes from :func:`build_request` + :raises ~websockets.exceptions.InvalidHandshake: if the handshake response + is invalid + + """ + + if response.status_code != 101: + raise InvalidStatusCode(response.status_code) + + headers = response.headers + + connection: List[ConnectionOption] = sum( + [parse_connection(value) for value in headers.get_all("Connection")], [] + ) + + if not any(value.lower() == "upgrade" for value in connection): + raise InvalidUpgrade( + "Connection", ", ".join(connection) if connection else None + ) + + upgrade: List[UpgradeProtocol] = sum( + [parse_upgrade(value) for value in headers.get_all("Upgrade")], [] + ) + + # For compatibility with non-strict implementations, ignore case when + # checking the Upgrade header. It's supposed to be 'WebSocket'. + if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"): + raise InvalidUpgrade("Upgrade", ", ".join(upgrade) if upgrade else None) + + try: + s_w_accept = headers["Sec-WebSocket-Accept"] + except KeyError: + raise InvalidHeader("Sec-WebSocket-Accept") + except MultipleValuesError: + raise InvalidHeader( + "Sec-WebSocket-Accept", + "more than one Sec-WebSocket-Accept header found", + ) + + if s_w_accept != accept_key(self.key): + raise InvalidHeaderValue("Sec-WebSocket-Accept", s_w_accept) + + self.extensions = self.process_extensions(headers) + + self.subprotocol = self.process_subprotocol(headers) + + def process_extensions(self, headers: Headers) -> List[Extension]: + """ + Handle the Sec-WebSocket-Extensions HTTP response header. + + Check that each extension is supported, as well as its parameters. + + Return the list of accepted extensions. + + Raise :exc:`~websockets.exceptions.InvalidHandshake` to abort the + connection. 
+
+        :rfc:`6455` leaves the rules up to the specification of each
+        extension.
+
+        To provide this level of flexibility, for each extension accepted by
+        the server, we check for a match with each extension available in the
+        client configuration. If no match is found, an exception is raised.
+
+        If several variants of the same extension are accepted by the server,
+        it may be configured several times, which won't make sense in general.
+        Extensions must implement their own requirements. For this purpose,
+        the list of previously accepted extensions is provided.
+
+        Other requirements, for example related to mandatory extensions or the
+        order of extensions, may be implemented by overriding this method.
+
+        """
+        accepted_extensions: List[Extension] = []
+
+        extensions = headers.get_all("Sec-WebSocket-Extensions")
+
+        if extensions:
+
+            if self.available_extensions is None:
+                raise InvalidHandshake("no extensions supported")
+
+            parsed_extensions: List[ExtensionHeader] = sum(
+                [parse_extension(header_value) for header_value in extensions], []
+            )
+
+            for name, response_params in parsed_extensions:
+
+                for extension_factory in self.available_extensions:
+
+                    # Skip non-matching extensions based on their name.
+                    if extension_factory.name != name:
+                        continue
+
+                    # Skip non-matching extensions based on their params.
+                    try:
+                        extension = extension_factory.process_response_params(
+                            response_params, accepted_extensions
+                        )
+                    except NegotiationError:
+                        continue
+
+                    # Add matching extension to the final list.
+                    accepted_extensions.append(extension)
+
+                    # Break out of the loop once we have a match.
+                    break
+
+                # If we didn't break from the loop, no extension in our list
+                # matched what the server sent. Fail the connection.
+                else:
+                    raise NegotiationError(
+                        f"Unsupported extension: "
+                        f"name = {name}, params = {response_params}"
+                    )
+
+        return accepted_extensions
+
+    def process_subprotocol(self, headers: Headers) -> Optional[Subprotocol]:
+        """
+        Handle the Sec-WebSocket-Protocol HTTP response header.
+
+        Check that it contains exactly one supported subprotocol.
+
+        Return the selected subprotocol.
+ + """ + subprotocol: Optional[Subprotocol] = None + + subprotocols = headers.get_all("Sec-WebSocket-Protocol") + + if subprotocols: + + if self.available_subprotocols is None: + raise InvalidHandshake("no subprotocols supported") + + parsed_subprotocols: Sequence[Subprotocol] = sum( + [parse_subprotocol(header_value) for header_value in subprotocols], [] + ) + + if len(parsed_subprotocols) > 1: + subprotocols_display = ", ".join(parsed_subprotocols) + raise InvalidHandshake(f"multiple subprotocols: {subprotocols_display}") + + subprotocol = parsed_subprotocols[0] + + if subprotocol not in self.available_subprotocols: + raise NegotiationError(f"unsupported subprotocol: {subprotocol}") + + return subprotocol + + def send_in_connecting_state(self, event: Event) -> bytes: + assert isinstance(event, Connect) + + request = event.request + + logger.debug("%s > GET %s HTTP/1.1", self.side, request.path) + logger.debug("%s > %r", self.side, request.headers) + + return request.serialize() + + def parse(self) -> Generator[None, None, None]: + response = yield from Response.parse( + self.reader.read_line, self.reader.read_exact, self.reader.read_to_eof, + ) + assert self.state == CONNECTING + try: + self.process_response(response) + except InvalidHandshake as exc: + self.events.append(Reject(response, exc)) + return + else: + self.events.append(Accept(response)) + self.state = OPEN + yield from super().parse() diff --git a/src/websockets/connection.py b/src/websockets/connection.py new file mode 100644 index 000000000..5789b6ea1 --- /dev/null +++ b/src/websockets/connection.py @@ -0,0 +1,88 @@ +import enum +from typing import Generator, Iterable, List, Tuple + +from .events import Event +from .exceptions import InvalidState +from .streams import StreamReader + + +__all__ = ["Connection"] + + +# A WebSocket connection is either a server or a client. + + +class Side(enum.IntEnum): + SERVER, CLIENT = range(2) + + +SERVER = Side.SERVER +CLIENT = Side.CLIENT + + +# A WebSocket connection goes through the following four states, in order: + + +class State(enum.IntEnum): + CONNECTING, OPEN, CLOSING, CLOSED = range(4) + + +CONNECTING = State.CONNECTING +OPEN = State.OPEN +CLOSING = State.CLOSING +CLOSED = State.CLOSED + + +class Connection: + + side: Side + + def __init__(self, state: State = OPEN) -> None: + self.state = state + self.reader = StreamReader() + self.events: List[Event] = [] + self.parser = self.parse() + next(self.parser) # start coroutine + + # Public APIs for receiving data and producing events + + def receive_data(self, data: bytes) -> Tuple[Iterable[Event], bytes]: + self.reader.feed_data(data) + return self.receive() + + def receive_eof(self) -> Tuple[Iterable[Event], bytes]: + self.reader.feed_eof() + return self.receive() + + # Public APIs for receiving events and producing data + + def send(self, event: Event) -> bytes: + """ + Send an event to the remote endpoint. 
+ + """ + if self.state == OPEN: + raise NotImplementedError # not implemented yet + elif self.state == CONNECTING: + return self.send_in_connecting_state(event) + else: + raise InvalidState( + f"Cannot write to a WebSocket in the {self.state.name} state" + ) + + # Private APIs + + def send_in_connecting_state(self, event: Event) -> bytes: + raise NotImplementedError + + def receive(self) -> Tuple[List[Event], bytes]: + # Run parser until more data is needed or EOF + try: + next(self.parser) + except StopIteration: + pass + events, self.events = self.events, [] + return events, b"" + + def parse(self) -> Generator[None, None, None]: + yield # not implemented yet diff --git a/src/websockets/events.py b/src/websockets/events.py new file mode 100644 index 000000000..196de9421 --- /dev/null +++ b/src/websockets/events.py @@ -0,0 +1,27 @@ +from typing import NamedTuple, Optional, Union + +from .http11 import Request, Response + + +__all__ = [ + "Accept", + "Connect", + "Event", + "Reject", +] + + +class Connect(NamedTuple): + request: Request + + +class Accept(NamedTuple): + response: Response + + +class Reject(NamedTuple): + response: Response + exception: Optional[Exception] + + +Event = Union[Connect, Accept, Reject] diff --git a/src/websockets/server.py b/src/websockets/server.py index ec94a2fbf..f668ff5e7 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -1,9 +1,435 @@ +import base64 +import binascii +import collections +import email.utils +import http +import logging +from typing import Callable, Generator, List, Optional, Sequence, Tuple, Union, cast + from .asyncio_server import WebSocketServer, WebSocketServerProtocol, serve, unix_serve +from .connection import CONNECTING, OPEN, SERVER, Connection +from .datastructures import Headers, HeadersLike, MultipleValuesError +from .events import Accept, Connect, Event, Reject +from .exceptions import ( + InvalidHandshake, + InvalidHeader, + InvalidHeaderValue, + InvalidOrigin, + InvalidUpgrade, + NegotiationError, +) +from .extensions.base import Extension, ServerExtensionFactory +from .headers import ( + build_extension, + parse_connection, + parse_extension, + parse_subprotocol, + parse_upgrade, +) +from .http import USER_AGENT +from .http11 import Request, Response +from .typing import ( + ConnectionOption, + ExtensionHeader, + Origin, + Subprotocol, + UpgradeProtocol, +) +from .utils import accept_key __all__ = [ "serve", "unix_serve", + "ServerConnection", "WebSocketServerProtocol", "WebSocketServer", ] + +logger = logging.getLogger(__name__) + + +HeadersLikeOrCallable = Union[HeadersLike, Callable[[str, Headers], HeadersLike]] + + +class ServerConnection(Connection): + + side = SERVER + + def __init__( + self, + origins: Optional[Sequence[Optional[Origin]]] = None, + extensions: Optional[Sequence[ServerExtensionFactory]] = None, + subprotocols: Optional[Sequence[Subprotocol]] = None, + extra_headers: Optional[HeadersLikeOrCallable] = None, + ): + super().__init__(state=CONNECTING) + self.origins = origins + self.available_extensions = extensions + self.available_subprotocols = subprotocols + self.extra_headers = extra_headers + + def accept(self, connect: Connect) -> Union[Accept, Reject]: + """ + Create an ``Accept`` or ``Reject`` event to send to the client. + + If the connection cannot be established, this method returns a + :class:`~websockets.events.Reject` event, which may be unexpected. 
+ + """ + request = connect.request + try: + key, extensions_header, protocol_header = self.process_request(request) + except InvalidOrigin as exc: + logger.debug("Invalid origin", exc_info=True) + return self.reject( + http.HTTPStatus.FORBIDDEN, + f"Failed to open a WebSocket connection: {exc}.\n", + exception=exc, + ) + except InvalidUpgrade as exc: + logger.debug("Invalid upgrade", exc_info=True) + return self.reject( + http.HTTPStatus.UPGRADE_REQUIRED, + ( + f"Failed to open a WebSocket connection: {exc}.\n" + f"\n" + f"You cannot access a WebSocket server directly " + f"with a browser. You need a WebSocket client.\n" + ), + headers=Headers([("Upgrade", "websocket")]), + exception=exc, + ) + except InvalidHandshake as exc: + logger.debug("Invalid handshake", exc_info=True) + return self.reject( + http.HTTPStatus.BAD_REQUEST, + f"Failed to open a WebSocket connection: {exc}.\n", + exception=exc, + ) + except Exception as exc: + logger.warning("Error in opening handshake", exc_info=True) + return self.reject( + http.HTTPStatus.INTERNAL_SERVER_ERROR, + ( + "Failed to open a WebSocket connection.\n" + "See server log for more information.\n" + ), + exception=exc, + ) + + headers = Headers() + + headers["Upgrade"] = "websocket" + headers["Connection"] = "Upgrade" + headers["Sec-WebSocket-Accept"] = accept_key(key) + + if extensions_header is not None: + headers["Sec-WebSocket-Extensions"] = extensions_header + + if protocol_header is not None: + headers["Sec-WebSocket-Protocol"] = protocol_header + + extra_headers: Optional[HeadersLike] + if callable(self.extra_headers): + extra_headers = self.extra_headers(request.path, request.headers) + else: + extra_headers = self.extra_headers + if extra_headers is not None: + if isinstance(extra_headers, Headers): + extra_headers = extra_headers.raw_items() + elif isinstance(extra_headers, collections.abc.Mapping): + extra_headers = extra_headers.items() + for name, value in extra_headers: + headers[name] = value + + headers.setdefault("Date", email.utils.formatdate(usegmt=True)) + headers.setdefault("Server", USER_AGENT) + + response = Response(101, "Switching Protocols", headers) + return Accept(response) + + def process_request( + self, request: Request + ) -> Tuple[str, Optional[str], Optional[str]]: + """ + Check a handshake request received from the client. + + This function doesn't verify that the request is an HTTP/1.1 or higher GET + request and doesn't perform ``Host`` and ``Origin`` checks. These controls + are usually performed earlier in the HTTP request handling code. They're + the responsibility of the caller. + + :param request: request + :returns: ``key`` which must be passed to :func:`build_response` + :raises ~websockets.exceptions.InvalidHandshake: if the handshake request + is invalid; then the server must return 400 Bad Request error + + """ + headers = request.headers + + connection: List[ConnectionOption] = sum( + [parse_connection(value) for value in headers.get_all("Connection")], [] + ) + + if not any(value.lower() == "upgrade" for value in connection): + raise InvalidUpgrade( + "Connection", ", ".join(connection) if connection else None + ) + + upgrade: List[UpgradeProtocol] = sum( + [parse_upgrade(value) for value in headers.get_all("Upgrade")], [] + ) + + # For compatibility with non-strict implementations, ignore case when + # checking the Upgrade header. The RFC always uses "websocket", except + # in section 11.2. (IANA registration) where it uses "WebSocket". 
+ if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"): + raise InvalidUpgrade("Upgrade", ", ".join(upgrade) if upgrade else None) + + try: + key = headers["Sec-WebSocket-Key"] + except KeyError: + raise InvalidHeader("Sec-WebSocket-Key") + except MultipleValuesError: + raise InvalidHeader( + "Sec-WebSocket-Key", "more than one Sec-WebSocket-Key header found" + ) + + try: + raw_key = base64.b64decode(key.encode(), validate=True) + except binascii.Error: + raise InvalidHeaderValue("Sec-WebSocket-Key", key) + if len(raw_key) != 16: + raise InvalidHeaderValue("Sec-WebSocket-Key", key) + + try: + version = headers["Sec-WebSocket-Version"] + except KeyError: + raise InvalidHeader("Sec-WebSocket-Version") + except MultipleValuesError: + raise InvalidHeader( + "Sec-WebSocket-Version", + "more than one Sec-WebSocket-Version header found", + ) + + if version != "13": + raise InvalidHeaderValue("Sec-WebSocket-Version", version) + + self.origin = self.process_origin(headers) + + extensions_header, self.extensions = self.process_extensions(headers) + + protocol_header = self.subprotocol = self.process_subprotocol(headers) + + return key, extensions_header, protocol_header + + def process_origin(self, headers: Headers) -> Optional[Origin]: + """ + Handle the Origin HTTP request header. + + :param headers: request headers + :raises ~websockets.exceptions.InvalidOrigin: if the origin isn't + acceptable + + """ + # "The user agent MUST NOT include more than one Origin header field" + # per https://tools.ietf.org/html/rfc6454#section-7.3. + try: + origin = cast(Optional[Origin], headers.get("Origin")) + except MultipleValuesError as exc: + raise InvalidHeader("Origin", "more than one Origin header found") from exc + if self.origins is not None: + if origin not in self.origins: + raise InvalidOrigin(origin) + return origin + + def process_extensions( + self, headers: Headers, + ) -> Tuple[Optional[str], List[Extension]]: + """ + Handle the Sec-WebSocket-Extensions HTTP request header. + + Accept or reject each extension proposed in the client request. + Negotiate parameters for accepted extensions. + + Return the Sec-WebSocket-Extensions HTTP response header and the list + of accepted extensions. + + :rfc:`6455` leaves the rules up to the specification of each + :extension. + + To provide this level of flexibility, for each extension proposed by + the client, we check for a match with each extension available in the + server configuration. If no match is found, the extension is ignored. + + If several variants of the same extension are proposed by the client, + it may be accepted several times, which won't make sense in general. + Extensions must implement their own requirements. For this purpose, + the list of previously accepted extensions is provided. + + This process doesn't allow the server to reorder extensions. It can + only select a subset of the extensions proposed by the client. + + Other requirements, for example related to mandatory extensions or the + order of extensions, may be implemented by overriding this method. 
+
+        :param headers: request headers
+        :raises ~websockets.exceptions.InvalidHandshake: to abort the
+            handshake with an HTTP 400 error code
+
+        """
+        response_header_value: Optional[str] = None
+
+        extension_headers: List[ExtensionHeader] = []
+        accepted_extensions: List[Extension] = []
+
+        header_values = headers.get_all("Sec-WebSocket-Extensions")
+
+        if header_values and self.available_extensions:
+
+            parsed_header_values: List[ExtensionHeader] = sum(
+                [parse_extension(header_value) for header_value in header_values], []
+            )
+
+            for name, request_params in parsed_header_values:
+
+                for ext_factory in self.available_extensions:
+
+                    # Skip non-matching extensions based on their name.
+                    if ext_factory.name != name:
+                        continue
+
+                    # Skip non-matching extensions based on their params.
+                    try:
+                        response_params, extension = ext_factory.process_request_params(
+                            request_params, accepted_extensions
+                        )
+                    except NegotiationError:
+                        continue
+
+                    # Add matching extension to the final list.
+                    extension_headers.append((name, response_params))
+                    accepted_extensions.append(extension)
+
+                    # Break out of the loop once we have a match.
+                    break
+
+                # If we didn't break from the loop, no extension in our list
+                # matched what the client sent. The extension is declined.
+
+        # Serialize extension header.
+        if extension_headers:
+            response_header_value = build_extension(extension_headers)
+
+        return response_header_value, accepted_extensions
+
+    def process_subprotocol(self, headers: Headers) -> Optional[Subprotocol]:
+        """
+        Handle the Sec-WebSocket-Protocol HTTP request header.
+
+        Return Sec-WebSocket-Protocol HTTP response header, which is the same
+        as the selected subprotocol.
+
+        :param headers: request headers
+        :raises ~websockets.exceptions.InvalidHandshake: to abort the
+            handshake with an HTTP 400 error code
+
+        """
+        subprotocol: Optional[Subprotocol] = None
+
+        header_values = headers.get_all("Sec-WebSocket-Protocol")
+
+        if header_values and self.available_subprotocols:
+
+            parsed_header_values: List[Subprotocol] = sum(
+                [parse_subprotocol(header_value) for header_value in header_values], []
+            )
+
+            subprotocol = self.select_subprotocol(
+                parsed_header_values, self.available_subprotocols
+            )
+
+        return subprotocol
+
+    def select_subprotocol(
+        self,
+        client_subprotocols: Sequence[Subprotocol],
+        server_subprotocols: Sequence[Subprotocol],
+    ) -> Optional[Subprotocol]:
+        """
+        Pick a subprotocol among those offered by the client.
+
+        If several subprotocols are supported by the client and the server,
+        the default implementation selects the preferred subprotocol by
+        giving equal weight to the preferences of the client and the server.
+
+        If no common subprotocol is supported by the client and the server, it
+        proceeds without a subprotocol.
+
+        This is unlikely to be the most useful implementation in practice, as
+        many servers providing a subprotocol will require that the client uses
+        that subprotocol.
+ + :param client_subprotocols: list of subprotocols offered by the client + :param server_subprotocols: list of subprotocols available on the server + + """ + subprotocols = set(client_subprotocols) & set(server_subprotocols) + if not subprotocols: + return None + priority = lambda p: ( + client_subprotocols.index(p) + server_subprotocols.index(p) + ) + return sorted(subprotocols, key=priority)[0] + + def reject( + self, + status: http.HTTPStatus, + text: str, + headers: Optional[Headers] = None, + exception: Optional[Exception] = None, + ) -> Reject: + """ + Create a ``Reject`` event to send to the client. + + A short plain text response is the best fallback when failing to + establish a WebSocket connection. + + """ + body = text.encode() + if headers is None: + headers = Headers() + headers.setdefault("Date", email.utils.formatdate(usegmt=True)) + headers.setdefault("Server", USER_AGENT) + headers.setdefault("Content-Length", str(len(body))) + headers.setdefault("Content-Type", "text/plain; charset=utf-8") + headers.setdefault("Connection", "close") + response = Response(status.value, status.phrase, headers, body) + return Reject(response, exception) + + def send_in_connecting_state(self, event: Event) -> bytes: + assert isinstance(event, (Accept, Reject)) + + if isinstance(event, Accept): + self.state = OPEN + + response = event.response + + logger.debug( + "%s > HTTP/1.1 %d %s", + self.side, + response.status_code, + response.reason_phrase, + ) + logger.debug("%s > %r", self.side, response.headers) + if response.body is not None: + logger.debug("%s > body (%d bytes)", self.side, len(response.body)) + + return response.serialize() + + def parse(self) -> Generator[None, None, None]: + request = yield from Request.parse(self.reader.read_line) + assert self.state == CONNECTING + self.events.append(Connect(request)) + yield from super().parse() diff --git a/tests/extensions/utils.py b/tests/extensions/utils.py new file mode 100644 index 000000000..81990bb07 --- /dev/null +++ b/tests/extensions/utils.py @@ -0,0 +1,76 @@ +from websockets.exceptions import NegotiationError + + +class OpExtension: + name = "x-op" + + def __init__(self, op=None): + self.op = op + + def decode(self, frame, *, max_size=None): + return frame # pragma: no cover + + def encode(self, frame): + return frame # pragma: no cover + + def __eq__(self, other): + return isinstance(other, OpExtension) and self.op == other.op + + +class ClientOpExtensionFactory: + name = "x-op" + + def __init__(self, op=None): + self.op = op + + def get_request_params(self): + return [("op", self.op)] + + def process_response_params(self, params, accepted_extensions): + if params != [("op", self.op)]: + raise NegotiationError() + return OpExtension(self.op) + + +class ServerOpExtensionFactory: + name = "x-op" + + def __init__(self, op=None): + self.op = op + + def process_request_params(self, params, accepted_extensions): + if params != [("op", self.op)]: + raise NegotiationError() + return [("op", self.op)], OpExtension(self.op) + + +class Rsv2Extension: + name = "x-rsv2" + + def decode(self, frame, *, max_size=None): + assert frame.rsv2 + return frame._replace(rsv2=False) + + def encode(self, frame): + assert not frame.rsv2 + return frame._replace(rsv2=True) + + def __eq__(self, other): + return isinstance(other, Rsv2Extension) + + +class ClientRsv2ExtensionFactory: + name = "x-rsv2" + + def get_request_params(self): + return [] + + def process_response_params(self, params, accepted_extensions): + return Rsv2Extension() + + +class 
ServerRsv2ExtensionFactory: + name = "x-rsv2" + + def process_request_params(self, params, accepted_extensions): + return [], Rsv2Extension() diff --git a/tests/test_client.py b/tests/test_client.py new file mode 100644 index 000000000..1cf27349d --- /dev/null +++ b/tests/test_client.py @@ -0,0 +1,545 @@ +import unittest +import unittest.mock + +from websockets.client import * +from websockets.connection import CONNECTING, OPEN +from websockets.datastructures import Headers +from websockets.events import Accept, Connect, Reject +from websockets.exceptions import InvalidHandshake, InvalidHeader +from websockets.http import USER_AGENT +from websockets.http11 import Request, Response +from websockets.utils import accept_key + +from .extensions.utils import ( + ClientOpExtensionFactory, + ClientRsv2ExtensionFactory, + OpExtension, + Rsv2Extension, +) +from .test_utils import ACCEPT, KEY +from .utils import DATE + + +class ConnectTests(unittest.TestCase): + def test_send_connect(self): + with unittest.mock.patch("websockets.client.generate_key", return_value=KEY): + client = ClientConnection("wss://example.com/test") + connect = client.connect() + self.assertIsInstance(connect, Connect) + bytes_to_send = client.send(connect) + self.assertEqual( + bytes_to_send, + ( + f"GET /test HTTP/1.1\r\n" + f"Host: example.com\r\n" + f"Upgrade: websocket\r\n" + f"Connection: Upgrade\r\n" + f"Sec-WebSocket-Key: {KEY}\r\n" + f"Sec-WebSocket-Version: 13\r\n" + f"User-Agent: {USER_AGENT}\r\n" + f"\r\n" + ).encode(), + ) + + def test_connect_request(self): + with unittest.mock.patch("websockets.client.generate_key", return_value=KEY): + client = ClientConnection("wss://example.com/test") + connect = client.connect() + self.assertIsInstance(connect.request, Request) + self.assertEqual(connect.request.path, "/test") + self.assertEqual( + connect.request.headers, + Headers( + { + "Host": "example.com", + "Upgrade": "websocket", + "Connection": "Upgrade", + "Sec-WebSocket-Key": KEY, + "Sec-WebSocket-Version": "13", + "User-Agent": USER_AGENT, + } + ), + ) + + def test_path(self): + client = ClientConnection("wss://example.com/endpoint?test=1") + request = client.connect().request + + self.assertEqual(request.path, "/endpoint?test=1") + + def test_port(self): + for uri, host in [ + ("ws://example.com/", "example.com"), + ("ws://example.com:80/", "example.com"), + ("ws://example.com:8080/", "example.com:8080"), + ("wss://example.com/", "example.com"), + ("wss://example.com:443/", "example.com"), + ("wss://example.com:8443/", "example.com:8443"), + ]: + with self.subTest(uri=uri): + client = ClientConnection(uri) + request = client.connect().request + + self.assertEqual(request.headers["Host"], host) + + def test_user_info(self): + client = ClientConnection("wss://hello:iloveyou@example.com/") + request = client.connect().request + + self.assertEqual(request.headers["Authorization"], "Basic aGVsbG86aWxvdmV5b3U=") + + def test_origin(self): + client = ClientConnection("wss://example.com/", origin="https://example.com") + request = client.connect().request + + self.assertEqual(request.headers["Origin"], "https://example.com") + + def test_extensions(self): + client = ClientConnection( + "wss://example.com/", extensions=[ClientOpExtensionFactory()] + ) + request = client.connect().request + + self.assertEqual(request.headers["Sec-WebSocket-Extensions"], "x-op; op") + + def test_subprotocols(self): + client = ClientConnection("wss://example.com/", subprotocols=["chat"]) + request = client.connect().request + + 
self.assertEqual(request.headers["Sec-WebSocket-Protocol"], "chat") + + def test_extra_headers(self): + for extra_headers in [ + Headers({"X-Spam": "Eggs"}), + {"X-Spam": "Eggs"}, + [("X-Spam", "Eggs")], + ]: + with self.subTest(extra_headers=extra_headers): + client = ClientConnection( + "wss://example.com/", extra_headers=extra_headers + ) + request = client.connect().request + + self.assertEqual(request.headers["X-Spam"], "Eggs") + + def test_extra_headers_overrides_user_agent(self): + client = ClientConnection( + "wss://example.com/", extra_headers={"User-Agent": "Other"} + ) + request = client.connect().request + + self.assertEqual(request.headers["User-Agent"], "Other") + + +class AcceptRejectTests(unittest.TestCase): + def test_receive_accept(self): + with unittest.mock.patch("websockets.client.generate_key", return_value=KEY): + client = ClientConnection("ws://example.com/test") + client.connect() + [accept], bytes_to_send = client.receive_data( + ( + f"HTTP/1.1 101 Switching Protocols\r\n" + f"Upgrade: websocket\r\n" + f"Connection: Upgrade\r\n" + f"Sec-WebSocket-Accept: {ACCEPT}\r\n" + f"Date: {DATE}\r\n" + f"Server: {USER_AGENT}\r\n" + f"\r\n" + ).encode(), + ) + self.assertIsInstance(accept, Accept) + self.assertEqual(bytes_to_send, b"") + self.assertEqual(client.state, OPEN) + + def test_receive_reject(self): + with unittest.mock.patch("websockets.client.generate_key", return_value=KEY): + client = ClientConnection("ws://example.com/test") + client.connect() + [reject], bytes_to_send = client.receive_data( + ( + f"HTTP/1.1 404 Not Found\r\n" + f"Date: {DATE}\r\n" + f"Server: {USER_AGENT}\r\n" + f"Content-Length: 13\r\n" + f"Content-Type: text/plain; charset=utf-8\r\n" + f"Connection: close\r\n" + f"\r\n" + f"Sorry folks.\n" + ).encode(), + ) + self.assertIsInstance(reject, Reject) + self.assertEqual(bytes_to_send, b"") + self.assertEqual(client.state, CONNECTING) + + def test_accept_response(self): + with unittest.mock.patch("websockets.client.generate_key", return_value=KEY): + client = ClientConnection("ws://example.com/test") + client.connect() + [accept], _bytes_to_send = client.receive_data( + ( + f"HTTP/1.1 101 Switching Protocols\r\n" + f"Upgrade: websocket\r\n" + f"Connection: Upgrade\r\n" + f"Sec-WebSocket-Accept: {ACCEPT}\r\n" + f"Date: {DATE}\r\n" + f"Server: {USER_AGENT}\r\n" + f"\r\n" + ).encode(), + ) + self.assertEqual(accept.response.status_code, 101) + self.assertEqual(accept.response.reason_phrase, "Switching Protocols") + self.assertEqual( + accept.response.headers, + Headers( + { + "Upgrade": "websocket", + "Connection": "Upgrade", + "Sec-WebSocket-Accept": ACCEPT, + "Date": DATE, + "Server": USER_AGENT, + } + ), + ) + self.assertIsNone(accept.response.body) + + def test_reject_response(self): + with unittest.mock.patch("websockets.client.generate_key", return_value=KEY): + client = ClientConnection("ws://example.com/test") + client.connect() + [reject], _bytes_to_send = client.receive_data( + ( + f"HTTP/1.1 404 Not Found\r\n" + f"Date: {DATE}\r\n" + f"Server: {USER_AGENT}\r\n" + f"Content-Length: 13\r\n" + f"Content-Type: text/plain; charset=utf-8\r\n" + f"Connection: close\r\n" + f"\r\n" + f"Sorry folks.\n" + ).encode(), + ) + self.assertEqual(reject.response.status_code, 404) + self.assertEqual(reject.response.reason_phrase, "Not Found") + self.assertEqual( + reject.response.headers, + Headers( + { + "Date": DATE, + "Server": USER_AGENT, + "Content-Length": "13", + "Content-Type": "text/plain; charset=utf-8", + "Connection": "close", + } + ), + ) + 
self.assertEqual(reject.response.body, b"Sorry folks.\n") + + def make_accept_response(self, client): + request = client.connect().request + return Response( + status_code=101, + reason_phrase="Switching Protocols", + headers=Headers( + { + "Upgrade": "websocket", + "Connection": "Upgrade", + "Sec-WebSocket-Accept": accept_key( + request.headers["Sec-WebSocket-Key"] + ), + } + ), + ) + + def test_basic(self): + client = ClientConnection("wss://example.com/") + response = self.make_accept_response(client) + [accept], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(accept, Accept) + + def test_missing_connection(self): + client = ClientConnection("wss://example.com/") + response = self.make_accept_response(client) + del response.headers["Connection"] + [reject], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(reject, Reject) + with self.assertRaises(InvalidHeader) as raised: + raise reject.exception + self.assertEqual(str(raised.exception), "missing Connection header") + + def test_invalid_connection(self): + client = ClientConnection("wss://example.com/") + response = self.make_accept_response(client) + del response.headers["Connection"] + response.headers["Connection"] = "close" + [reject], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(reject, Reject) + with self.assertRaises(InvalidHeader) as raised: + raise reject.exception + self.assertEqual(str(raised.exception), "invalid Connection header: close") + + def test_missing_upgrade(self): + client = ClientConnection("wss://example.com/") + response = self.make_accept_response(client) + del response.headers["Upgrade"] + [reject], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(reject, Reject) + with self.assertRaises(InvalidHeader) as raised: + raise reject.exception + self.assertEqual(str(raised.exception), "missing Upgrade header") + + def test_invalid_upgrade(self): + client = ClientConnection("wss://example.com/") + response = self.make_accept_response(client) + del response.headers["Upgrade"] + response.headers["Upgrade"] = "h2c" + [reject], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(reject, Reject) + with self.assertRaises(InvalidHeader) as raised: + raise reject.exception + self.assertEqual(str(raised.exception), "invalid Upgrade header: h2c") + + def test_missing_accept(self): + client = ClientConnection("wss://example.com/") + response = self.make_accept_response(client) + del response.headers["Sec-WebSocket-Accept"] + [reject], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(reject, Reject) + with self.assertRaises(InvalidHeader) as raised: + raise reject.exception + self.assertEqual(str(raised.exception), "missing Sec-WebSocket-Accept header") + + def test_multiple_accept(self): + client = ClientConnection("wss://example.com/") + response = self.make_accept_response(client) + response.headers["Sec-WebSocket-Accept"] = ACCEPT + [reject], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(reject, Reject) + with self.assertRaises(InvalidHeader) as raised: + raise reject.exception + self.assertEqual( + str(raised.exception), + "invalid Sec-WebSocket-Accept header: " + "more than one Sec-WebSocket-Accept header found", + ) + + def test_invalid_accept(self): + client = ClientConnection("wss://example.com/") + response = self.make_accept_response(client) + del 
response.headers["Sec-WebSocket-Accept"] + response.headers["Sec-WebSocket-Accept"] = ACCEPT + [reject], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(reject, Reject) + with self.assertRaises(InvalidHeader) as raised: + raise reject.exception + self.assertEqual( + str(raised.exception), f"invalid Sec-WebSocket-Accept header: {ACCEPT}" + ) + + def test_no_extensions(self): + client = ClientConnection("wss://example.com/") + response = self.make_accept_response(client) + [accept], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(accept, Accept) + self.assertEqual(client.extensions, []) + + def test_no_extension(self): + client = ClientConnection( + "wss://example.com/", extensions=[ClientOpExtensionFactory()] + ) + response = self.make_accept_response(client) + response.headers["Sec-WebSocket-Extensions"] = "x-op; op" + [accept], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(accept, Accept) + self.assertEqual(client.extensions, [OpExtension()]) + + def test_extension(self): + client = ClientConnection( + "wss://example.com/", extensions=[ClientRsv2ExtensionFactory()] + ) + response = self.make_accept_response(client) + response.headers["Sec-WebSocket-Extensions"] = "x-rsv2" + [accept], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(accept, Accept) + self.assertEqual(client.extensions, [Rsv2Extension()]) + + def test_unexpected_extension(self): + client = ClientConnection("wss://example.com/") + response = self.make_accept_response(client) + response.headers["Sec-WebSocket-Extensions"] = "x-op; op" + [reject], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(reject, Reject) + with self.assertRaises(InvalidHandshake) as raised: + raise reject.exception + self.assertEqual(str(raised.exception), "no extensions supported") + + def test_unsupported_extension(self): + client = ClientConnection( + "wss://example.com/", extensions=[ClientRsv2ExtensionFactory()] + ) + response = self.make_accept_response(client) + response.headers["Sec-WebSocket-Extensions"] = "x-op; op" + [reject], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(reject, Reject) + with self.assertRaises(InvalidHandshake) as raised: + raise reject.exception + self.assertEqual( + str(raised.exception), + "Unsupported extension: name = x-op, params = [('op', None)]", + ) + + def test_supported_extension_parameters(self): + client = ClientConnection( + "wss://example.com/", extensions=[ClientOpExtensionFactory("this")] + ) + response = self.make_accept_response(client) + response.headers["Sec-WebSocket-Extensions"] = "x-op; op=this" + [accept], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(accept, Accept) + self.assertEqual(client.extensions, [OpExtension("this")]) + + def test_unsupported_extension_parameters(self): + client = ClientConnection( + "wss://example.com/", extensions=[ClientOpExtensionFactory("this")] + ) + response = self.make_accept_response(client) + response.headers["Sec-WebSocket-Extensions"] = "x-op; op=that" + [reject], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(reject, Reject) + with self.assertRaises(InvalidHandshake) as raised: + raise reject.exception + self.assertEqual( + str(raised.exception), + "Unsupported extension: name = x-op, params = [('op', 'that')]", + ) + + def 
test_multiple_supported_extension_parameters(self): + client = ClientConnection( + "wss://example.com/", + extensions=[ + ClientOpExtensionFactory("this"), + ClientOpExtensionFactory("that"), + ], + ) + response = self.make_accept_response(client) + response.headers["Sec-WebSocket-Extensions"] = "x-op; op=that" + [accept], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(accept, Accept) + self.assertEqual(client.extensions, [OpExtension("that")]) + + def test_multiple_extensions(self): + client = ClientConnection( + "wss://example.com/", + extensions=[ClientOpExtensionFactory(), ClientRsv2ExtensionFactory()], + ) + response = self.make_accept_response(client) + response.headers["Sec-WebSocket-Extensions"] = "x-op; op" + response.headers["Sec-WebSocket-Extensions"] = "x-rsv2" + [accept], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(accept, Accept) + self.assertEqual(client.extensions, [OpExtension(), Rsv2Extension()]) + + def test_multiple_extensions_order(self): + client = ClientConnection( + "wss://example.com/", + extensions=[ClientOpExtensionFactory(), ClientRsv2ExtensionFactory()], + ) + response = self.make_accept_response(client) + response.headers["Sec-WebSocket-Extensions"] = "x-rsv2" + response.headers["Sec-WebSocket-Extensions"] = "x-op; op" + [accept], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(accept, Accept) + self.assertEqual(client.extensions, [Rsv2Extension(), OpExtension()]) + + def test_no_subprotocols(self): + client = ClientConnection("wss://example.com/") + response = self.make_accept_response(client) + [accept], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(accept, Accept) + self.assertIsNone(client.subprotocol) + + def test_no_subprotocol(self): + client = ClientConnection("wss://example.com/", subprotocols=["chat"]) + response = self.make_accept_response(client) + [accept], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(accept, Accept) + self.assertIsNone(client.subprotocol) + + def test_subprotocol(self): + client = ClientConnection("wss://example.com/", subprotocols=["chat"]) + response = self.make_accept_response(client) + response.headers["Sec-WebSocket-Protocol"] = "chat" + [accept], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(accept, Accept) + self.assertEqual(client.subprotocol, "chat") + + def test_unexpected_subprotocol(self): + client = ClientConnection("wss://example.com/") + response = self.make_accept_response(client) + response.headers["Sec-WebSocket-Protocol"] = "chat" + [reject], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(reject, Reject) + with self.assertRaises(InvalidHandshake) as raised: + raise reject.exception + self.assertEqual(str(raised.exception), "no subprotocols supported") + + def test_multiple_subprotocols(self): + client = ClientConnection( + "wss://example.com/", subprotocols=["superchat", "chat"] + ) + response = self.make_accept_response(client) + response.headers["Sec-WebSocket-Protocol"] = "superchat" + response.headers["Sec-WebSocket-Protocol"] = "chat" + [reject], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(reject, Reject) + with self.assertRaises(InvalidHandshake) as raised: + raise reject.exception + self.assertEqual( + str(raised.exception), "multiple subprotocols: superchat, chat" + ) + + def 
test_supported_subprotocol(self): + client = ClientConnection( + "wss://example.com/", subprotocols=["superchat", "chat"] + ) + response = self.make_accept_response(client) + response.headers["Sec-WebSocket-Protocol"] = "chat" + [accept], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(accept, Accept) + self.assertEqual(client.subprotocol, "chat") + + def test_unsupported_subprotocol(self): + client = ClientConnection( + "wss://example.com/", subprotocols=["superchat", "chat"] + ) + response = self.make_accept_response(client) + response.headers["Sec-WebSocket-Protocol"] = "otherchat" + [reject], _bytes_to_send = client.receive_data(response.serialize()) + + self.assertIsInstance(reject, Reject) + with self.assertRaises(InvalidHandshake) as raised: + raise reject.exception + self.assertEqual(str(raised.exception), "unsupported subprotocol: otherchat") diff --git a/tests/test_http11.py b/tests/test_http11.py index bca874aee..4574cf97e 100644 --- a/tests/test_http11.py +++ b/tests/test_http11.py @@ -101,7 +101,7 @@ def setUp(self): def parse(self): return Response.parse( - self.reader.read_line, self.reader.read_exact, self.reader.read_to_eof + self.reader.read_line, self.reader.read_exact, self.reader.read_to_eof, ) def test_parse(self): diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 91fb02a50..3054600e1 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -553,13 +553,13 @@ def test_recv_when_transfer_data_cancelled(self): def test_recv_prevents_concurrent_calls(self): recv = self.loop.create_task(self.protocol.recv()) - with self.assertRaisesRegex( - RuntimeError, + with self.assertRaises(RuntimeError) as raised: + self.loop.run_until_complete(self.protocol.recv()) + self.assertEqual( + str(raised.exception), "cannot call recv while another coroutine " "is already waiting for the next message", - ): - self.loop.run_until_complete(self.protocol.recv()) - + ) recv.cancel() # Test the send coroutine. 
diff --git a/tests/test_server.py b/tests/test_server.py new file mode 100644 index 000000000..1d094a86d --- /dev/null +++ b/tests/test_server.py @@ -0,0 +1,649 @@ +import http +import unittest +import unittest.mock + +from websockets.connection import CONNECTING, OPEN +from websockets.datastructures import Headers +from websockets.events import Accept, Connect, Reject +from websockets.exceptions import InvalidHeader, InvalidOrigin, InvalidUpgrade +from websockets.http import USER_AGENT +from websockets.http11 import Request, Response +from websockets.server import * + +from .extensions.utils import ( + OpExtension, + Rsv2Extension, + ServerOpExtensionFactory, + ServerRsv2ExtensionFactory, +) +from .test_utils import ACCEPT, KEY +from .utils import DATE + + +class ConnectTests(unittest.TestCase): + def test_receive_connect(self): + server = ServerConnection() + [connect], bytes_to_send = server.receive_data( + ( + f"GET /test HTTP/1.1\r\n" + f"Host: example.com\r\n" + f"Upgrade: websocket\r\n" + f"Connection: Upgrade\r\n" + f"Sec-WebSocket-Key: {KEY}\r\n" + f"Sec-WebSocket-Version: 13\r\n" + f"User-Agent: {USER_AGENT}\r\n" + f"\r\n" + ).encode(), + ) + self.assertIsInstance(connect, Connect) + self.assertEqual(bytes_to_send, b"") + + def test_connect_request(self): + server = ServerConnection() + [connect], bytes_to_send = server.receive_data( + ( + f"GET /test HTTP/1.1\r\n" + f"Host: example.com\r\n" + f"Upgrade: websocket\r\n" + f"Connection: Upgrade\r\n" + f"Sec-WebSocket-Key: {KEY}\r\n" + f"Sec-WebSocket-Version: 13\r\n" + f"User-Agent: {USER_AGENT}\r\n" + f"\r\n" + ).encode(), + ) + self.assertEqual(connect.request.path, "/test") + self.assertEqual( + connect.request.headers, + Headers( + { + "Host": "example.com", + "Upgrade": "websocket", + "Connection": "Upgrade", + "Sec-WebSocket-Key": KEY, + "Sec-WebSocket-Version": "13", + "User-Agent": USER_AGENT, + } + ), + ) + + +class AcceptRejectTests(unittest.TestCase): + def make_connect_request(self): + return Request( + path="/test", + headers=Headers( + { + "Host": "example.com", + "Upgrade": "websocket", + "Connection": "Upgrade", + "Sec-WebSocket-Key": KEY, + "Sec-WebSocket-Version": "13", + "User-Agent": USER_AGENT, + } + ), + ) + + def test_send_accept(self): + server = ServerConnection() + with unittest.mock.patch("email.utils.formatdate", return_value=DATE): + accept = server.accept(Connect(self.make_connect_request())) + self.assertIsInstance(accept, Accept) + bytes_to_send = server.send(accept) + self.assertEqual( + bytes_to_send, + ( + f"HTTP/1.1 101 Switching Protocols\r\n" + f"Upgrade: websocket\r\n" + f"Connection: Upgrade\r\n" + f"Sec-WebSocket-Accept: {ACCEPT}\r\n" + f"Date: {DATE}\r\n" + f"Server: {USER_AGENT}\r\n" + f"\r\n" + ).encode(), + ) + self.assertEqual(server.state, OPEN) + + def test_send_reject(self): + server = ServerConnection() + with unittest.mock.patch("email.utils.formatdate", return_value=DATE): + reject = server.reject(http.HTTPStatus.NOT_FOUND, "Sorry folks.\n") + self.assertIsInstance(reject, Reject) + bytes_to_send = server.send(reject) + self.assertEqual( + bytes_to_send, + ( + f"HTTP/1.1 404 Not Found\r\n" + f"Date: {DATE}\r\n" + f"Server: {USER_AGENT}\r\n" + f"Content-Length: 13\r\n" + f"Content-Type: text/plain; charset=utf-8\r\n" + f"Connection: close\r\n" + f"\r\n" + f"Sorry folks.\n" + ).encode(), + ) + self.assertEqual(server.state, CONNECTING) + + def test_accept_response(self): + server = ServerConnection() + with unittest.mock.patch("email.utils.formatdate", return_value=DATE): + accept = 
server.accept(Connect(self.make_connect_request())) + self.assertIsInstance(accept.response, Response) + self.assertEqual(accept.response.status_code, 101) + self.assertEqual(accept.response.reason_phrase, "Switching Protocols") + self.assertEqual( + accept.response.headers, + Headers( + { + "Upgrade": "websocket", + "Connection": "Upgrade", + "Sec-WebSocket-Accept": ACCEPT, + "Date": DATE, + "Server": USER_AGENT, + } + ), + ) + self.assertIsNone(accept.response.body) + + def test_reject_response(self): + server = ServerConnection() + with unittest.mock.patch("email.utils.formatdate", return_value=DATE): + reject = server.reject(http.HTTPStatus.NOT_FOUND, "Sorry folks.\n") + self.assertIsInstance(reject.response, Response) + self.assertEqual(reject.response.status_code, 404) + self.assertEqual(reject.response.reason_phrase, "Not Found") + self.assertEqual( + reject.response.headers, + Headers( + { + "Date": DATE, + "Server": USER_AGENT, + "Content-Length": "13", + "Content-Type": "text/plain; charset=utf-8", + "Connection": "close", + } + ), + ) + self.assertEqual(reject.response.body, b"Sorry folks.\n") + + def test_basic(self): + server = ServerConnection() + request = self.make_connect_request() + accept = server.accept(Connect(request)) + + self.assertIsInstance(accept, Accept) + + def test_unexpected_exception(self): + server = ServerConnection() + request = self.make_connect_request() + with unittest.mock.patch( + "websockets.server.ServerConnection.process_request", + side_effect=Exception("BOOM"), + ): + reject = server.accept(Connect(request)) + + self.assertIsInstance(reject, Reject) + self.assertEqual(reject.response.status_code, 500) + with self.assertRaises(Exception) as raised: + raise reject.exception + self.assertEqual(str(raised.exception), "BOOM") + + def test_missing_connection(self): + server = ServerConnection() + request = self.make_connect_request() + del request.headers["Connection"] + reject = server.accept(Connect(request)) + + self.assertIsInstance(reject, Reject) + self.assertEqual(reject.response.status_code, 426) + self.assertEqual(reject.response.headers["Upgrade"], "websocket") + with self.assertRaises(InvalidUpgrade) as raised: + raise reject.exception + self.assertEqual(str(raised.exception), "missing Connection header") + + def test_invalid_connection(self): + server = ServerConnection() + request = self.make_connect_request() + del request.headers["Connection"] + request.headers["Connection"] = "close" + reject = server.accept(Connect(request)) + + self.assertIsInstance(reject, Reject) + self.assertEqual(reject.response.status_code, 426) + self.assertEqual(reject.response.headers["Upgrade"], "websocket") + with self.assertRaises(InvalidUpgrade) as raised: + raise reject.exception + self.assertEqual(str(raised.exception), "invalid Connection header: close") + + def test_missing_upgrade(self): + server = ServerConnection() + request = self.make_connect_request() + del request.headers["Upgrade"] + reject = server.accept(Connect(request)) + + self.assertIsInstance(reject, Reject) + self.assertEqual(reject.response.status_code, 426) + self.assertEqual(reject.response.headers["Upgrade"], "websocket") + with self.assertRaises(InvalidUpgrade) as raised: + raise reject.exception + self.assertEqual(str(raised.exception), "missing Upgrade header") + + def test_invalid_upgrade(self): + server = ServerConnection() + request = self.make_connect_request() + del request.headers["Upgrade"] + request.headers["Upgrade"] = "h2c" + reject = server.accept(Connect(request)) + 
+ self.assertIsInstance(reject, Reject) + self.assertEqual(reject.response.status_code, 426) + self.assertEqual(reject.response.headers["Upgrade"], "websocket") + with self.assertRaises(InvalidUpgrade) as raised: + raise reject.exception + self.assertEqual(str(raised.exception), "invalid Upgrade header: h2c") + + def test_missing_key(self): + server = ServerConnection() + request = self.make_connect_request() + del request.headers["Sec-WebSocket-Key"] + reject = server.accept(Connect(request)) + + self.assertIsInstance(reject, Reject) + self.assertEqual(reject.response.status_code, 400) + with self.assertRaises(InvalidHeader) as raised: + raise reject.exception + self.assertEqual(str(raised.exception), "missing Sec-WebSocket-Key header") + + def test_multiple_key(self): + server = ServerConnection() + request = self.make_connect_request() + request.headers["Sec-WebSocket-Key"] = KEY + reject = server.accept(Connect(request)) + + self.assertIsInstance(reject, Reject) + self.assertEqual(reject.response.status_code, 400) + with self.assertRaises(InvalidHeader) as raised: + raise reject.exception + self.assertEqual( + str(raised.exception), + "invalid Sec-WebSocket-Key header: " + "more than one Sec-WebSocket-Key header found", + ) + + def test_invalid_key(self): + server = ServerConnection() + request = self.make_connect_request() + del request.headers["Sec-WebSocket-Key"] + request.headers["Sec-WebSocket-Key"] = "not Base64 data!" + reject = server.accept(Connect(request)) + + self.assertIsInstance(reject, Reject) + self.assertEqual(reject.response.status_code, 400) + with self.assertRaises(InvalidHeader) as raised: + raise reject.exception + self.assertEqual( + str(raised.exception), "invalid Sec-WebSocket-Key header: not Base64 data!" + ) + + def test_truncated_key(self): + server = ServerConnection() + request = self.make_connect_request() + del request.headers["Sec-WebSocket-Key"] + request.headers["Sec-WebSocket-Key"] = KEY[ + :16 + ] # 12 bytes instead of 16, Base64-encoded + reject = server.accept(Connect(request)) + + self.assertIsInstance(reject, Reject) + self.assertEqual(reject.response.status_code, 400) + with self.assertRaises(InvalidHeader) as raised: + raise reject.exception + self.assertEqual( + str(raised.exception), f"invalid Sec-WebSocket-Key header: {KEY[:16]}" + ) + + def test_missing_version(self): + server = ServerConnection() + request = self.make_connect_request() + del request.headers["Sec-WebSocket-Version"] + reject = server.accept(Connect(request)) + + self.assertIsInstance(reject, Reject) + self.assertEqual(reject.response.status_code, 400) + with self.assertRaises(InvalidHeader) as raised: + raise reject.exception + self.assertEqual(str(raised.exception), "missing Sec-WebSocket-Version header") + + def test_multiple_version(self): + server = ServerConnection() + request = self.make_connect_request() + request.headers["Sec-WebSocket-Version"] = "11" + reject = server.accept(Connect(request)) + + self.assertIsInstance(reject, Reject) + self.assertEqual(reject.response.status_code, 400) + with self.assertRaises(InvalidHeader) as raised: + raise reject.exception + self.assertEqual( + str(raised.exception), + "invalid Sec-WebSocket-Version header: " + "more than one Sec-WebSocket-Version header found", + ) + + def test_invalid_version(self): + server = ServerConnection() + request = self.make_connect_request() + del request.headers["Sec-WebSocket-Version"] + request.headers["Sec-WebSocket-Version"] = "11" + reject = server.accept(Connect(request)) + + 
self.assertIsInstance(reject, Reject) + self.assertEqual(reject.response.status_code, 400) + with self.assertRaises(InvalidHeader) as raised: + raise reject.exception + self.assertEqual( + str(raised.exception), "invalid Sec-WebSocket-Version header: 11" + ) + + def test_no_origin(self): + server = ServerConnection(origins=["https://example.com"]) + request = self.make_connect_request() + reject = server.accept(Connect(request)) + + self.assertIsInstance(reject, Reject) + self.assertEqual(reject.response.status_code, 403) + with self.assertRaises(InvalidOrigin) as raised: + raise reject.exception + self.assertEqual(str(raised.exception), "missing Origin header") + + def test_origin(self): + server = ServerConnection(origins=["https://example.com"]) + request = self.make_connect_request() + request.headers["Origin"] = "https://example.com" + accept = server.accept(Connect(request)) + + self.assertIsInstance(accept, Accept) + self.assertEqual(server.origin, "https://example.com") + + def test_unexpected_origin(self): + server = ServerConnection(origins=["https://example.com"]) + request = self.make_connect_request() + request.headers["Origin"] = "https://other.example.com" + reject = server.accept(Connect(request)) + + self.assertIsInstance(reject, Reject) + self.assertEqual(reject.response.status_code, 403) + with self.assertRaises(InvalidOrigin) as raised: + raise reject.exception + self.assertEqual( + str(raised.exception), "invalid Origin header: https://other.example.com" + ) + + def test_multiple_origin(self): + server = ServerConnection( + origins=["https://example.com", "https://other.example.com"] + ) + request = self.make_connect_request() + request.headers["Origin"] = "https://example.com" + request.headers["Origin"] = "https://other.example.com" + reject = server.accept(Connect(request)) + + self.assertIsInstance(reject, Reject) + # This is prohibited by the HTTP specification, so the return code is + # 400 Bad Request rather than 403 Forbidden. 
+ self.assertEqual(reject.response.status_code, 400) + with self.assertRaises(InvalidHeader) as raised: + raise reject.exception + self.assertEqual( + str(raised.exception), + "invalid Origin header: more than one Origin header found", + ) + + def test_supported_origin(self): + server = ServerConnection( + origins=["https://example.com", "https://other.example.com"] + ) + request = self.make_connect_request() + request.headers["Origin"] = "https://other.example.com" + accept = server.accept(Connect(request)) + + self.assertIsInstance(accept, Accept) + self.assertEqual(server.origin, "https://other.example.com") + + def test_unsupported_origin(self): + server = ServerConnection( + origins=["https://example.com", "https://other.example.com"] + ) + request = self.make_connect_request() + request.headers["Origin"] = "https://original.example.com" + reject = server.accept(Connect(request)) + + self.assertIsInstance(reject, Reject) + self.assertEqual(reject.response.status_code, 403) + with self.assertRaises(InvalidOrigin) as raised: + raise reject.exception + self.assertEqual( + str(raised.exception), "invalid Origin header: https://original.example.com" + ) + + def test_no_origin_accepted(self): + server = ServerConnection(origins=[None]) + request = self.make_connect_request() + accept = server.accept(Connect(request)) + + self.assertIsInstance(accept, Accept) + self.assertIsNone(server.origin) + + def test_no_extensions(self): + server = ServerConnection() + request = self.make_connect_request() + accept = server.accept(Connect(request)) + + self.assertIsInstance(accept, Accept) + self.assertNotIn("Sec-WebSocket-Extensions", accept.response.headers) + self.assertEqual(server.extensions, []) + + def test_no_extension(self): + server = ServerConnection(extensions=[ServerOpExtensionFactory()]) + request = self.make_connect_request() + accept = server.accept(Connect(request)) + + self.assertIsInstance(accept, Accept) + self.assertNotIn("Sec-WebSocket-Extensions", accept.response.headers) + self.assertEqual(server.extensions, []) + + def test_extension(self): + server = ServerConnection(extensions=[ServerOpExtensionFactory()]) + request = self.make_connect_request() + request.headers["Sec-WebSocket-Extensions"] = "x-op; op" + accept = server.accept(Connect(request)) + + self.assertIsInstance(accept, Accept) + self.assertEqual( + accept.response.headers["Sec-WebSocket-Extensions"], "x-op; op" + ) + self.assertEqual(server.extensions, [OpExtension()]) + + def test_unexpected_extension(self): + server = ServerConnection() + request = self.make_connect_request() + request.headers["Sec-WebSocket-Extensions"] = "x-op; op" + accept = server.accept(Connect(request)) + + self.assertIsInstance(accept, Accept) + self.assertNotIn("Sec-WebSocket-Extensions", accept.response.headers) + self.assertEqual(server.extensions, []) + + def test_unsupported_extension(self): + server = ServerConnection(extensions=[ServerRsv2ExtensionFactory()]) + request = self.make_connect_request() + request.headers["Sec-WebSocket-Extensions"] = "x-op; op" + accept = server.accept(Connect(request)) + + self.assertIsInstance(accept, Accept) + self.assertNotIn("Sec-WebSocket-Extensions", accept.response.headers) + self.assertEqual(server.extensions, []) + + def test_supported_extension_parameters(self): + server = ServerConnection(extensions=[ServerOpExtensionFactory("this")]) + request = self.make_connect_request() + request.headers["Sec-WebSocket-Extensions"] = "x-op; op=this" + accept = server.accept(Connect(request)) + + 
self.assertIsInstance(accept, Accept) + self.assertEqual( + accept.response.headers["Sec-WebSocket-Extensions"], "x-op; op=this" + ) + self.assertEqual(server.extensions, [OpExtension("this")]) + + def test_unsupported_extension_parameters(self): + server = ServerConnection(extensions=[ServerOpExtensionFactory("this")]) + request = self.make_connect_request() + request.headers["Sec-WebSocket-Extensions"] = "x-op; op=that" + accept = server.accept(Connect(request)) + + self.assertIsInstance(accept, Accept) + self.assertNotIn("Sec-WebSocket-Extensions", accept.response.headers) + self.assertEqual(server.extensions, []) + + def test_multiple_supported_extension_parameters(self): + server = ServerConnection( + extensions=[ + ServerOpExtensionFactory("this"), + ServerOpExtensionFactory("that"), + ] + ) + request = self.make_connect_request() + request.headers["Sec-WebSocket-Extensions"] = "x-op; op=that" + accept = server.accept(Connect(request)) + + self.assertIsInstance(accept, Accept) + self.assertEqual( + accept.response.headers["Sec-WebSocket-Extensions"], "x-op; op=that" + ) + self.assertEqual(server.extensions, [OpExtension("that")]) + + def test_multiple_extensions(self): + server = ServerConnection( + extensions=[ServerOpExtensionFactory(), ServerRsv2ExtensionFactory()] + ) + request = self.make_connect_request() + request.headers["Sec-WebSocket-Extensions"] = "x-op; op" + request.headers["Sec-WebSocket-Extensions"] = "x-rsv2" + accept = server.accept(Connect(request)) + + self.assertIsInstance(accept, Accept) + self.assertEqual( + accept.response.headers["Sec-WebSocket-Extensions"], "x-op; op, x-rsv2" + ) + self.assertEqual(server.extensions, [OpExtension(), Rsv2Extension()]) + + def test_multiple_extensions_order(self): + server = ServerConnection( + extensions=[ServerOpExtensionFactory(), ServerRsv2ExtensionFactory()] + ) + request = self.make_connect_request() + request.headers["Sec-WebSocket-Extensions"] = "x-rsv2" + request.headers["Sec-WebSocket-Extensions"] = "x-op; op" + accept = server.accept(Connect(request)) + + self.assertIsInstance(accept, Accept) + self.assertEqual( + accept.response.headers["Sec-WebSocket-Extensions"], "x-rsv2, x-op; op" + ) + self.assertEqual(server.extensions, [Rsv2Extension(), OpExtension()]) + + def test_no_subprotocols(self): + server = ServerConnection() + request = self.make_connect_request() + accept = server.accept(Connect(request)) + + self.assertIsInstance(accept, Accept) + self.assertNotIn("Sec-WebSocket-Protocol", accept.response.headers) + self.assertIsNone(server.subprotocol) + + def test_no_subprotocol(self): + server = ServerConnection(subprotocols=["chat"]) + request = self.make_connect_request() + accept = server.accept(Connect(request)) + + self.assertIsInstance(accept, Accept) + self.assertNotIn("Sec-WebSocket-Protocol", accept.response.headers) + self.assertIsNone(server.subprotocol) + + def test_subprotocol(self): + server = ServerConnection(subprotocols=["chat"]) + request = self.make_connect_request() + request.headers["Sec-WebSocket-Protocol"] = "chat" + accept = server.accept(Connect(request)) + + self.assertIsInstance(accept, Accept) + self.assertEqual(accept.response.headers["Sec-WebSocket-Protocol"], "chat") + self.assertEqual(server.subprotocol, "chat") + + def test_unexpected_subprotocol(self): + server = ServerConnection() + request = self.make_connect_request() + request.headers["Sec-WebSocket-Protocol"] = "chat" + accept = server.accept(Connect(request)) + + self.assertIsInstance(accept, Accept) + 
self.assertNotIn("Sec-WebSocket-Protocol", accept.response.headers) + self.assertIsNone(server.subprotocol) + + def test_multiple_subprotocols(self): + server = ServerConnection(subprotocols=["superchat", "chat"]) + request = self.make_connect_request() + request.headers["Sec-WebSocket-Protocol"] = "superchat" + request.headers["Sec-WebSocket-Protocol"] = "chat" + accept = server.accept(Connect(request)) + + self.assertIsInstance(accept, Accept) + self.assertEqual(accept.response.headers["Sec-WebSocket-Protocol"], "superchat") + self.assertEqual(server.subprotocol, "superchat") + + def test_supported_subprotocol(self): + server = ServerConnection(subprotocols=["superchat", "chat"]) + request = self.make_connect_request() + request.headers["Sec-WebSocket-Protocol"] = "chat" + accept = server.accept(Connect(request)) + + self.assertIsInstance(accept, Accept) + self.assertEqual(accept.response.headers["Sec-WebSocket-Protocol"], "chat") + self.assertEqual(server.subprotocol, "chat") + + def test_unsupported_subprotocol(self): + server = ServerConnection(subprotocols=["superchat", "chat"]) + request = self.make_connect_request() + request.headers["Sec-WebSocket-Protocol"] = "otherchat" + accept = server.accept(Connect(request)) + + self.assertIsInstance(accept, Accept) + self.assertNotIn("Sec-WebSocket-Protocol", accept.response.headers) + self.assertIsNone(server.subprotocol) + + def test_extra_headers(self): + for extra_headers in [ + Headers({"X-Spam": "Eggs"}), + {"X-Spam": "Eggs"}, + [("X-Spam", "Eggs")], + lambda path, headers: Headers({"X-Spam": "Eggs"}), + lambda path, headers: {"X-Spam": "Eggs"}, + lambda path, headers: [("X-Spam", "Eggs")], + ]: + with self.subTest(extra_headers=extra_headers): + server = ServerConnection(extra_headers=extra_headers) + request = self.make_connect_request() + accept = server.accept(Connect(request)) + + self.assertIsInstance(accept, Accept) + self.assertEqual(accept.response.headers["X-Spam"], "Eggs") + + def test_extra_headers_overrides_server(self): + server = ServerConnection(extra_headers={"Server": "Other"}) + request = self.make_connect_request() + accept = server.accept(Connect(request)) + + self.assertIsInstance(accept, Accept) + self.assertEqual(accept.response.headers["Server"], "Other") diff --git a/tests/utils.py b/tests/utils.py index bbffa8649..790d25687 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,5 +1,6 @@ import asyncio import contextlib +import email.utils import functools import logging import os @@ -7,6 +8,9 @@ import unittest +DATE = email.utils.formatdate(usegmt=True) + + class GeneratorTestCase(unittest.TestCase): def assertGeneratorRunning(self, gen): """ From 1033db5d402ed3a241356f97d642cda0df82ce45 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 21 Jun 2020 10:48:43 +0200 Subject: [PATCH 207/281] Drop Event class. It was too thin. It didn't add any value. Using the same abstractions for connection events and wire messages is good enough for our purposes. 
--- src/websockets/client.py | 31 ++- src/websockets/connection.py | 31 ++- src/websockets/events.py | 27 --- src/websockets/http11.py | 3 + src/websockets/protocol.py | 4 +- src/websockets/server.py | 67 +++--- tests/test_client.py | 190 +++++++++-------- tests/test_server.py | 385 ++++++++++++++++------------------- 8 files changed, 341 insertions(+), 397 deletions(-) delete mode 100644 src/websockets/events.py diff --git a/src/websockets/client.py b/src/websockets/client.py index ec4eb88f5..50203f27c 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -5,7 +5,6 @@ from .asyncio_client import WebSocketClientProtocol, connect, unix_connect from .connection import CLIENT, CONNECTING, OPEN, Connection from .datastructures import Headers, HeadersLike, MultipleValuesError -from .events import Accept, Connect, Event, Reject from .exceptions import ( InvalidHandshake, InvalidHeader, @@ -67,9 +66,9 @@ def __init__( self.extra_headers = extra_headers self.key = generate_key() - def connect(self) -> Connect: + def connect(self) -> Request: """ - Create a Connect event to send to the server. + Create a WebSocket handshake request event to send to the server. """ headers = Headers() @@ -114,8 +113,7 @@ def connect(self) -> Connect: headers.setdefault("User-Agent", USER_AGENT) - request = Request(self.wsuri.resource_name, headers) - return Connect(request) + return Request(self.wsuri.resource_name, headers) def process_response(self, response: Response) -> None: """ @@ -153,13 +151,13 @@ def process_response(self, response: Response) -> None: try: s_w_accept = headers["Sec-WebSocket-Accept"] - except KeyError: - raise InvalidHeader("Sec-WebSocket-Accept") - except MultipleValuesError: + except KeyError as exc: + raise InvalidHeader("Sec-WebSocket-Accept") from exc + except MultipleValuesError as exc: raise InvalidHeader( "Sec-WebSocket-Accept", "more than one Sec-WebSocket-Accept header found", - ) + ) from exc if s_w_accept != accept_key(self.key): raise InvalidHeaderValue("Sec-WebSocket-Accept", s_w_accept) @@ -273,11 +271,11 @@ def process_subprotocol(self, headers: Headers) -> Optional[Subprotocol]: return subprotocol - def send_in_connecting_state(self, event: Event) -> bytes: - assert isinstance(event, Connect) - - request = event.request + def send_request(self, request: Request) -> bytes: + """ + Convert a WebSocket handshake request to bytes to send to the server. 
+        """
 
         logger.debug("%s > GET %s HTTP/1.1", self.side, request.path)
         logger.debug("%s > %r", self.side, request.headers)
@@ -291,9 +289,10 @@ def parse(self) -> Generator[None, None, None]:
         try:
             self.process_response(response)
         except InvalidHandshake as exc:
-            self.events.append(Reject(response, exc))
-            return
+            response = response._replace(exception=exc)
+            logger.debug("Invalid handshake", exc_info=True)
         else:
-            self.events.append(Accept(response))
             self.state = OPEN
+        finally:
+            self.events.append(response)
         yield from super().parse()
diff --git a/src/websockets/connection.py b/src/websockets/connection.py
index 5789b6ea1..ac9aedd6b 100644
--- a/src/websockets/connection.py
+++ b/src/websockets/connection.py
@@ -1,14 +1,18 @@
 import enum
-from typing import Generator, Iterable, List, Tuple
+from typing import Generator, List, Tuple, Union
 
-from .events import Event
 from .exceptions import InvalidState
+from .frames import Frame
+from .http11 import Request, Response
 from .streams import StreamReader
 
 
 __all__ = ["Connection"]
 
 
+Event = Union[Request, Response, Frame]
+
+
 # A WebSocket connection is either a server or a client.
 
 
@@ -46,43 +50,38 @@ def __init__(self, state: State = OPEN) -> None:
 
     # Public APIs for receiving data and producing events
 
-    def receive_data(self, data: bytes) -> Tuple[Iterable[Event], bytes]:
+    def receive_data(self, data: bytes) -> Tuple[List[Event], List[bytes]]:
         self.reader.feed_data(data)
         return self.receive()
 
-    def receive_eof(self) -> Tuple[Iterable[Event], bytes]:
+    def receive_eof(self) -> Tuple[List[Event], List[bytes]]:
         self.reader.feed_eof()
         return self.receive()
 
     # Public APIs for receiving events and producing data
 
-    def send(self, event: Event) -> bytes:
+    def send_frame(self, frame: Frame) -> bytes:
         """
-        Send an event to the remote endpoint.
+        Convert a WebSocket frame to bytes to send.
 
         """
-        if self.state == OPEN:
-            raise NotImplementedError  # not implemented yet
-        elif self.state == CONNECTING:
-            return self.send_in_connecting_state(event)
-        else:
+        # Sending frames is only possible in the OPEN state.
+ if self.state != OPEN: raise InvalidState( f"Cannot write to a WebSocket in the {self.state.name} state" ) + raise NotImplementedError # not implemented yet # Private APIs - def send_in_connecting_state(self, event: Event) -> bytes: - raise NotImplementedError - - def receive(self) -> Tuple[List[Event], bytes]: + def receive(self) -> Tuple[List[Event], List[bytes]]: # Run parser until more data is needed or EOF try: next(self.parser) except StopIteration: pass events, self.events = self.events, [] - return events, b"" + return events, [] def parse(self) -> Generator[None, None, None]: yield # not implemented yet diff --git a/src/websockets/events.py b/src/websockets/events.py deleted file mode 100644 index 196de9421..000000000 --- a/src/websockets/events.py +++ /dev/null @@ -1,27 +0,0 @@ -from typing import NamedTuple, Optional, Union - -from .http11 import Request, Response - - -__all__ = [ - "Accept", - "Connect", - "Event", - "Reject", -] - - -class Connect(NamedTuple): - request: Request - - -class Accept(NamedTuple): - response: Response - - -class Reject(NamedTuple): - response: Response - exception: Optional[Exception] - - -Event = Union[Connect, Accept, Reject] diff --git a/src/websockets/http11.py b/src/websockets/http11.py index e1d004881..58ee09253 100644 --- a/src/websockets/http11.py +++ b/src/websockets/http11.py @@ -127,6 +127,9 @@ class Response(NamedTuple): headers: Headers body: Optional[bytes] = None + # If processing the response triggers an exception, it's stored here. + exception: Optional[Exception] = None + @classmethod def parse( cls, diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 748c1ae66..58c4569d0 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -53,7 +53,7 @@ serialize_close, ) from .framing import Frame -from .typing import Data +from .typing import Data, Subprotocol __all__ = ["WebSocketCommonProtocol"] @@ -261,7 +261,7 @@ def __init__( # WebSocket protocol parameters. self.extensions: List[Extension] = [] - self.subprotocol: Optional[str] = None + self.subprotocol: Optional[Subprotocol] = None # The close code and reason are set when receiving a close frame or # losing the TCP connection. diff --git a/src/websockets/server.py b/src/websockets/server.py index f668ff5e7..095d9a17d 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -9,7 +9,6 @@ from .asyncio_server import WebSocketServer, WebSocketServerProtocol, serve, unix_serve from .connection import CONNECTING, OPEN, SERVER, Connection from .datastructures import Headers, HeadersLike, MultipleValuesError -from .events import Accept, Connect, Event, Reject from .exceptions import ( InvalidHandshake, InvalidHeader, @@ -69,15 +68,17 @@ def __init__( self.available_subprotocols = subprotocols self.extra_headers = extra_headers - def accept(self, connect: Connect) -> Union[Accept, Reject]: + def accept(self, request: Request) -> Response: """ - Create an ``Accept`` or ``Reject`` event to send to the client. + Create a WebSocket handshake response event to send to the client. - If the connection cannot be established, this method returns a - :class:`~websockets.events.Reject` event, which may be unexpected. + If the connection cannot be established, the response rejects the + connection, which may be unexpected. """ - request = connect.request + # TODO: when changing Request to a dataclass, set the exception + # attribute on the request rather than the Response, which will + # be semantically more correct. 
try: key, extensions_header, protocol_header = self.process_request(request) except InvalidOrigin as exc: @@ -85,8 +86,7 @@ def accept(self, connect: Connect) -> Union[Accept, Reject]: return self.reject( http.HTTPStatus.FORBIDDEN, f"Failed to open a WebSocket connection: {exc}.\n", - exception=exc, - ) + )._replace(exception=exc) except InvalidUpgrade as exc: logger.debug("Invalid upgrade", exc_info=True) return self.reject( @@ -98,15 +98,13 @@ def accept(self, connect: Connect) -> Union[Accept, Reject]: f"with a browser. You need a WebSocket client.\n" ), headers=Headers([("Upgrade", "websocket")]), - exception=exc, - ) + )._replace(exception=exc) except InvalidHandshake as exc: logger.debug("Invalid handshake", exc_info=True) return self.reject( http.HTTPStatus.BAD_REQUEST, f"Failed to open a WebSocket connection: {exc}.\n", - exception=exc, - ) + )._replace(exception=exc) except Exception as exc: logger.warning("Error in opening handshake", exc_info=True) return self.reject( @@ -115,8 +113,7 @@ def accept(self, connect: Connect) -> Union[Accept, Reject]: "Failed to open a WebSocket connection.\n" "See server log for more information.\n" ), - exception=exc, - ) + )._replace(exception=exc) headers = Headers() @@ -146,8 +143,7 @@ def accept(self, connect: Connect) -> Union[Accept, Reject]: headers.setdefault("Date", email.utils.formatdate(usegmt=True)) headers.setdefault("Server", USER_AGENT) - response = Response(101, "Switching Protocols", headers) - return Accept(response) + return Response(101, "Switching Protocols", headers) def process_request( self, request: Request @@ -189,29 +185,29 @@ def process_request( try: key = headers["Sec-WebSocket-Key"] - except KeyError: - raise InvalidHeader("Sec-WebSocket-Key") - except MultipleValuesError: + except KeyError as exc: + raise InvalidHeader("Sec-WebSocket-Key") from exc + except MultipleValuesError as exc: raise InvalidHeader( "Sec-WebSocket-Key", "more than one Sec-WebSocket-Key header found" - ) + ) from exc try: raw_key = base64.b64decode(key.encode(), validate=True) - except binascii.Error: - raise InvalidHeaderValue("Sec-WebSocket-Key", key) + except binascii.Error as exc: + raise InvalidHeaderValue("Sec-WebSocket-Key", key) from exc if len(raw_key) != 16: raise InvalidHeaderValue("Sec-WebSocket-Key", key) try: version = headers["Sec-WebSocket-Version"] - except KeyError: - raise InvalidHeader("Sec-WebSocket-Version") - except MultipleValuesError: + except KeyError as exc: + raise InvalidHeader("Sec-WebSocket-Version") from exc + except MultipleValuesError as exc: raise InvalidHeader( "Sec-WebSocket-Version", "more than one Sec-WebSocket-Version header found", - ) + ) from exc if version != "13": raise InvalidHeaderValue("Sec-WebSocket-Version", version) @@ -389,9 +385,9 @@ def reject( text: str, headers: Optional[Headers] = None, exception: Optional[Exception] = None, - ) -> Reject: + ) -> Response: """ - Create a ``Reject`` event to send to the client. + Create a HTTP response event to send to the client. A short plain text response is the best fallback when failing to establish a WebSocket connection. 
@@ -405,17 +401,16 @@ def reject( headers.setdefault("Content-Length", str(len(body))) headers.setdefault("Content-Type", "text/plain; charset=utf-8") headers.setdefault("Connection", "close") - response = Response(status.value, status.phrase, headers, body) - return Reject(response, exception) + return Response(status.value, status.phrase, headers, body) - def send_in_connecting_state(self, event: Event) -> bytes: - assert isinstance(event, (Accept, Reject)) + def send_response(self, response: Response) -> bytes: + """ + Convert a WebSocket handshake response to bytes to send to the client. - if isinstance(event, Accept): + """ + if response.status_code == 101: self.state = OPEN - response = event.response - logger.debug( "%s > HTTP/1.1 %d %s", self.side, @@ -431,5 +426,5 @@ def send_in_connecting_state(self, event: Event) -> bytes: def parse(self) -> Generator[None, None, None]: request = yield from Request.parse(self.reader.read_line) assert self.state == CONNECTING - self.events.append(Connect(request)) + self.events.append(request) yield from super().parse() diff --git a/tests/test_client.py b/tests/test_client.py index 1cf27349d..eef8eb13e 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -4,7 +4,6 @@ from websockets.client import * from websockets.connection import CONNECTING, OPEN from websockets.datastructures import Headers -from websockets.events import Accept, Connect, Reject from websockets.exceptions import InvalidHandshake, InvalidHeader from websockets.http import USER_AGENT from websockets.http11 import Request, Response @@ -24,9 +23,9 @@ class ConnectTests(unittest.TestCase): def test_send_connect(self): with unittest.mock.patch("websockets.client.generate_key", return_value=KEY): client = ClientConnection("wss://example.com/test") - connect = client.connect() - self.assertIsInstance(connect, Connect) - bytes_to_send = client.send(connect) + request = client.connect() + self.assertIsInstance(request, Request) + bytes_to_send = client.send_request(request) self.assertEqual( bytes_to_send, ( @@ -44,11 +43,10 @@ def test_send_connect(self): def test_connect_request(self): with unittest.mock.patch("websockets.client.generate_key", return_value=KEY): client = ClientConnection("wss://example.com/test") - connect = client.connect() - self.assertIsInstance(connect.request, Request) - self.assertEqual(connect.request.path, "/test") + request = client.connect() + self.assertEqual(request.path, "/test") self.assertEqual( - connect.request.headers, + request.headers, Headers( { "Host": "example.com", @@ -63,7 +61,7 @@ def test_connect_request(self): def test_path(self): client = ClientConnection("wss://example.com/endpoint?test=1") - request = client.connect().request + request = client.connect() self.assertEqual(request.path, "/endpoint?test=1") @@ -78,19 +76,19 @@ def test_port(self): ]: with self.subTest(uri=uri): client = ClientConnection(uri) - request = client.connect().request + request = client.connect() self.assertEqual(request.headers["Host"], host) def test_user_info(self): client = ClientConnection("wss://hello:iloveyou@example.com/") - request = client.connect().request + request = client.connect() self.assertEqual(request.headers["Authorization"], "Basic aGVsbG86aWxvdmV5b3U=") def test_origin(self): client = ClientConnection("wss://example.com/", origin="https://example.com") - request = client.connect().request + request = client.connect() self.assertEqual(request.headers["Origin"], "https://example.com") @@ -98,13 +96,13 @@ def test_extensions(self): 
client = ClientConnection( "wss://example.com/", extensions=[ClientOpExtensionFactory()] ) - request = client.connect().request + request = client.connect() self.assertEqual(request.headers["Sec-WebSocket-Extensions"], "x-op; op") def test_subprotocols(self): client = ClientConnection("wss://example.com/", subprotocols=["chat"]) - request = client.connect().request + request = client.connect() self.assertEqual(request.headers["Sec-WebSocket-Protocol"], "chat") @@ -118,7 +116,7 @@ def test_extra_headers(self): client = ClientConnection( "wss://example.com/", extra_headers=extra_headers ) - request = client.connect().request + request = client.connect() self.assertEqual(request.headers["X-Spam"], "Eggs") @@ -126,7 +124,7 @@ def test_extra_headers_overrides_user_agent(self): client = ClientConnection( "wss://example.com/", extra_headers={"User-Agent": "Other"} ) - request = client.connect().request + request = client.connect() self.assertEqual(request.headers["User-Agent"], "Other") @@ -136,7 +134,7 @@ def test_receive_accept(self): with unittest.mock.patch("websockets.client.generate_key", return_value=KEY): client = ClientConnection("ws://example.com/test") client.connect() - [accept], bytes_to_send = client.receive_data( + [response], bytes_to_send = client.receive_data( ( f"HTTP/1.1 101 Switching Protocols\r\n" f"Upgrade: websocket\r\n" @@ -147,15 +145,15 @@ def test_receive_accept(self): f"\r\n" ).encode(), ) - self.assertIsInstance(accept, Accept) - self.assertEqual(bytes_to_send, b"") + self.assertIsInstance(response, Response) + self.assertEqual(bytes_to_send, []) self.assertEqual(client.state, OPEN) def test_receive_reject(self): with unittest.mock.patch("websockets.client.generate_key", return_value=KEY): client = ClientConnection("ws://example.com/test") client.connect() - [reject], bytes_to_send = client.receive_data( + [response], bytes_to_send = client.receive_data( ( f"HTTP/1.1 404 Not Found\r\n" f"Date: {DATE}\r\n" @@ -167,15 +165,15 @@ def test_receive_reject(self): f"Sorry folks.\n" ).encode(), ) - self.assertIsInstance(reject, Reject) - self.assertEqual(bytes_to_send, b"") + self.assertIsInstance(response, Response) + self.assertEqual(bytes_to_send, []) self.assertEqual(client.state, CONNECTING) def test_accept_response(self): with unittest.mock.patch("websockets.client.generate_key", return_value=KEY): client = ClientConnection("ws://example.com/test") client.connect() - [accept], _bytes_to_send = client.receive_data( + [response], _bytes_to_send = client.receive_data( ( f"HTTP/1.1 101 Switching Protocols\r\n" f"Upgrade: websocket\r\n" @@ -186,10 +184,10 @@ def test_accept_response(self): f"\r\n" ).encode(), ) - self.assertEqual(accept.response.status_code, 101) - self.assertEqual(accept.response.reason_phrase, "Switching Protocols") + self.assertEqual(response.status_code, 101) + self.assertEqual(response.reason_phrase, "Switching Protocols") self.assertEqual( - accept.response.headers, + response.headers, Headers( { "Upgrade": "websocket", @@ -200,13 +198,13 @@ def test_accept_response(self): } ), ) - self.assertIsNone(accept.response.body) + self.assertIsNone(response.body) def test_reject_response(self): with unittest.mock.patch("websockets.client.generate_key", return_value=KEY): client = ClientConnection("ws://example.com/test") client.connect() - [reject], _bytes_to_send = client.receive_data( + [response], _bytes_to_send = client.receive_data( ( f"HTTP/1.1 404 Not Found\r\n" f"Date: {DATE}\r\n" @@ -218,10 +216,10 @@ def test_reject_response(self): f"Sorry folks.\n" 
).encode(), ) - self.assertEqual(reject.response.status_code, 404) - self.assertEqual(reject.response.reason_phrase, "Not Found") + self.assertEqual(response.status_code, 404) + self.assertEqual(response.reason_phrase, "Not Found") self.assertEqual( - reject.response.headers, + response.headers, Headers( { "Date": DATE, @@ -232,10 +230,10 @@ def test_reject_response(self): } ), ) - self.assertEqual(reject.response.body, b"Sorry folks.\n") + self.assertEqual(response.body, b"Sorry folks.\n") def make_accept_response(self, client): - request = client.connect().request + request = client.connect() return Response( status_code=101, reason_phrase="Switching Protocols", @@ -253,19 +251,19 @@ def make_accept_response(self, client): def test_basic(self): client = ClientConnection("wss://example.com/") response = self.make_accept_response(client) - [accept], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(accept, Accept) + self.assertEqual(client.state, OPEN) def test_missing_connection(self): client = ClientConnection("wss://example.com/") response = self.make_accept_response(client) del response.headers["Connection"] - [reject], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(reject, Reject) + self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHeader) as raised: - raise reject.exception + raise response.exception self.assertEqual(str(raised.exception), "missing Connection header") def test_invalid_connection(self): @@ -273,22 +271,22 @@ def test_invalid_connection(self): response = self.make_accept_response(client) del response.headers["Connection"] response.headers["Connection"] = "close" - [reject], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(reject, Reject) + self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHeader) as raised: - raise reject.exception + raise response.exception self.assertEqual(str(raised.exception), "invalid Connection header: close") def test_missing_upgrade(self): client = ClientConnection("wss://example.com/") response = self.make_accept_response(client) del response.headers["Upgrade"] - [reject], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(reject, Reject) + self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHeader) as raised: - raise reject.exception + raise response.exception self.assertEqual(str(raised.exception), "missing Upgrade header") def test_invalid_upgrade(self): @@ -296,33 +294,33 @@ def test_invalid_upgrade(self): response = self.make_accept_response(client) del response.headers["Upgrade"] response.headers["Upgrade"] = "h2c" - [reject], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(reject, Reject) + self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHeader) as raised: - raise reject.exception + raise response.exception self.assertEqual(str(raised.exception), "invalid Upgrade header: h2c") def test_missing_accept(self): client = ClientConnection("wss://example.com/") response = self.make_accept_response(client) del 
response.headers["Sec-WebSocket-Accept"] - [reject], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(reject, Reject) + self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHeader) as raised: - raise reject.exception + raise response.exception self.assertEqual(str(raised.exception), "missing Sec-WebSocket-Accept header") def test_multiple_accept(self): client = ClientConnection("wss://example.com/") response = self.make_accept_response(client) response.headers["Sec-WebSocket-Accept"] = ACCEPT - [reject], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(reject, Reject) + self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHeader) as raised: - raise reject.exception + raise response.exception self.assertEqual( str(raised.exception), "invalid Sec-WebSocket-Accept header: " @@ -334,11 +332,11 @@ def test_invalid_accept(self): response = self.make_accept_response(client) del response.headers["Sec-WebSocket-Accept"] response.headers["Sec-WebSocket-Accept"] = ACCEPT - [reject], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(reject, Reject) + self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHeader) as raised: - raise reject.exception + raise response.exception self.assertEqual( str(raised.exception), f"invalid Sec-WebSocket-Accept header: {ACCEPT}" ) @@ -346,9 +344,9 @@ def test_invalid_accept(self): def test_no_extensions(self): client = ClientConnection("wss://example.com/") response = self.make_accept_response(client) - [accept], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(accept, Accept) + self.assertEqual(client.state, OPEN) self.assertEqual(client.extensions, []) def test_no_extension(self): @@ -357,9 +355,9 @@ def test_no_extension(self): ) response = self.make_accept_response(client) response.headers["Sec-WebSocket-Extensions"] = "x-op; op" - [accept], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(accept, Accept) + self.assertEqual(client.state, OPEN) self.assertEqual(client.extensions, [OpExtension()]) def test_extension(self): @@ -368,20 +366,20 @@ def test_extension(self): ) response = self.make_accept_response(client) response.headers["Sec-WebSocket-Extensions"] = "x-rsv2" - [accept], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(accept, Accept) + self.assertEqual(client.state, OPEN) self.assertEqual(client.extensions, [Rsv2Extension()]) def test_unexpected_extension(self): client = ClientConnection("wss://example.com/") response = self.make_accept_response(client) response.headers["Sec-WebSocket-Extensions"] = "x-op; op" - [reject], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(reject, Reject) + self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHandshake) as raised: - raise reject.exception + raise response.exception 
self.assertEqual(str(raised.exception), "no extensions supported") def test_unsupported_extension(self): @@ -390,11 +388,11 @@ def test_unsupported_extension(self): ) response = self.make_accept_response(client) response.headers["Sec-WebSocket-Extensions"] = "x-op; op" - [reject], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(reject, Reject) + self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHandshake) as raised: - raise reject.exception + raise response.exception self.assertEqual( str(raised.exception), "Unsupported extension: name = x-op, params = [('op', None)]", @@ -406,9 +404,9 @@ def test_supported_extension_parameters(self): ) response = self.make_accept_response(client) response.headers["Sec-WebSocket-Extensions"] = "x-op; op=this" - [accept], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(accept, Accept) + self.assertEqual(client.state, OPEN) self.assertEqual(client.extensions, [OpExtension("this")]) def test_unsupported_extension_parameters(self): @@ -417,11 +415,11 @@ def test_unsupported_extension_parameters(self): ) response = self.make_accept_response(client) response.headers["Sec-WebSocket-Extensions"] = "x-op; op=that" - [reject], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(reject, Reject) + self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHandshake) as raised: - raise reject.exception + raise response.exception self.assertEqual( str(raised.exception), "Unsupported extension: name = x-op, params = [('op', 'that')]", @@ -437,9 +435,9 @@ def test_multiple_supported_extension_parameters(self): ) response = self.make_accept_response(client) response.headers["Sec-WebSocket-Extensions"] = "x-op; op=that" - [accept], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(accept, Accept) + self.assertEqual(client.state, OPEN) self.assertEqual(client.extensions, [OpExtension("that")]) def test_multiple_extensions(self): @@ -450,9 +448,9 @@ def test_multiple_extensions(self): response = self.make_accept_response(client) response.headers["Sec-WebSocket-Extensions"] = "x-op; op" response.headers["Sec-WebSocket-Extensions"] = "x-rsv2" - [accept], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(accept, Accept) + self.assertEqual(client.state, OPEN) self.assertEqual(client.extensions, [OpExtension(), Rsv2Extension()]) def test_multiple_extensions_order(self): @@ -463,45 +461,45 @@ def test_multiple_extensions_order(self): response = self.make_accept_response(client) response.headers["Sec-WebSocket-Extensions"] = "x-rsv2" response.headers["Sec-WebSocket-Extensions"] = "x-op; op" - [accept], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(accept, Accept) + self.assertEqual(client.state, OPEN) self.assertEqual(client.extensions, [Rsv2Extension(), OpExtension()]) def test_no_subprotocols(self): client = ClientConnection("wss://example.com/") response = self.make_accept_response(client) - [accept], 
_bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(accept, Accept) + self.assertEqual(client.state, OPEN) self.assertIsNone(client.subprotocol) def test_no_subprotocol(self): client = ClientConnection("wss://example.com/", subprotocols=["chat"]) response = self.make_accept_response(client) - [accept], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(accept, Accept) + self.assertEqual(client.state, OPEN) self.assertIsNone(client.subprotocol) def test_subprotocol(self): client = ClientConnection("wss://example.com/", subprotocols=["chat"]) response = self.make_accept_response(client) response.headers["Sec-WebSocket-Protocol"] = "chat" - [accept], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(accept, Accept) + self.assertEqual(client.state, OPEN) self.assertEqual(client.subprotocol, "chat") def test_unexpected_subprotocol(self): client = ClientConnection("wss://example.com/") response = self.make_accept_response(client) response.headers["Sec-WebSocket-Protocol"] = "chat" - [reject], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(reject, Reject) + self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHandshake) as raised: - raise reject.exception + raise response.exception self.assertEqual(str(raised.exception), "no subprotocols supported") def test_multiple_subprotocols(self): @@ -511,11 +509,11 @@ def test_multiple_subprotocols(self): response = self.make_accept_response(client) response.headers["Sec-WebSocket-Protocol"] = "superchat" response.headers["Sec-WebSocket-Protocol"] = "chat" - [reject], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(reject, Reject) + self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHandshake) as raised: - raise reject.exception + raise response.exception self.assertEqual( str(raised.exception), "multiple subprotocols: superchat, chat" ) @@ -526,9 +524,9 @@ def test_supported_subprotocol(self): ) response = self.make_accept_response(client) response.headers["Sec-WebSocket-Protocol"] = "chat" - [accept], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(accept, Accept) + self.assertEqual(client.state, OPEN) self.assertEqual(client.subprotocol, "chat") def test_unsupported_subprotocol(self): @@ -537,9 +535,9 @@ def test_unsupported_subprotocol(self): ) response = self.make_accept_response(client) response.headers["Sec-WebSocket-Protocol"] = "otherchat" - [reject], _bytes_to_send = client.receive_data(response.serialize()) + [response], _bytes_to_send = client.receive_data(response.serialize()) - self.assertIsInstance(reject, Reject) + self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHandshake) as raised: - raise reject.exception + raise response.exception self.assertEqual(str(raised.exception), "unsupported subprotocol: otherchat") diff --git a/tests/test_server.py b/tests/test_server.py index 1d094a86d..8b00cec11 100644 --- a/tests/test_server.py +++ 
b/tests/test_server.py @@ -4,7 +4,6 @@ from websockets.connection import CONNECTING, OPEN from websockets.datastructures import Headers -from websockets.events import Accept, Connect, Reject from websockets.exceptions import InvalidHeader, InvalidOrigin, InvalidUpgrade from websockets.http import USER_AGENT from websockets.http11 import Request, Response @@ -23,7 +22,7 @@ class ConnectTests(unittest.TestCase): def test_receive_connect(self): server = ServerConnection() - [connect], bytes_to_send = server.receive_data( + [request], bytes_to_send = server.receive_data( ( f"GET /test HTTP/1.1\r\n" f"Host: example.com\r\n" @@ -35,12 +34,12 @@ def test_receive_connect(self): f"\r\n" ).encode(), ) - self.assertIsInstance(connect, Connect) - self.assertEqual(bytes_to_send, b"") + self.assertIsInstance(request, Request) + self.assertEqual(bytes_to_send, []) def test_connect_request(self): server = ServerConnection() - [connect], bytes_to_send = server.receive_data( + [request], bytes_to_send = server.receive_data( ( f"GET /test HTTP/1.1\r\n" f"Host: example.com\r\n" @@ -52,9 +51,9 @@ def test_connect_request(self): f"\r\n" ).encode(), ) - self.assertEqual(connect.request.path, "/test") + self.assertEqual(request.path, "/test") self.assertEqual( - connect.request.headers, + request.headers, Headers( { "Host": "example.com", @@ -69,7 +68,7 @@ def test_connect_request(self): class AcceptRejectTests(unittest.TestCase): - def make_connect_request(self): + def make_request(self): return Request( path="/test", headers=Headers( @@ -87,9 +86,9 @@ def make_connect_request(self): def test_send_accept(self): server = ServerConnection() with unittest.mock.patch("email.utils.formatdate", return_value=DATE): - accept = server.accept(Connect(self.make_connect_request())) - self.assertIsInstance(accept, Accept) - bytes_to_send = server.send(accept) + response = server.accept(self.make_request()) + self.assertIsInstance(response, Response) + bytes_to_send = server.send_response(response) self.assertEqual( bytes_to_send, ( @@ -107,9 +106,9 @@ def test_send_accept(self): def test_send_reject(self): server = ServerConnection() with unittest.mock.patch("email.utils.formatdate", return_value=DATE): - reject = server.reject(http.HTTPStatus.NOT_FOUND, "Sorry folks.\n") - self.assertIsInstance(reject, Reject) - bytes_to_send = server.send(reject) + response = server.reject(http.HTTPStatus.NOT_FOUND, "Sorry folks.\n") + self.assertIsInstance(response, Response) + bytes_to_send = server.send_response(response) self.assertEqual( bytes_to_send, ( @@ -128,12 +127,12 @@ def test_send_reject(self): def test_accept_response(self): server = ServerConnection() with unittest.mock.patch("email.utils.formatdate", return_value=DATE): - accept = server.accept(Connect(self.make_connect_request())) - self.assertIsInstance(accept.response, Response) - self.assertEqual(accept.response.status_code, 101) - self.assertEqual(accept.response.reason_phrase, "Switching Protocols") + response = server.accept(self.make_request()) + self.assertIsInstance(response, Response) + self.assertEqual(response.status_code, 101) + self.assertEqual(response.reason_phrase, "Switching Protocols") self.assertEqual( - accept.response.headers, + response.headers, Headers( { "Upgrade": "websocket", @@ -144,17 +143,17 @@ def test_accept_response(self): } ), ) - self.assertIsNone(accept.response.body) + self.assertIsNone(response.body) def test_reject_response(self): server = ServerConnection() with unittest.mock.patch("email.utils.formatdate", return_value=DATE): - 
reject = server.reject(http.HTTPStatus.NOT_FOUND, "Sorry folks.\n") - self.assertIsInstance(reject.response, Response) - self.assertEqual(reject.response.status_code, 404) - self.assertEqual(reject.response.reason_phrase, "Not Found") + response = server.reject(http.HTTPStatus.NOT_FOUND, "Sorry folks.\n") + self.assertIsInstance(response, Response) + self.assertEqual(response.status_code, 404) + self.assertEqual(response.reason_phrase, "Not Found") self.assertEqual( - reject.response.headers, + response.headers, Headers( { "Date": DATE, @@ -165,106 +164,99 @@ def test_reject_response(self): } ), ) - self.assertEqual(reject.response.body, b"Sorry folks.\n") + self.assertEqual(response.body, b"Sorry folks.\n") def test_basic(self): server = ServerConnection() - request = self.make_connect_request() - accept = server.accept(Connect(request)) + request = self.make_request() + response = server.accept(request) - self.assertIsInstance(accept, Accept) + self.assertEqual(response.status_code, 101) def test_unexpected_exception(self): server = ServerConnection() - request = self.make_connect_request() + request = self.make_request() with unittest.mock.patch( "websockets.server.ServerConnection.process_request", side_effect=Exception("BOOM"), ): - reject = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(reject, Reject) - self.assertEqual(reject.response.status_code, 500) + self.assertEqual(response.status_code, 500) with self.assertRaises(Exception) as raised: - raise reject.exception + raise response.exception self.assertEqual(str(raised.exception), "BOOM") def test_missing_connection(self): server = ServerConnection() - request = self.make_connect_request() + request = self.make_request() del request.headers["Connection"] - reject = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(reject, Reject) - self.assertEqual(reject.response.status_code, 426) - self.assertEqual(reject.response.headers["Upgrade"], "websocket") + self.assertEqual(response.status_code, 426) + self.assertEqual(response.headers["Upgrade"], "websocket") with self.assertRaises(InvalidUpgrade) as raised: - raise reject.exception + raise response.exception self.assertEqual(str(raised.exception), "missing Connection header") def test_invalid_connection(self): server = ServerConnection() - request = self.make_connect_request() + request = self.make_request() del request.headers["Connection"] request.headers["Connection"] = "close" - reject = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(reject, Reject) - self.assertEqual(reject.response.status_code, 426) - self.assertEqual(reject.response.headers["Upgrade"], "websocket") + self.assertEqual(response.status_code, 426) + self.assertEqual(response.headers["Upgrade"], "websocket") with self.assertRaises(InvalidUpgrade) as raised: - raise reject.exception + raise response.exception self.assertEqual(str(raised.exception), "invalid Connection header: close") def test_missing_upgrade(self): server = ServerConnection() - request = self.make_connect_request() + request = self.make_request() del request.headers["Upgrade"] - reject = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(reject, Reject) - self.assertEqual(reject.response.status_code, 426) - self.assertEqual(reject.response.headers["Upgrade"], "websocket") + self.assertEqual(response.status_code, 426) + self.assertEqual(response.headers["Upgrade"], "websocket") with 
self.assertRaises(InvalidUpgrade) as raised: - raise reject.exception + raise response.exception self.assertEqual(str(raised.exception), "missing Upgrade header") def test_invalid_upgrade(self): server = ServerConnection() - request = self.make_connect_request() + request = self.make_request() del request.headers["Upgrade"] request.headers["Upgrade"] = "h2c" - reject = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(reject, Reject) - self.assertEqual(reject.response.status_code, 426) - self.assertEqual(reject.response.headers["Upgrade"], "websocket") + self.assertEqual(response.status_code, 426) + self.assertEqual(response.headers["Upgrade"], "websocket") with self.assertRaises(InvalidUpgrade) as raised: - raise reject.exception + raise response.exception self.assertEqual(str(raised.exception), "invalid Upgrade header: h2c") def test_missing_key(self): server = ServerConnection() - request = self.make_connect_request() + request = self.make_request() del request.headers["Sec-WebSocket-Key"] - reject = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(reject, Reject) - self.assertEqual(reject.response.status_code, 400) + self.assertEqual(response.status_code, 400) with self.assertRaises(InvalidHeader) as raised: - raise reject.exception + raise response.exception self.assertEqual(str(raised.exception), "missing Sec-WebSocket-Key header") def test_multiple_key(self): server = ServerConnection() - request = self.make_connect_request() + request = self.make_request() request.headers["Sec-WebSocket-Key"] = KEY - reject = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(reject, Reject) - self.assertEqual(reject.response.status_code, 400) + self.assertEqual(response.status_code, 400) with self.assertRaises(InvalidHeader) as raised: - raise reject.exception + raise response.exception self.assertEqual( str(raised.exception), "invalid Sec-WebSocket-Key header: " @@ -273,58 +265,54 @@ def test_multiple_key(self): def test_invalid_key(self): server = ServerConnection() - request = self.make_connect_request() + request = self.make_request() del request.headers["Sec-WebSocket-Key"] request.headers["Sec-WebSocket-Key"] = "not Base64 data!" - reject = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(reject, Reject) - self.assertEqual(reject.response.status_code, 400) + self.assertEqual(response.status_code, 400) with self.assertRaises(InvalidHeader) as raised: - raise reject.exception + raise response.exception self.assertEqual( str(raised.exception), "invalid Sec-WebSocket-Key header: not Base64 data!" 
) def test_truncated_key(self): server = ServerConnection() - request = self.make_connect_request() + request = self.make_request() del request.headers["Sec-WebSocket-Key"] request.headers["Sec-WebSocket-Key"] = KEY[ :16 ] # 12 bytes instead of 16, Base64-encoded - reject = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(reject, Reject) - self.assertEqual(reject.response.status_code, 400) + self.assertEqual(response.status_code, 400) with self.assertRaises(InvalidHeader) as raised: - raise reject.exception + raise response.exception self.assertEqual( str(raised.exception), f"invalid Sec-WebSocket-Key header: {KEY[:16]}" ) def test_missing_version(self): server = ServerConnection() - request = self.make_connect_request() + request = self.make_request() del request.headers["Sec-WebSocket-Version"] - reject = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(reject, Reject) - self.assertEqual(reject.response.status_code, 400) + self.assertEqual(response.status_code, 400) with self.assertRaises(InvalidHeader) as raised: - raise reject.exception + raise response.exception self.assertEqual(str(raised.exception), "missing Sec-WebSocket-Version header") def test_multiple_version(self): server = ServerConnection() - request = self.make_connect_request() + request = self.make_request() request.headers["Sec-WebSocket-Version"] = "11" - reject = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(reject, Reject) - self.assertEqual(reject.response.status_code, 400) + self.assertEqual(response.status_code, 400) with self.assertRaises(InvalidHeader) as raised: - raise reject.exception + raise response.exception self.assertEqual( str(raised.exception), "invalid Sec-WebSocket-Version header: " @@ -333,49 +321,46 @@ def test_multiple_version(self): def test_invalid_version(self): server = ServerConnection() - request = self.make_connect_request() + request = self.make_request() del request.headers["Sec-WebSocket-Version"] request.headers["Sec-WebSocket-Version"] = "11" - reject = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(reject, Reject) - self.assertEqual(reject.response.status_code, 400) + self.assertEqual(response.status_code, 400) with self.assertRaises(InvalidHeader) as raised: - raise reject.exception + raise response.exception self.assertEqual( str(raised.exception), "invalid Sec-WebSocket-Version header: 11" ) def test_no_origin(self): server = ServerConnection(origins=["https://example.com"]) - request = self.make_connect_request() - reject = server.accept(Connect(request)) + request = self.make_request() + response = server.accept(request) - self.assertIsInstance(reject, Reject) - self.assertEqual(reject.response.status_code, 403) + self.assertEqual(response.status_code, 403) with self.assertRaises(InvalidOrigin) as raised: - raise reject.exception + raise response.exception self.assertEqual(str(raised.exception), "missing Origin header") def test_origin(self): server = ServerConnection(origins=["https://example.com"]) - request = self.make_connect_request() + request = self.make_request() request.headers["Origin"] = "https://example.com" - accept = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(accept, Accept) + self.assertEqual(response.status_code, 101) self.assertEqual(server.origin, "https://example.com") def test_unexpected_origin(self): server = 
ServerConnection(origins=["https://example.com"]) - request = self.make_connect_request() + request = self.make_request() request.headers["Origin"] = "https://other.example.com" - reject = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(reject, Reject) - self.assertEqual(reject.response.status_code, 403) + self.assertEqual(response.status_code, 403) with self.assertRaises(InvalidOrigin) as raised: - raise reject.exception + raise response.exception self.assertEqual( str(raised.exception), "invalid Origin header: https://other.example.com" ) @@ -384,17 +369,16 @@ def test_multiple_origin(self): server = ServerConnection( origins=["https://example.com", "https://other.example.com"] ) - request = self.make_connect_request() + request = self.make_request() request.headers["Origin"] = "https://example.com" request.headers["Origin"] = "https://other.example.com" - reject = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(reject, Reject) # This is prohibited by the HTTP specification, so the return code is # 400 Bad Request rather than 403 Forbidden. - self.assertEqual(reject.response.status_code, 400) + self.assertEqual(response.status_code, 400) with self.assertRaises(InvalidHeader) as raised: - raise reject.exception + raise response.exception self.assertEqual( str(raised.exception), "invalid Origin header: more than one Origin header found", @@ -404,107 +388,102 @@ def test_supported_origin(self): server = ServerConnection( origins=["https://example.com", "https://other.example.com"] ) - request = self.make_connect_request() + request = self.make_request() request.headers["Origin"] = "https://other.example.com" - accept = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(accept, Accept) + self.assertEqual(response.status_code, 101) self.assertEqual(server.origin, "https://other.example.com") def test_unsupported_origin(self): server = ServerConnection( origins=["https://example.com", "https://other.example.com"] ) - request = self.make_connect_request() + request = self.make_request() request.headers["Origin"] = "https://original.example.com" - reject = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(reject, Reject) - self.assertEqual(reject.response.status_code, 403) + self.assertEqual(response.status_code, 403) with self.assertRaises(InvalidOrigin) as raised: - raise reject.exception + raise response.exception self.assertEqual( str(raised.exception), "invalid Origin header: https://original.example.com" ) def test_no_origin_accepted(self): server = ServerConnection(origins=[None]) - request = self.make_connect_request() - accept = server.accept(Connect(request)) + request = self.make_request() + response = server.accept(request) - self.assertIsInstance(accept, Accept) + self.assertEqual(response.status_code, 101) self.assertIsNone(server.origin) def test_no_extensions(self): server = ServerConnection() - request = self.make_connect_request() - accept = server.accept(Connect(request)) + request = self.make_request() + response = server.accept(request) - self.assertIsInstance(accept, Accept) - self.assertNotIn("Sec-WebSocket-Extensions", accept.response.headers) + self.assertEqual(response.status_code, 101) + self.assertNotIn("Sec-WebSocket-Extensions", response.headers) self.assertEqual(server.extensions, []) def test_no_extension(self): server = ServerConnection(extensions=[ServerOpExtensionFactory()]) - request = 
self.make_connect_request() - accept = server.accept(Connect(request)) + request = self.make_request() + response = server.accept(request) - self.assertIsInstance(accept, Accept) - self.assertNotIn("Sec-WebSocket-Extensions", accept.response.headers) + self.assertEqual(response.status_code, 101) + self.assertNotIn("Sec-WebSocket-Extensions", response.headers) self.assertEqual(server.extensions, []) def test_extension(self): server = ServerConnection(extensions=[ServerOpExtensionFactory()]) - request = self.make_connect_request() + request = self.make_request() request.headers["Sec-WebSocket-Extensions"] = "x-op; op" - accept = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(accept, Accept) - self.assertEqual( - accept.response.headers["Sec-WebSocket-Extensions"], "x-op; op" - ) + self.assertEqual(response.status_code, 101) + self.assertEqual(response.headers["Sec-WebSocket-Extensions"], "x-op; op") self.assertEqual(server.extensions, [OpExtension()]) def test_unexpected_extension(self): server = ServerConnection() - request = self.make_connect_request() + request = self.make_request() request.headers["Sec-WebSocket-Extensions"] = "x-op; op" - accept = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(accept, Accept) - self.assertNotIn("Sec-WebSocket-Extensions", accept.response.headers) + self.assertEqual(response.status_code, 101) + self.assertNotIn("Sec-WebSocket-Extensions", response.headers) self.assertEqual(server.extensions, []) def test_unsupported_extension(self): server = ServerConnection(extensions=[ServerRsv2ExtensionFactory()]) - request = self.make_connect_request() + request = self.make_request() request.headers["Sec-WebSocket-Extensions"] = "x-op; op" - accept = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(accept, Accept) - self.assertNotIn("Sec-WebSocket-Extensions", accept.response.headers) + self.assertEqual(response.status_code, 101) + self.assertNotIn("Sec-WebSocket-Extensions", response.headers) self.assertEqual(server.extensions, []) def test_supported_extension_parameters(self): server = ServerConnection(extensions=[ServerOpExtensionFactory("this")]) - request = self.make_connect_request() + request = self.make_request() request.headers["Sec-WebSocket-Extensions"] = "x-op; op=this" - accept = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(accept, Accept) - self.assertEqual( - accept.response.headers["Sec-WebSocket-Extensions"], "x-op; op=this" - ) + self.assertEqual(response.status_code, 101) + self.assertEqual(response.headers["Sec-WebSocket-Extensions"], "x-op; op=this") self.assertEqual(server.extensions, [OpExtension("this")]) def test_unsupported_extension_parameters(self): server = ServerConnection(extensions=[ServerOpExtensionFactory("this")]) - request = self.make_connect_request() + request = self.make_request() request.headers["Sec-WebSocket-Extensions"] = "x-op; op=that" - accept = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(accept, Accept) - self.assertNotIn("Sec-WebSocket-Extensions", accept.response.headers) + self.assertEqual(response.status_code, 101) + self.assertNotIn("Sec-WebSocket-Extensions", response.headers) self.assertEqual(server.extensions, []) def test_multiple_supported_extension_parameters(self): @@ -514,28 +493,26 @@ def test_multiple_supported_extension_parameters(self): ServerOpExtensionFactory("that"), ] ) - 
request = self.make_connect_request() + request = self.make_request() request.headers["Sec-WebSocket-Extensions"] = "x-op; op=that" - accept = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(accept, Accept) - self.assertEqual( - accept.response.headers["Sec-WebSocket-Extensions"], "x-op; op=that" - ) + self.assertEqual(response.status_code, 101) + self.assertEqual(response.headers["Sec-WebSocket-Extensions"], "x-op; op=that") self.assertEqual(server.extensions, [OpExtension("that")]) def test_multiple_extensions(self): server = ServerConnection( extensions=[ServerOpExtensionFactory(), ServerRsv2ExtensionFactory()] ) - request = self.make_connect_request() + request = self.make_request() request.headers["Sec-WebSocket-Extensions"] = "x-op; op" request.headers["Sec-WebSocket-Extensions"] = "x-rsv2" - accept = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(accept, Accept) + self.assertEqual(response.status_code, 101) self.assertEqual( - accept.response.headers["Sec-WebSocket-Extensions"], "x-op; op, x-rsv2" + response.headers["Sec-WebSocket-Extensions"], "x-op; op, x-rsv2" ) self.assertEqual(server.extensions, [OpExtension(), Rsv2Extension()]) @@ -543,84 +520,84 @@ def test_multiple_extensions_order(self): server = ServerConnection( extensions=[ServerOpExtensionFactory(), ServerRsv2ExtensionFactory()] ) - request = self.make_connect_request() + request = self.make_request() request.headers["Sec-WebSocket-Extensions"] = "x-rsv2" request.headers["Sec-WebSocket-Extensions"] = "x-op; op" - accept = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(accept, Accept) + self.assertEqual(response.status_code, 101) self.assertEqual( - accept.response.headers["Sec-WebSocket-Extensions"], "x-rsv2, x-op; op" + response.headers["Sec-WebSocket-Extensions"], "x-rsv2, x-op; op" ) self.assertEqual(server.extensions, [Rsv2Extension(), OpExtension()]) def test_no_subprotocols(self): server = ServerConnection() - request = self.make_connect_request() - accept = server.accept(Connect(request)) + request = self.make_request() + response = server.accept(request) - self.assertIsInstance(accept, Accept) - self.assertNotIn("Sec-WebSocket-Protocol", accept.response.headers) + self.assertEqual(response.status_code, 101) + self.assertNotIn("Sec-WebSocket-Protocol", response.headers) self.assertIsNone(server.subprotocol) def test_no_subprotocol(self): server = ServerConnection(subprotocols=["chat"]) - request = self.make_connect_request() - accept = server.accept(Connect(request)) + request = self.make_request() + response = server.accept(request) - self.assertIsInstance(accept, Accept) - self.assertNotIn("Sec-WebSocket-Protocol", accept.response.headers) + self.assertEqual(response.status_code, 101) + self.assertNotIn("Sec-WebSocket-Protocol", response.headers) self.assertIsNone(server.subprotocol) def test_subprotocol(self): server = ServerConnection(subprotocols=["chat"]) - request = self.make_connect_request() + request = self.make_request() request.headers["Sec-WebSocket-Protocol"] = "chat" - accept = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(accept, Accept) - self.assertEqual(accept.response.headers["Sec-WebSocket-Protocol"], "chat") + self.assertEqual(response.status_code, 101) + self.assertEqual(response.headers["Sec-WebSocket-Protocol"], "chat") self.assertEqual(server.subprotocol, "chat") def test_unexpected_subprotocol(self): server = 
ServerConnection() - request = self.make_connect_request() + request = self.make_request() request.headers["Sec-WebSocket-Protocol"] = "chat" - accept = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(accept, Accept) - self.assertNotIn("Sec-WebSocket-Protocol", accept.response.headers) + self.assertEqual(response.status_code, 101) + self.assertNotIn("Sec-WebSocket-Protocol", response.headers) self.assertIsNone(server.subprotocol) def test_multiple_subprotocols(self): server = ServerConnection(subprotocols=["superchat", "chat"]) - request = self.make_connect_request() + request = self.make_request() request.headers["Sec-WebSocket-Protocol"] = "superchat" request.headers["Sec-WebSocket-Protocol"] = "chat" - accept = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(accept, Accept) - self.assertEqual(accept.response.headers["Sec-WebSocket-Protocol"], "superchat") + self.assertEqual(response.status_code, 101) + self.assertEqual(response.headers["Sec-WebSocket-Protocol"], "superchat") self.assertEqual(server.subprotocol, "superchat") def test_supported_subprotocol(self): server = ServerConnection(subprotocols=["superchat", "chat"]) - request = self.make_connect_request() + request = self.make_request() request.headers["Sec-WebSocket-Protocol"] = "chat" - accept = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(accept, Accept) - self.assertEqual(accept.response.headers["Sec-WebSocket-Protocol"], "chat") + self.assertEqual(response.status_code, 101) + self.assertEqual(response.headers["Sec-WebSocket-Protocol"], "chat") self.assertEqual(server.subprotocol, "chat") def test_unsupported_subprotocol(self): server = ServerConnection(subprotocols=["superchat", "chat"]) - request = self.make_connect_request() + request = self.make_request() request.headers["Sec-WebSocket-Protocol"] = "otherchat" - accept = server.accept(Connect(request)) + response = server.accept(request) - self.assertIsInstance(accept, Accept) - self.assertNotIn("Sec-WebSocket-Protocol", accept.response.headers) + self.assertEqual(response.status_code, 101) + self.assertNotIn("Sec-WebSocket-Protocol", response.headers) self.assertIsNone(server.subprotocol) def test_extra_headers(self): @@ -634,16 +611,16 @@ def test_extra_headers(self): ]: with self.subTest(extra_headers=extra_headers): server = ServerConnection(extra_headers=extra_headers) - request = self.make_connect_request() - accept = server.accept(Connect(request)) + request = self.make_request() + response = server.accept(request) - self.assertIsInstance(accept, Accept) - self.assertEqual(accept.response.headers["X-Spam"], "Eggs") + self.assertEqual(response.status_code, 101) + self.assertEqual(response.headers["X-Spam"], "Eggs") def test_extra_headers_overrides_server(self): server = ServerConnection(extra_headers={"Server": "Other"}) - request = self.make_connect_request() - accept = server.accept(Connect(request)) + request = self.make_request() + response = server.accept(request) - self.assertIsInstance(accept, Accept) - self.assertEqual(accept.response.headers["Server"], "Other") + self.assertEqual(response.status_code, 101) + self.assertEqual(response.headers["Server"], "Other") From f9177126eb6a6266c58345714ba75fdffd428802 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 26 Jul 2020 11:50:50 +0200 Subject: [PATCH 208/281] Change Sans I/O model to handle exceptions. 
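To make the new calling convention concrete, here is a rough sketch of an
in-memory handshake driven entirely through the Sans-I/O objects. It is
assembled from the tests in this patch rather than taken from any
documentation; the URI and the idea of wiring a ClientConnection directly to
a ServerConnection are illustrative only.

    from websockets.client import ClientConnection
    from websockets.server import ServerConnection

    client = ClientConnection("wss://example.com/test")
    server = ServerConnection()

    # Client: build the handshake request and queue its bytes.
    request = client.connect()
    client.send_request(request)

    # Server: feed the request bytes, read the Request event, answer it.
    for data in client.bytes_to_send():
        server.receive_data(data)
    [request] = server.events_received()
    server.send_response(server.accept(request))

    # Client: feed the response bytes; receive_data() itself returns
    # nothing, and the handshake Response shows up as an event.
    for data in server.bytes_to_send():
        client.receive_data(data)
    [response] = client.events_received()
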
In the new model, receive_data returns nothing and raises an exception on errors. Events received and bytes to send are obtained through other method calls. --- src/websockets/client.py | 16 +++--- src/websockets/connection.py | 48 ++++++++++++++--- src/websockets/server.py | 23 ++++++--- tests/test_client.py | 99 +++++++++++++++++++++++------------- tests/test_server.py | 27 +++++----- 5 files changed, 143 insertions(+), 70 deletions(-) diff --git a/src/websockets/client.py b/src/websockets/client.py index 50203f27c..d6250c7e9 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -1,6 +1,6 @@ import collections import logging -from typing import Generator, List, Optional, Sequence +from typing import Any, Generator, List, Optional, Sequence from .asyncio_client import WebSocketClientProtocol, connect, unix_connect from .connection import CLIENT, CONNECTING, OPEN, Connection @@ -47,9 +47,6 @@ class ClientConnection(Connection): - - side = CLIENT - def __init__( self, uri: str, @@ -57,8 +54,9 @@ def __init__( extensions: Optional[Sequence[ClientExtensionFactory]] = None, subprotocols: Optional[Sequence[Subprotocol]] = None, extra_headers: Optional[HeadersLike] = None, + **kwargs: Any, ): - super().__init__(state=CONNECTING) + super().__init__(side=CLIENT, state=CONNECTING, **kwargs) self.wsuri = parse_uri(uri) self.origin = origin self.available_extensions = extensions @@ -271,15 +269,15 @@ def process_subprotocol(self, headers: Headers) -> Optional[Subprotocol]: return subprotocol - def send_request(self, request: Request) -> bytes: + def send_request(self, request: Request) -> None: """ - Convert a WebSocket handshake request to bytes to send to the server. + Send a WebSocket handshake request to the server. """ logger.debug("%s > GET %s HTTP/1.1", self.side, request.path) logger.debug("%s > %r", self.side, request.headers) - return request.serialize() + self.writes.append(request.serialize()) def parse(self) -> Generator[None, None, None]: response = yield from Response.parse( @@ -292,7 +290,7 @@ def parse(self) -> Generator[None, None, None]: response = response._replace(exception=exc) logger.debug("Invalid handshake", exc_info=True) else: - self.state = OPEN + self.set_state(OPEN) finally: self.events.append(response) yield from super().parse() diff --git a/src/websockets/connection.py b/src/websockets/connection.py index ac9aedd6b..616f2b3c2 100644 --- a/src/websockets/connection.py +++ b/src/websockets/connection.py @@ -1,5 +1,5 @@ import enum -from typing import Generator, List, Tuple, Union +from typing import Any, Generator, List, Tuple, Union from .exceptions import InvalidState from .frames import Frame @@ -41,22 +41,27 @@ class Connection: side: Side - def __init__(self, state: State = OPEN) -> None: + def __init__(self, side: Side, state: State = OPEN, **kwargs: Any) -> None: + self.side = side self.state = state self.reader = StreamReader() self.events: List[Event] = [] + self.writes: List[bytes] = [] self.parser = self.parse() next(self.parser) # start coroutine + def set_state(self, state: State) -> None: + self.state = state + # Public APIs for receiving data and producing events - def receive_data(self, data: bytes) -> Tuple[List[Event], List[bytes]]: + def receive_data(self, data: bytes) -> None: self.reader.feed_data(data) - return self.receive() + self.step_parser() - def receive_eof(self) -> Tuple[List[Event], List[bytes]]: + def receive_eof(self) -> None: self.reader.feed_eof() - return self.receive() + self.step_parser() # Public APIs for receiving 
events and producing data @@ -72,6 +77,34 @@ def send_frame(self, frame: Frame) -> bytes: ) raise NotImplementedError # not implemented yet + # Public API for getting incoming events after receiving data. + + def events_received(self) -> List[Event]: + """ + Return events read from the connection. + + Call this method immediately after calling any of the ``receive_*()`` + methods and process the events. + + """ + events, self.events = self.events, [] + return events + + # Public API for getting outgoing data after receiving data or sending events. + + def bytes_to_send(self) -> List[bytes]: + """ + Return data to write to the connection. + + Call this method immediately after calling any of the ``receive_*()`` + or ``send_*()`` methods and write the data to the connection. + + The empty bytestring signals the end of the data stream. + + """ + writes, self.writes = self.writes, [] + return writes + # Private APIs def receive(self) -> Tuple[List[Event], List[bytes]]: @@ -83,5 +116,8 @@ def receive(self) -> Tuple[List[Event], List[bytes]]: events, self.events = self.events, [] return events, [] + def step_parser(self) -> None: + next(self.parser) + def parse(self) -> Generator[None, None, None]: yield # not implemented yet diff --git a/src/websockets/server.py b/src/websockets/server.py index 095d9a17d..73156b33f 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -4,7 +4,17 @@ import email.utils import http import logging -from typing import Callable, Generator, List, Optional, Sequence, Tuple, Union, cast +from typing import ( + Any, + Callable, + Generator, + List, + Optional, + Sequence, + Tuple, + Union, + cast, +) from .asyncio_server import WebSocketServer, WebSocketServerProtocol, serve, unix_serve from .connection import CONNECTING, OPEN, SERVER, Connection @@ -61,8 +71,9 @@ def __init__( extensions: Optional[Sequence[ServerExtensionFactory]] = None, subprotocols: Optional[Sequence[Subprotocol]] = None, extra_headers: Optional[HeadersLikeOrCallable] = None, + **kwargs: Any, ): - super().__init__(state=CONNECTING) + super().__init__(SERVER, CONNECTING, **kwargs) self.origins = origins self.available_extensions = extensions self.available_subprotocols = subprotocols @@ -403,13 +414,13 @@ def reject( headers.setdefault("Connection", "close") return Response(status.value, status.phrase, headers, body) - def send_response(self, response: Response) -> bytes: + def send_response(self, response: Response) -> None: """ - Convert a WebSocket handshake response to bytes to send to the client. + Send a WebSocket handshake response to the client. 
""" if response.status_code == 101: - self.state = OPEN + self.set_state(OPEN) logger.debug( "%s > HTTP/1.1 %d %s", @@ -421,7 +432,7 @@ def send_response(self, response: Response) -> bytes: if response.body is not None: logger.debug("%s > body (%d bytes)", self.side, len(response.body)) - return response.serialize() + self.writes.append(response.serialize()) def parse(self) -> Generator[None, None, None]: request = yield from Request.parse(self.reader.read_line) diff --git a/tests/test_client.py b/tests/test_client.py index eef8eb13e..7a78ee09b 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -25,10 +25,10 @@ def test_send_connect(self): client = ClientConnection("wss://example.com/test") request = client.connect() self.assertIsInstance(request, Request) - bytes_to_send = client.send_request(request) + client.send_request(request) self.assertEqual( - bytes_to_send, - ( + client.bytes_to_send(), + [ f"GET /test HTTP/1.1\r\n" f"Host: example.com\r\n" f"Upgrade: websocket\r\n" @@ -36,8 +36,8 @@ def test_send_connect(self): f"Sec-WebSocket-Key: {KEY}\r\n" f"Sec-WebSocket-Version: 13\r\n" f"User-Agent: {USER_AGENT}\r\n" - f"\r\n" - ).encode(), + f"\r\n".encode() + ], ) def test_connect_request(self): @@ -134,7 +134,7 @@ def test_receive_accept(self): with unittest.mock.patch("websockets.client.generate_key", return_value=KEY): client = ClientConnection("ws://example.com/test") client.connect() - [response], bytes_to_send = client.receive_data( + client.receive_data( ( f"HTTP/1.1 101 Switching Protocols\r\n" f"Upgrade: websocket\r\n" @@ -145,15 +145,15 @@ def test_receive_accept(self): f"\r\n" ).encode(), ) + [response] = client.events_received() self.assertIsInstance(response, Response) - self.assertEqual(bytes_to_send, []) self.assertEqual(client.state, OPEN) def test_receive_reject(self): with unittest.mock.patch("websockets.client.generate_key", return_value=KEY): client = ClientConnection("ws://example.com/test") client.connect() - [response], bytes_to_send = client.receive_data( + client.receive_data( ( f"HTTP/1.1 404 Not Found\r\n" f"Date: {DATE}\r\n" @@ -165,15 +165,15 @@ def test_receive_reject(self): f"Sorry folks.\n" ).encode(), ) + [response] = client.events_received() self.assertIsInstance(response, Response) - self.assertEqual(bytes_to_send, []) self.assertEqual(client.state, CONNECTING) def test_accept_response(self): with unittest.mock.patch("websockets.client.generate_key", return_value=KEY): client = ClientConnection("ws://example.com/test") client.connect() - [response], _bytes_to_send = client.receive_data( + client.receive_data( ( f"HTTP/1.1 101 Switching Protocols\r\n" f"Upgrade: websocket\r\n" @@ -184,6 +184,7 @@ def test_accept_response(self): f"\r\n" ).encode(), ) + [response] = client.events_received() self.assertEqual(response.status_code, 101) self.assertEqual(response.reason_phrase, "Switching Protocols") self.assertEqual( @@ -204,7 +205,7 @@ def test_reject_response(self): with unittest.mock.patch("websockets.client.generate_key", return_value=KEY): client = ClientConnection("ws://example.com/test") client.connect() - [response], _bytes_to_send = client.receive_data( + client.receive_data( ( f"HTTP/1.1 404 Not Found\r\n" f"Date: {DATE}\r\n" @@ -216,6 +217,7 @@ def test_reject_response(self): f"Sorry folks.\n" ).encode(), ) + [response] = client.events_received() self.assertEqual(response.status_code, 404) self.assertEqual(response.reason_phrase, "Not Found") self.assertEqual( @@ -251,7 +253,8 @@ def make_accept_response(self, client): def 
test_basic(self): client = ClientConnection("wss://example.com/") response = self.make_accept_response(client) - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, OPEN) @@ -259,7 +262,8 @@ def test_missing_connection(self): client = ClientConnection("wss://example.com/") response = self.make_accept_response(client) del response.headers["Connection"] - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHeader) as raised: @@ -271,7 +275,8 @@ def test_invalid_connection(self): response = self.make_accept_response(client) del response.headers["Connection"] response.headers["Connection"] = "close" - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHeader) as raised: @@ -282,7 +287,8 @@ def test_missing_upgrade(self): client = ClientConnection("wss://example.com/") response = self.make_accept_response(client) del response.headers["Upgrade"] - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHeader) as raised: @@ -294,7 +300,8 @@ def test_invalid_upgrade(self): response = self.make_accept_response(client) del response.headers["Upgrade"] response.headers["Upgrade"] = "h2c" - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHeader) as raised: @@ -305,7 +312,8 @@ def test_missing_accept(self): client = ClientConnection("wss://example.com/") response = self.make_accept_response(client) del response.headers["Sec-WebSocket-Accept"] - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHeader) as raised: @@ -316,7 +324,8 @@ def test_multiple_accept(self): client = ClientConnection("wss://example.com/") response = self.make_accept_response(client) response.headers["Sec-WebSocket-Accept"] = ACCEPT - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHeader) as raised: @@ -332,7 +341,8 @@ def test_invalid_accept(self): response = self.make_accept_response(client) del response.headers["Sec-WebSocket-Accept"] response.headers["Sec-WebSocket-Accept"] = ACCEPT - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHeader) as raised: @@ -344,7 +354,8 @@ def test_invalid_accept(self): def test_no_extensions(self): client = ClientConnection("wss://example.com/") response = self.make_accept_response(client) - [response], _bytes_to_send = 
client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, OPEN) self.assertEqual(client.extensions, []) @@ -355,7 +366,8 @@ def test_no_extension(self): ) response = self.make_accept_response(client) response.headers["Sec-WebSocket-Extensions"] = "x-op; op" - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, OPEN) self.assertEqual(client.extensions, [OpExtension()]) @@ -366,7 +378,8 @@ def test_extension(self): ) response = self.make_accept_response(client) response.headers["Sec-WebSocket-Extensions"] = "x-rsv2" - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, OPEN) self.assertEqual(client.extensions, [Rsv2Extension()]) @@ -375,7 +388,8 @@ def test_unexpected_extension(self): client = ClientConnection("wss://example.com/") response = self.make_accept_response(client) response.headers["Sec-WebSocket-Extensions"] = "x-op; op" - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHandshake) as raised: @@ -388,7 +402,8 @@ def test_unsupported_extension(self): ) response = self.make_accept_response(client) response.headers["Sec-WebSocket-Extensions"] = "x-op; op" - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHandshake) as raised: @@ -404,7 +419,8 @@ def test_supported_extension_parameters(self): ) response = self.make_accept_response(client) response.headers["Sec-WebSocket-Extensions"] = "x-op; op=this" - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, OPEN) self.assertEqual(client.extensions, [OpExtension("this")]) @@ -415,7 +431,8 @@ def test_unsupported_extension_parameters(self): ) response = self.make_accept_response(client) response.headers["Sec-WebSocket-Extensions"] = "x-op; op=that" - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHandshake) as raised: @@ -435,7 +452,8 @@ def test_multiple_supported_extension_parameters(self): ) response = self.make_accept_response(client) response.headers["Sec-WebSocket-Extensions"] = "x-op; op=that" - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, OPEN) self.assertEqual(client.extensions, [OpExtension("that")]) @@ -448,7 +466,8 @@ def test_multiple_extensions(self): response = self.make_accept_response(client) response.headers["Sec-WebSocket-Extensions"] = "x-op; op" response.headers["Sec-WebSocket-Extensions"] = "x-rsv2" - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() 
self.assertEqual(client.state, OPEN) self.assertEqual(client.extensions, [OpExtension(), Rsv2Extension()]) @@ -461,7 +480,8 @@ def test_multiple_extensions_order(self): response = self.make_accept_response(client) response.headers["Sec-WebSocket-Extensions"] = "x-rsv2" response.headers["Sec-WebSocket-Extensions"] = "x-op; op" - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, OPEN) self.assertEqual(client.extensions, [Rsv2Extension(), OpExtension()]) @@ -469,7 +489,8 @@ def test_multiple_extensions_order(self): def test_no_subprotocols(self): client = ClientConnection("wss://example.com/") response = self.make_accept_response(client) - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, OPEN) self.assertIsNone(client.subprotocol) @@ -477,7 +498,8 @@ def test_no_subprotocols(self): def test_no_subprotocol(self): client = ClientConnection("wss://example.com/", subprotocols=["chat"]) response = self.make_accept_response(client) - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, OPEN) self.assertIsNone(client.subprotocol) @@ -486,7 +508,8 @@ def test_subprotocol(self): client = ClientConnection("wss://example.com/", subprotocols=["chat"]) response = self.make_accept_response(client) response.headers["Sec-WebSocket-Protocol"] = "chat" - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, OPEN) self.assertEqual(client.subprotocol, "chat") @@ -495,7 +518,8 @@ def test_unexpected_subprotocol(self): client = ClientConnection("wss://example.com/") response = self.make_accept_response(client) response.headers["Sec-WebSocket-Protocol"] = "chat" - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHandshake) as raised: @@ -509,7 +533,8 @@ def test_multiple_subprotocols(self): response = self.make_accept_response(client) response.headers["Sec-WebSocket-Protocol"] = "superchat" response.headers["Sec-WebSocket-Protocol"] = "chat" - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHandshake) as raised: @@ -524,7 +549,8 @@ def test_supported_subprotocol(self): ) response = self.make_accept_response(client) response.headers["Sec-WebSocket-Protocol"] = "chat" - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() self.assertEqual(client.state, OPEN) self.assertEqual(client.subprotocol, "chat") @@ -535,7 +561,8 @@ def test_unsupported_subprotocol(self): ) response = self.make_accept_response(client) response.headers["Sec-WebSocket-Protocol"] = "otherchat" - [response], _bytes_to_send = client.receive_data(response.serialize()) + client.receive_data(response.serialize()) + [response] = client.events_received() 
self.assertEqual(client.state, CONNECTING) with self.assertRaises(InvalidHandshake) as raised: diff --git a/tests/test_server.py b/tests/test_server.py index 8b00cec11..a180b08e2 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -22,7 +22,7 @@ class ConnectTests(unittest.TestCase): def test_receive_connect(self): server = ServerConnection() - [request], bytes_to_send = server.receive_data( + server.receive_data( ( f"GET /test HTTP/1.1\r\n" f"Host: example.com\r\n" @@ -34,12 +34,12 @@ def test_receive_connect(self): f"\r\n" ).encode(), ) + [request] = server.events_received() self.assertIsInstance(request, Request) - self.assertEqual(bytes_to_send, []) def test_connect_request(self): server = ServerConnection() - [request], bytes_to_send = server.receive_data( + server.receive_data( ( f"GET /test HTTP/1.1\r\n" f"Host: example.com\r\n" @@ -51,6 +51,7 @@ def test_connect_request(self): f"\r\n" ).encode(), ) + [request] = server.events_received() self.assertEqual(request.path, "/test") self.assertEqual( request.headers, @@ -88,18 +89,18 @@ def test_send_accept(self): with unittest.mock.patch("email.utils.formatdate", return_value=DATE): response = server.accept(self.make_request()) self.assertIsInstance(response, Response) - bytes_to_send = server.send_response(response) + server.send_response(response) self.assertEqual( - bytes_to_send, - ( + server.bytes_to_send(), + [ f"HTTP/1.1 101 Switching Protocols\r\n" f"Upgrade: websocket\r\n" f"Connection: Upgrade\r\n" f"Sec-WebSocket-Accept: {ACCEPT}\r\n" f"Date: {DATE}\r\n" f"Server: {USER_AGENT}\r\n" - f"\r\n" - ).encode(), + f"\r\n".encode() + ], ) self.assertEqual(server.state, OPEN) @@ -108,10 +109,10 @@ def test_send_reject(self): with unittest.mock.patch("email.utils.formatdate", return_value=DATE): response = server.reject(http.HTTPStatus.NOT_FOUND, "Sorry folks.\n") self.assertIsInstance(response, Response) - bytes_to_send = server.send_response(response) + server.send_response(response) self.assertEqual( - bytes_to_send, - ( + server.bytes_to_send(), + [ f"HTTP/1.1 404 Not Found\r\n" f"Date: {DATE}\r\n" f"Server: {USER_AGENT}\r\n" @@ -119,8 +120,8 @@ def test_send_reject(self): f"Content-Type: text/plain; charset=utf-8\r\n" f"Connection: close\r\n" f"\r\n" - f"Sorry folks.\n" - ).encode(), + f"Sorry folks.\n".encode() + ], ) self.assertEqual(server.state, CONNECTING) From 207407404d2a1bfd95da040f3948892cbf17c950 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 26 Jul 2020 13:30:59 +0200 Subject: [PATCH 209/281] Implement Sans-I/O data transfer. 
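As a rough illustration of the data-transfer API added here, the sketch below
pairs two in-memory Connection objects, one per side, both already in the
OPEN state. It is an assumption-laden example, not an official one: it
presumes the base Connection class can be instantiated directly, as its
constructor suggests, and that send_frame() serializes frames onto
bytes_to_send().

    from websockets.connection import CLIENT, SERVER, Connection
    from websockets.frames import OP_TEXT

    client = Connection(CLIENT)   # defaults to the OPEN state
    server = Connection(SERVER)

    # Sending: queue a text frame, then write out the serialized bytes.
    client.send_text(b"Hello world!")
    wire_data = b"".join(client.bytes_to_send())

    # Receiving: feed the bytes in, collect the parsed Frame event, and
    # flush anything the parser queued in response (e.g. automatic pongs).
    server.receive_data(wire_data)
    [frame] = server.events_received()
    assert frame.opcode is OP_TEXT and frame.data == b"Hello world!"
    outgoing = server.bytes_to_send()   # empty for a plain text frame
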
--- setup.cfg | 2 +- src/websockets/client.py | 6 +- src/websockets/connection.py | 337 +++- src/websockets/exceptions.py | 2 +- .../extensions/permessage_deflate.py | 4 +- src/websockets/frames.py | 41 +- src/websockets/framing.py | 12 +- src/websockets/protocol.py | 3 +- src/websockets/server.py | 16 +- tests/extensions/test_permessage_deflate.py | 2 +- tests/test_connection.py | 1418 +++++++++++++++++ tests/test_frames.py | 115 +- 12 files changed, 1847 insertions(+), 111 deletions(-) create mode 100644 tests/test_connection.py diff --git a/setup.cfg b/setup.cfg index 02e70cdf5..5448b0f9b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -5,7 +5,7 @@ python-tag = py36.py37 license_file = LICENSE [flake8] -ignore = E731,F403,F405,W503 +ignore = E203,E731,F403,F405,W503 max-line-length = 88 [isort] diff --git a/src/websockets/client.py b/src/websockets/client.py index d6250c7e9..3f9777b94 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -1,6 +1,6 @@ import collections import logging -from typing import Any, Generator, List, Optional, Sequence +from typing import Generator, List, Optional, Sequence from .asyncio_client import WebSocketClientProtocol, connect, unix_connect from .connection import CLIENT, CONNECTING, OPEN, Connection @@ -54,9 +54,9 @@ def __init__( extensions: Optional[Sequence[ClientExtensionFactory]] = None, subprotocols: Optional[Sequence[Subprotocol]] = None, extra_headers: Optional[HeadersLike] = None, - **kwargs: Any, + max_size: Optional[int] = 2 ** 20, ): - super().__init__(side=CLIENT, state=CONNECTING, **kwargs) + super().__init__(side=CLIENT, state=CONNECTING, max_size=max_size) self.wsuri = parse_uri(uri) self.origin = origin self.available_extensions = extensions diff --git a/src/websockets/connection.py b/src/websockets/connection.py index 616f2b3c2..ac30802db 100644 --- a/src/websockets/connection.py +++ b/src/websockets/connection.py @@ -1,14 +1,33 @@ import enum -from typing import Any, Generator, List, Tuple, Union - -from .exceptions import InvalidState -from .frames import Frame +import logging +from typing import Generator, List, Optional, Union + +from .exceptions import InvalidState, PayloadTooBig, ProtocolError +from .extensions.base import Extension +from .frames import ( + OP_BINARY, + OP_CLOSE, + OP_CONT, + OP_PING, + OP_PONG, + OP_TEXT, + Frame, + parse_close, + serialize_close, +) from .http11 import Request, Response from .streams import StreamReader +from .typing import Origin, Subprotocol -__all__ = ["Connection"] +__all__ = [ + "Connection", + "Side", + "State", + "SEND_EOF", +] +logger = logging.getLogger(__name__) Event = Union[Request, Response, Frame] @@ -37,45 +56,159 @@ class State(enum.IntEnum): CLOSED = State.CLOSED -class Connection: +# Sentinel to signal that the connection should be closed. - side: Side +SEND_EOF = b"" - def __init__(self, side: Side, state: State = OPEN, **kwargs: Any) -> None: + +class Connection: + def __init__( + self, side: Side, state: State = OPEN, max_size: Optional[int] = 2 ** 20, + ) -> None: + # Connection side. CLIENT or SERVER. self.side = side + + # Connnection state. CONNECTING and CLOSED states are handled in subclasses. + logger.debug("%s - initial state: %s", self.side, state.name) self.state = state + + # Maximum size of incoming messages in bytes. + self.max_size = max_size + + # Current size of incoming message in bytes. Only set while reading a + # fragmented message i.e. a data frames with the FIN bit not set. 
+ self.cur_size: Optional[int] = None + + # True while sending a fragmented message i.e. a data frames with the + # FIN bit not set. + self.expect_continuation_frame = False + + # WebSocket protocol parameters. + self.origin: Optional[Origin] = None + self.extensions: List[Extension] = [] + self.subprotocol: Optional[Subprotocol] = None + + # Connection state isn't enough to tell if a close frame was received: + # when this side closes the connection, state is CLOSING as soon as a + # close frame is sent, before a close frame is received. + self.close_frame_received = False + + # Close code and reason. Set when receiving a close frame or when the + # TCP connection drops. + self.close_code: int + self.close_reason: str + + # Track if send_eof() was called. + self.eof_sent = False + + # Parser state. self.reader = StreamReader() self.events: List[Event] = [] self.writes: List[bytes] = [] self.parser = self.parse() next(self.parser) # start coroutine + self.parser_exc: Optional[Exception] = None def set_state(self, state: State) -> None: + logger.debug( + "%s - state change: %s > %s", self.side, self.state.name, state.name + ) self.state = state - # Public APIs for receiving data and producing events + # Public APIs for receiving data. def receive_data(self, data: bytes) -> None: + """ + Receive data from the connection. + + After calling this method: + + - You must call :meth:`bytes_to_send` and send this data. + - You should call :meth:`events_received` and process these events. + + """ self.reader.feed_data(data) self.step_parser() def receive_eof(self) -> None: + """ + Receive the end of the data stream from the connection. + + After calling this method: + + - You must call :meth:`bytes_to_send` and send this data. + - You shouldn't call :meth:`events_received` as it won't + return any new events. + + """ self.reader.feed_eof() self.step_parser() - # Public APIs for receiving events and producing data + # Public APIs for sending events. - def send_frame(self, frame: Frame) -> bytes: + def send_continuation(self, data: bytes, fin: bool) -> None: """ - Convert a WebSocket handshake response to bytes to send. + Send a continuation frame. """ - # Defensive assertion for protocol compliance. - if self.state != OPEN: - raise InvalidState( - f"Cannot write to a WebSocket in the {self.state.name} state" - ) - raise NotImplementedError # not implemented yet + if not self.expect_continuation_frame: + raise ProtocolError("unexpected continuation frame") + self.expect_continuation_frame = not fin + self.send_frame(Frame(fin, OP_CONT, data)) + + def send_text(self, data: bytes, fin: bool = True) -> None: + """ + Send a text frame. + + """ + if self.expect_continuation_frame: + raise ProtocolError("expected a continuation frame") + self.expect_continuation_frame = not fin + self.send_frame(Frame(fin, OP_TEXT, data)) + + def send_binary(self, data: bytes, fin: bool = True) -> None: + """ + Send a binary frame. + + """ + if self.expect_continuation_frame: + raise ProtocolError("expected a continuation frame") + self.expect_continuation_frame = not fin + self.send_frame(Frame(fin, OP_BINARY, data)) + + def send_close(self, code: Optional[int] = None, reason: str = "") -> None: + """ + Send a connection close frame. 
+ + """ + if self.expect_continuation_frame: + raise ProtocolError("expected a continuation frame") + if code is None: + if reason != "": + raise ValueError("cannot send a reason without a code") + data = b"" + else: + data = serialize_close(code, reason) + self.send_frame(Frame(True, OP_CLOSE, data)) + # send_frame() guarantees that self.state is OPEN at this point. + # 7.1.3. The WebSocket Closing Handshake is Started + self.set_state(CLOSING) + if self.side is SERVER: + self.send_eof() + + def send_ping(self, data: bytes) -> None: + """ + Send a ping frame. + + """ + self.send_frame(Frame(True, OP_PING, data)) + + def send_pong(self, data: bytes) -> None: + """ + Send a pong frame. + + """ + self.send_frame(Frame(True, OP_PONG, data)) # Public API for getting incoming events after receiving data. @@ -105,19 +238,169 @@ def bytes_to_send(self) -> List[bytes]: writes, self.writes = self.writes, [] return writes - # Private APIs + # Private APIs for receiving data. - def receive(self) -> Tuple[List[Event], List[bytes]]: + def fail_connection(self, code: int = 1006, reason: str = "") -> None: + # Send a close frame when the state is OPEN (a close frame was already + # sent if it's CLOSING), except when failing the connection because of + # an error reading from or writing to the network. + if code != 1006 and self.state is OPEN: + self.send_frame(Frame(True, OP_CLOSE, serialize_close(code, reason))) + self.set_state(CLOSING) + if not self.eof_sent: + self.send_eof() + + def step_parser(self) -> None: # Run parser until more data is needed or EOF try: next(self.parser) except StopIteration: - pass - events, self.events = self.events, [] - return events, [] - - def step_parser(self) -> None: - next(self.parser) + # This happens if receive_data() or receive_eof() is called after + # the parser raised an exception. (It cannot happen after reaching + # EOF because receive_data() or receive_eof() would fail earlier.) + assert self.parser_exc is not None + raise RuntimeError( + "cannot receive data or EOF after an error" + ) from self.parser_exc + except ProtocolError as exc: + self.fail_connection(1002, str(exc)) + self.parser_exc = exc + raise + except EOFError as exc: + self.fail_connection(1006, str(exc)) + self.parser_exc = exc + raise + except UnicodeDecodeError as exc: + self.fail_connection(1007, f"{exc.reason} at position {exc.start}") + self.parser_exc = exc + raise + except PayloadTooBig as exc: + self.fail_connection(1009, str(exc)) + self.parser_exc = exc + raise + except Exception as exc: + logger.exception("unexpected exception in parser") + # Don't include exception details, which may be security-sensitive. + self.fail_connection(1011) + self.parser_exc = exc + raise def parse(self) -> Generator[None, None, None]: - yield # not implemented yet + while True: + eof = yield from self.reader.at_eof() + if eof: + if self.close_frame_received: + if not self.eof_sent: + self.send_eof() + yield + # Once the reader reaches EOF, its feed_data/eof() methods + # raise an error, so our receive_data/eof() methods never + # call step_parser(), so the generator shouldn't resume + # executing until it's garbage collected. 
+ raise AssertionError( + "parser shouldn't step after EOF" + ) # pragma: no cover + else: + raise EOFError("unexpected end of stream") + + if self.max_size is None: + max_size = None + elif self.cur_size is None: + max_size = self.max_size + else: + max_size = self.max_size - self.cur_size + + frame = yield from Frame.parse( + self.reader.read_exact, + mask=self.side is SERVER, + max_size=max_size, + extensions=self.extensions, + ) + + if frame.opcode is OP_TEXT or frame.opcode is OP_BINARY: + # 5.5.1 Close: "The application MUST NOT send any more data + # frames after sending a Close frame." + if self.close_frame_received: + raise ProtocolError("data frame after close frame") + + if self.cur_size is not None: + raise ProtocolError("expected a continuation frame") + if frame.fin: + self.cur_size = None + else: + self.cur_size = len(frame.data) + + elif frame.opcode is OP_CONT: + # 5.5.1 Close: "The application MUST NOT send any more data + # frames after sending a Close frame." + if self.close_frame_received: + raise ProtocolError("data frame after close frame") + + if self.cur_size is None: + raise ProtocolError("unexpected continuation frame") + if frame.fin: + self.cur_size = None + else: + self.cur_size += len(frame.data) + + elif frame.opcode is OP_PING: + # 5.5.2. Ping: "Upon receipt of a Ping frame, an endpoint MUST + # send a Pong frame in response, unless it already received a + # Close frame." + if not self.close_frame_received: + pong_frame = Frame(True, OP_PONG, frame.data) + self.send_frame(pong_frame) + + elif frame.opcode is OP_PONG: + # 5.5.3 Pong: "A response to an unsolicited Pong frame is not + # expected." + pass + + elif frame.opcode is OP_CLOSE: + self.close_frame_received = True + # 7.1.5. The WebSocket Connection Close Code + # 7.1.6. The WebSocket Connection Close Reason + self.close_code, self.close_reason = parse_close(frame.data) + + if self.cur_size is not None: + raise ProtocolError("incomplete fragmented message") + # 5.5.1 Close: "If an endpoint receives a Close frame and did + # not previously send a Close frame, the endpoint MUST send a + # Close frame in response. (When sending a Close frame in + # response, the endpoint typically echos the status code it + # received.)" + if self.state is OPEN: + # Echo the original data instead of re-serializing it with + # serialize_close() because that fails when the close frame + # is empty and parse_close() synthetizes a 1005 close code. + # The rest is identical to send_close(). + self.send_frame(Frame(True, OP_CLOSE, frame.data)) + self.set_state(CLOSING) + if self.side is SERVER: + self.send_eof() + + else: # pragma: no cover + # This can't happen because Frame.parse() validates opcodes. + raise AssertionError(f"unexpected opcode: {frame.opcode:02x}") + + self.events.append(frame) + + # Private APIs for sending events. + + def send_frame(self, frame: Frame) -> None: + # Defensive assertion for protocol compliance. 
+ if self.state is not OPEN: + raise InvalidState( + f"cannot write to a WebSocket in the {self.state.name} state" + ) + + logger.debug("%s > %r", self.side, frame) + self.writes.append( + frame.serialize(mask=self.side is CLIENT, extensions=self.extensions) + ) + + def send_eof(self) -> None: + assert not self.eof_sent + self.eof_sent = True + logger.debug("%s > EOF", self.side) + self.writes.append(SEND_EOF) diff --git a/src/websockets/exceptions.py b/src/websockets/exceptions.py index e593f1adc..c60a3e10e 100644 --- a/src/websockets/exceptions.py +++ b/src/websockets/exceptions.py @@ -358,7 +358,7 @@ class PayloadTooBig(WebSocketException): class ProtocolError(WebSocketException): """ - Raised when the other side breaks the protocol. + Raised when a frame breaks the protocol. """ diff --git a/src/websockets/extensions/permessage_deflate.py b/src/websockets/extensions/permessage_deflate.py index f1adf8bb6..184183061 100644 --- a/src/websockets/extensions/permessage_deflate.py +++ b/src/websockets/extensions/permessage_deflate.py @@ -128,9 +128,7 @@ def decode(self, frame: Frame, *, max_size: Optional[int] = None) -> Frame: max_length = 0 if max_size is None else max_size data = self.decoder.decompress(data, max_length) if self.decoder.unconsumed_tail: - raise PayloadTooBig( - f"Uncompressed payload length exceeds size limit (? > {max_size} bytes)" - ) + raise PayloadTooBig(f"over size limit (? > {max_size} bytes)") # Allow garbage collection of the decoder if it won't be reused. if frame.fin and self.remote_no_context_takeover: diff --git a/src/websockets/frames.py b/src/websockets/frames.py index 56dcf6171..2ff9dbd91 100644 --- a/src/websockets/frames.py +++ b/src/websockets/frames.py @@ -3,6 +3,7 @@ """ +import enum import io import secrets import struct @@ -19,14 +20,15 @@ __all__ = [ - "DATA_OPCODES", - "CTRL_OPCODES", + "Opcode", "OP_CONT", "OP_TEXT", "OP_BINARY", "OP_CLOSE", "OP_PING", "OP_PONG", + "DATA_OPCODES", + "CTRL_OPCODES", "Frame", "prepare_data", "prepare_ctrl", @@ -34,8 +36,21 @@ "serialize_close", ] -DATA_OPCODES = OP_CONT, OP_TEXT, OP_BINARY = 0x00, 0x01, 0x02 -CTRL_OPCODES = OP_CLOSE, OP_PING, OP_PONG = 0x08, 0x09, 0x0A + +class Opcode(enum.IntEnum): + CONT, TEXT, BINARY = 0x00, 0x01, 0x02 + CLOSE, PING, PONG = 0x08, 0x09, 0x0A + + +OP_CONT = Opcode.CONT +OP_TEXT = Opcode.TEXT +OP_BINARY = Opcode.BINARY +OP_CLOSE = Opcode.CLOSE +OP_PING = Opcode.PING +OP_PONG = Opcode.PONG + +DATA_OPCODES = OP_CONT, OP_TEXT, OP_BINARY +CTRL_OPCODES = OP_CLOSE, OP_PING, OP_PONG # Close code that are allowed in a close frame. # Using a list optimizes `code in EXTERNAL_CLOSE_CODES`. 
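
As an aside on the hunk above: Opcode is an IntEnum, so the OP_* constants keep their numeric wire values and code that compares opcodes to plain integers keeps working, while nibbles that don't map to a known opcode now fail fast. A minimal sketch, not part of the patch:

    from websockets.frames import OP_TEXT, Opcode

    assert Opcode.TEXT is OP_TEXT   # the old constant is now an enum member
    assert OP_TEXT == 0x01          # IntEnum preserves the wire value
    try:
        Opcode(0x03)                # reserved opcode, not a member
    except ValueError:
        pass                        # Frame.parse() maps this to ProtocolError("invalid opcode")
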
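Stepping back: tests/test_connection.py, added below, drives the sans-I/O Connection the same way an I/O layer would, by feeding bytes in, collecting parsed events, and draining bytes to write out. A minimal sketch of that flow, not part of the patch, assuming only the public methods introduced here (send_text(), send_close(), receive_data(), events_received(), bytes_to_send()):

    from websockets.connection import Connection, Side
    from websockets.frames import OP_TEXT, Frame

    client = Connection(Side.CLIENT)
    server = Connection(Side.SERVER)

    # The client queues a masked text frame; bytes_to_send() drains the
    # outgoing buffer so the I/O layer can write it to the network.
    client.send_text("Hello".encode())
    for data in client.bytes_to_send():
        server.receive_data(data)   # feed the wire data to the server side

    # The server side parsed the bytes back into a frame event.
    assert server.events_received() == [Frame(True, OP_TEXT, b"Hello")]

    # Close handshake: the server sends a close frame and, being the server,
    # also queues the SEND_EOF sentinel (an empty bytes object).
    server.send_close(1000, "bye")
    close_bytes, eof = server.bytes_to_send()
    assert eof == b""
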
@@ -62,7 +77,7 @@ class Frame(NamedTuple): """ fin: bool - opcode: int + opcode: Opcode data: bytes rsv1: bool = False rsv2: bool = False @@ -103,7 +118,11 @@ def parse( rsv1 = True if head1 & 0b01000000 else False rsv2 = True if head1 & 0b00100000 else False rsv3 = True if head1 & 0b00010000 else False - opcode = head1 & 0b00001111 + + try: + opcode = Opcode(head1 & 0b00001111) + except ValueError as exc: + raise ProtocolError("invalid opcode") from exc if (True if head2 & 0b10000000 else False) != mask: raise ProtocolError("incorrect masking") @@ -116,9 +135,7 @@ def parse( data = yield from read_exact(8) (length,) = struct.unpack("!Q", data) if max_size is not None and length > max_size: - raise PayloadTooBig( - f"payload length exceeds size limit ({length} > {max_size} bytes)" - ) + raise PayloadTooBig(f"over size limit ({length} > {max_size} bytes)") if mask: mask_bytes = yield from read_exact(4) @@ -209,15 +226,11 @@ def check(self) -> None: if self.rsv1 or self.rsv2 or self.rsv3: raise ProtocolError("reserved bits must be 0") - if self.opcode in DATA_OPCODES: - return - elif self.opcode in CTRL_OPCODES: + if self.opcode in CTRL_OPCODES: if len(self.data) > 125: raise ProtocolError("control frame too long") if not self.fin: raise ProtocolError("fragmented control frame") - else: - raise ProtocolError(f"invalid opcode: {self.opcode}") def prepare_data(data: Data) -> Tuple[int, bytes]: diff --git a/src/websockets/framing.py b/src/websockets/framing.py index 221afad6f..b2996d788 100644 --- a/src/websockets/framing.py +++ b/src/websockets/framing.py @@ -15,7 +15,7 @@ from typing import Any, Awaitable, Callable, Optional, Sequence from .exceptions import PayloadTooBig, ProtocolError -from .frames import Frame as NewFrame +from .frames import Frame as NewFrame, Opcode try: @@ -64,7 +64,11 @@ async def read( rsv1 = True if head1 & 0b01000000 else False rsv2 = True if head1 & 0b00100000 else False rsv3 = True if head1 & 0b00010000 else False - opcode = head1 & 0b00001111 + + try: + opcode = Opcode(head1 & 0b00001111) + except ValueError as exc: + raise ProtocolError("invalid opcode") from exc if (True if head2 & 0b10000000 else False) != mask: raise ProtocolError("incorrect masking") @@ -77,9 +81,7 @@ async def read( data = await reader(8) (length,) = struct.unpack("!Q", data) if max_size is not None and length > max_size: - raise PayloadTooBig( - f"payload length exceeds size limit ({length} > {max_size} bytes)" - ) + raise PayloadTooBig(f"over size limit ({length} > {max_size} bytes)") if mask: mask_bits = await reader(4) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 58c4569d0..2e5d95e06 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -47,6 +47,7 @@ OP_PING, OP_PONG, OP_TEXT, + Opcode, parse_close, prepare_ctrl, prepare_data, @@ -1071,7 +1072,7 @@ async def write_frame( f"Cannot write to a WebSocket in the {self.state.name} state" ) - frame = Frame(fin, opcode, data) + frame = Frame(fin, Opcode(opcode), data) logger.debug("%s > %r", self.side, frame) frame.write( self.transport.write, mask=self.is_client, extensions=self.extensions diff --git a/src/websockets/server.py b/src/websockets/server.py index 73156b33f..1b03eabee 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -4,17 +4,7 @@ import email.utils import http import logging -from typing import ( - Any, - Callable, - Generator, - List, - Optional, - Sequence, - Tuple, - Union, - cast, -) +from typing import Callable, Generator, List, Optional, Sequence, 
Tuple, Union, cast from .asyncio_server import WebSocketServer, WebSocketServerProtocol, serve, unix_serve from .connection import CONNECTING, OPEN, SERVER, Connection @@ -71,9 +61,9 @@ def __init__( extensions: Optional[Sequence[ServerExtensionFactory]] = None, subprotocols: Optional[Sequence[Subprotocol]] = None, extra_headers: Optional[HeadersLikeOrCallable] = None, - **kwargs: Any, + max_size: Optional[int] = 2 ** 20, ): - super().__init__(SERVER, CONNECTING, **kwargs) + super().__init__(side=SERVER, state=CONNECTING, max_size=max_size) self.origins = origins self.available_extensions = extensions self.available_subprotocols = subprotocols diff --git a/tests/extensions/test_permessage_deflate.py b/tests/extensions/test_permessage_deflate.py index e1193e672..f9fca1999 100644 --- a/tests/extensions/test_permessage_deflate.py +++ b/tests/extensions/test_permessage_deflate.py @@ -243,7 +243,7 @@ def test_compress_settings(self): ), ) - # Frames aren't decoded beyond max_length. + # Frames aren't decoded beyond max_size. def test_decompress_max_size(self): frame = Frame(True, OP_TEXT, ("a" * 20).encode("utf-8")) diff --git a/tests/test_connection.py b/tests/test_connection.py new file mode 100644 index 000000000..5c0f7302f --- /dev/null +++ b/tests/test_connection.py @@ -0,0 +1,1418 @@ +import unittest.mock + +from websockets.connection import * +from websockets.exceptions import InvalidState, PayloadTooBig, ProtocolError +from websockets.frames import ( + OP_BINARY, + OP_CLOSE, + OP_CONT, + OP_PING, + OP_PONG, + OP_TEXT, + Frame, + serialize_close, +) + +from .extensions.utils import Rsv2Extension +from .test_frames import FramesTestCase + + +class ConnectionTestCase(FramesTestCase): + def assertFrameSent(self, connection, frame, eof=False): + """ + Outgoing data for ``connection`` contains the given frame. + + ``frame`` may be ``None`` if no frame is expected. + + When ``eof`` is ``True``, the end of the stream is also expected. + + """ + frames_sent = [ + None + if write is SEND_EOF + else self.parse( + write, + mask=connection.side is Side.CLIENT, + extensions=connection.extensions, + ) + for write in connection.bytes_to_send() + ] + frames_expected = [] if frame is None else [frame] + if eof: + frames_expected += [None] + self.assertEqual(frames_sent, frames_expected) + + def assertFrameReceived(self, connection, frame): + """ + Incoming data for ``connection`` contains the given frame. + + ``frame`` may be ``None`` if no frame is expected. + + """ + frames_received = connection.events_received() + frames_expected = [] if frame is None else [frame] + self.assertEqual(frames_received, frames_expected) + + def assertConnectionClosing(self, connection, code=None, reason=""): + """ + Incoming data caused the "Start the WebSocket Closing Handshake" process. + + """ + close_frame = Frame( + True, OP_CLOSE, b"" if code is None else serialize_close(code, reason), + ) + # A close frame was received. + self.assertFrameReceived(connection, close_frame) + # A close frame and possibly the end of stream were sent. + self.assertFrameSent( + connection, close_frame, eof=connection.side is Side.SERVER + ) + + def assertConnectionFailing(self, connection, code=None, reason=""): + """ + Incoming data caused the "Fail the WebSocket Connection" process. + + """ + close_frame = Frame( + True, OP_CLOSE, b"" if code is None else serialize_close(code, reason), + ) + # No frame was received. + self.assertFrameReceived(connection, None) + # A close frame and the end of stream were sent. 
+ self.assertFrameSent(connection, close_frame, eof=True) + + +class MaskingTests(ConnectionTestCase): + """ + Test frame masking. + + 5.1. Overview + + """ + + unmasked_text_frame_date = b"\x81\x04Spam" + masked_text_frame_data = b"\x81\x84\x00\xff\x00\xff\x53\x8f\x61\x92" + + def test_client_sends_masked_frame(self): + client = Connection(Side.CLIENT) + with self.enforce_mask(b"\x00\xff\x00\xff"): + client.send_text(b"Spam", True) + self.assertEqual(client.bytes_to_send(), [self.masked_text_frame_data]) + + def test_server_sends_unmasked_frame(self): + server = Connection(Side.SERVER) + server.send_text(b"Spam", True) + self.assertEqual(server.bytes_to_send(), [self.unmasked_text_frame_date]) + + def test_client_receives_unmasked_frame(self): + client = Connection(Side.CLIENT) + client.receive_data(self.unmasked_text_frame_date) + self.assertFrameReceived( + client, Frame(True, OP_TEXT, b"Spam"), + ) + + def test_server_receives_masked_frame(self): + server = Connection(Side.SERVER) + server.receive_data(self.masked_text_frame_data) + self.assertFrameReceived( + server, Frame(True, OP_TEXT, b"Spam"), + ) + + def test_client_receives_masked_frame(self): + client = Connection(Side.CLIENT) + with self.assertRaises(ProtocolError) as raised: + client.receive_data(self.masked_text_frame_data) + self.assertEqual(str(raised.exception), "incorrect masking") + self.assertConnectionFailing(client, 1002, "incorrect masking") + + def test_server_receives_unmasked_frame(self): + server = Connection(Side.SERVER) + with self.assertRaises(ProtocolError) as raised: + server.receive_data(self.unmasked_text_frame_date) + self.assertEqual(str(raised.exception), "incorrect masking") + self.assertConnectionFailing(server, 1002, "incorrect masking") + + +class ContinuationTests(ConnectionTestCase): + """ + Test continuation frames without text or binary frames. + + """ + + def test_client_sends_unexpected_continuation(self): + client = Connection(Side.CLIENT) + with self.assertRaises(ProtocolError) as raised: + client.send_continuation(b"", fin=False) + self.assertEqual(str(raised.exception), "unexpected continuation frame") + + def test_server_sends_unexpected_continuation(self): + server = Connection(Side.SERVER) + with self.assertRaises(ProtocolError) as raised: + server.send_continuation(b"", fin=False) + self.assertEqual(str(raised.exception), "unexpected continuation frame") + + def test_client_receives_unexpected_continuation(self): + client = Connection(Side.CLIENT) + with self.assertRaises(ProtocolError) as raised: + client.receive_data(b"\x00\x00") + self.assertEqual(str(raised.exception), "unexpected continuation frame") + self.assertConnectionFailing(client, 1002, "unexpected continuation frame") + + def test_server_receives_unexpected_continuation(self): + server = Connection(Side.SERVER) + with self.assertRaises(ProtocolError) as raised: + server.receive_data(b"\x00\x80\x00\x00\x00\x00") + self.assertEqual(str(raised.exception), "unexpected continuation frame") + self.assertConnectionFailing(server, 1002, "unexpected continuation frame") + + def test_client_sends_continuation_after_sending_close(self): + client = Connection(Side.CLIENT) + # Since it isn't possible to send a close frame in a fragmented + # message (see test_client_send_close_in_fragmented_message), in fact, + # this is the same test as test_client_sends_unexpected_continuation. 
+ with self.enforce_mask(b"\x00\x00\x00\x00"): + client.send_close(1001) + self.assertEqual(client.bytes_to_send(), [b"\x88\x82\x00\x00\x00\x00\x03\xe9"]) + with self.assertRaises(ProtocolError) as raised: + client.send_continuation(b"", fin=False) + self.assertEqual(str(raised.exception), "unexpected continuation frame") + + def test_server_sends_continuation_after_sending_close(self): + # Since it isn't possible to send a close frame in a fragmented + # message (see test_server_send_close_in_fragmented_message), in fact, + # this is the same test as test_server_sends_unexpected_continuation. + server = Connection(Side.SERVER) + server.send_close(1000) + self.assertEqual(server.bytes_to_send(), [b"\x88\x02\x03\xe8", b""]) + with self.assertRaises(ProtocolError) as raised: + server.send_continuation(b"", fin=False) + self.assertEqual(str(raised.exception), "unexpected continuation frame") + + def test_client_receives_continuation_after_receiving_close(self): + client = Connection(Side.CLIENT) + client.receive_data(b"\x88\x02\x03\xe8") + self.assertConnectionClosing(client, 1000) + with self.assertRaises(ProtocolError) as raised: + client.receive_data(b"\x00\x00") + self.assertEqual(str(raised.exception), "data frame after close frame") + + def test_server_receives_continuation_after_receiving_close(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x88\x82\x00\x00\x00\x00\x03\xe9") + self.assertConnectionClosing(server, 1001) + with self.assertRaises(ProtocolError) as raised: + server.receive_data(b"\x00\x80\x00\xff\x00\xff") + self.assertEqual(str(raised.exception), "data frame after close frame") + + +class TextTests(ConnectionTestCase): + """ + Test text frames and continuation frames. + + """ + + def test_client_sends_text(self): + client = Connection(Side.CLIENT) + with self.enforce_mask(b"\x00\x00\x00\x00"): + client.send_text("😀".encode()) + self.assertEqual( + client.bytes_to_send(), [b"\x81\x84\x00\x00\x00\x00\xf0\x9f\x98\x80"] + ) + + def test_server_sends_text(self): + server = Connection(Side.SERVER) + server.send_text("😀".encode()) + self.assertEqual(server.bytes_to_send(), [b"\x81\x04\xf0\x9f\x98\x80"]) + + def test_client_receives_text(self): + client = Connection(Side.CLIENT) + client.receive_data(b"\x81\x04\xf0\x9f\x98\x80") + self.assertFrameReceived( + client, Frame(True, OP_TEXT, "😀".encode()), + ) + + def test_server_receives_text(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x81\x84\x00\x00\x00\x00\xf0\x9f\x98\x80") + self.assertFrameReceived( + server, Frame(True, OP_TEXT, "😀".encode()), + ) + + def test_client_receives_text_over_size_limit(self): + client = Connection(Side.CLIENT, max_size=3) + with self.assertRaises(PayloadTooBig) as raised: + client.receive_data(b"\x81\x04\xf0\x9f\x98\x80") + self.assertEqual(str(raised.exception), "over size limit (4 > 3 bytes)") + self.assertConnectionFailing(client, 1009, "over size limit (4 > 3 bytes)") + + def test_server_receives_text_over_size_limit(self): + server = Connection(Side.SERVER, max_size=3) + with self.assertRaises(PayloadTooBig) as raised: + server.receive_data(b"\x81\x84\x00\x00\x00\x00\xf0\x9f\x98\x80") + self.assertEqual(str(raised.exception), "over size limit (4 > 3 bytes)") + self.assertConnectionFailing(server, 1009, "over size limit (4 > 3 bytes)") + + def test_client_receives_text_without_size_limit(self): + client = Connection(Side.CLIENT, max_size=None) + client.receive_data(b"\x81\x04\xf0\x9f\x98\x80") + self.assertFrameReceived( + client, Frame(True, OP_TEXT, 
"😀".encode()), + ) + + def test_server_receives_text_without_size_limit(self): + server = Connection(Side.SERVER, max_size=None) + server.receive_data(b"\x81\x84\x00\x00\x00\x00\xf0\x9f\x98\x80") + self.assertFrameReceived( + server, Frame(True, OP_TEXT, "😀".encode()), + ) + + def test_client_sends_fragmented_text(self): + client = Connection(Side.CLIENT) + with self.enforce_mask(b"\x00\x00\x00\x00"): + client.send_text("😀".encode()[:2], fin=False) + self.assertEqual(client.bytes_to_send(), [b"\x01\x82\x00\x00\x00\x00\xf0\x9f"]) + with self.enforce_mask(b"\x00\x00\x00\x00"): + client.send_continuation("😀😀".encode()[2:6], fin=False) + self.assertEqual( + client.bytes_to_send(), [b"\x00\x84\x00\x00\x00\x00\x98\x80\xf0\x9f"] + ) + with self.enforce_mask(b"\x00\x00\x00\x00"): + client.send_continuation("😀".encode()[2:], fin=True) + self.assertEqual(client.bytes_to_send(), [b"\x80\x82\x00\x00\x00\x00\x98\x80"]) + + def test_server_sends_fragmented_text(self): + server = Connection(Side.SERVER) + server.send_text("😀".encode()[:2], fin=False) + self.assertEqual(server.bytes_to_send(), [b"\x01\x02\xf0\x9f"]) + server.send_continuation("😀😀".encode()[2:6], fin=False) + self.assertEqual(server.bytes_to_send(), [b"\x00\x04\x98\x80\xf0\x9f"]) + server.send_continuation("😀".encode()[2:], fin=True) + self.assertEqual(server.bytes_to_send(), [b"\x80\x02\x98\x80"]) + + def test_client_receives_fragmented_text(self): + client = Connection(Side.CLIENT) + client.receive_data(b"\x01\x02\xf0\x9f") + self.assertFrameReceived( + client, Frame(False, OP_TEXT, "😀".encode()[:2]), + ) + client.receive_data(b"\x00\x04\x98\x80\xf0\x9f") + self.assertFrameReceived( + client, Frame(False, OP_CONT, "😀😀".encode()[2:6]), + ) + client.receive_data(b"\x80\x02\x98\x80") + self.assertFrameReceived( + client, Frame(True, OP_CONT, "😀".encode()[2:]), + ) + + def test_server_receives_fragmented_text(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x01\x82\x00\x00\x00\x00\xf0\x9f") + self.assertFrameReceived( + server, Frame(False, OP_TEXT, "😀".encode()[:2]), + ) + server.receive_data(b"\x00\x84\x00\x00\x00\x00\x98\x80\xf0\x9f") + self.assertFrameReceived( + server, Frame(False, OP_CONT, "😀😀".encode()[2:6]), + ) + server.receive_data(b"\x80\x82\x00\x00\x00\x00\x98\x80") + self.assertFrameReceived( + server, Frame(True, OP_CONT, "😀".encode()[2:]), + ) + + def test_client_receives_fragmented_text_over_size_limit(self): + client = Connection(Side.CLIENT, max_size=3) + client.receive_data(b"\x01\x02\xf0\x9f") + self.assertFrameReceived( + client, Frame(False, OP_TEXT, "😀".encode()[:2]), + ) + with self.assertRaises(PayloadTooBig) as raised: + client.receive_data(b"\x80\x02\x98\x80") + self.assertEqual(str(raised.exception), "over size limit (2 > 1 bytes)") + self.assertConnectionFailing(client, 1009, "over size limit (2 > 1 bytes)") + + def test_server_receives_fragmented_text_over_size_limit(self): + server = Connection(Side.SERVER, max_size=3) + server.receive_data(b"\x01\x82\x00\x00\x00\x00\xf0\x9f") + self.assertFrameReceived( + server, Frame(False, OP_TEXT, "😀".encode()[:2]), + ) + with self.assertRaises(PayloadTooBig) as raised: + server.receive_data(b"\x80\x82\x00\x00\x00\x00\x98\x80") + self.assertEqual(str(raised.exception), "over size limit (2 > 1 bytes)") + self.assertConnectionFailing(server, 1009, "over size limit (2 > 1 bytes)") + + def test_client_receives_fragmented_text_without_size_limit(self): + client = Connection(Side.CLIENT, max_size=None) + client.receive_data(b"\x01\x02\xf0\x9f") + 
self.assertFrameReceived( + client, Frame(False, OP_TEXT, "😀".encode()[:2]), + ) + client.receive_data(b"\x00\x04\x98\x80\xf0\x9f") + self.assertFrameReceived( + client, Frame(False, OP_CONT, "😀😀".encode()[2:6]), + ) + client.receive_data(b"\x80\x02\x98\x80") + self.assertFrameReceived( + client, Frame(True, OP_CONT, "😀".encode()[2:]), + ) + + def test_server_receives_fragmented_text_without_size_limit(self): + server = Connection(Side.SERVER, max_size=None) + server.receive_data(b"\x01\x82\x00\x00\x00\x00\xf0\x9f") + self.assertFrameReceived( + server, Frame(False, OP_TEXT, "😀".encode()[:2]), + ) + server.receive_data(b"\x00\x84\x00\x00\x00\x00\x98\x80\xf0\x9f") + self.assertFrameReceived( + server, Frame(False, OP_CONT, "😀😀".encode()[2:6]), + ) + server.receive_data(b"\x80\x82\x00\x00\x00\x00\x98\x80") + self.assertFrameReceived( + server, Frame(True, OP_CONT, "😀".encode()[2:]), + ) + + def test_client_sends_unexpected_text(self): + client = Connection(Side.CLIENT) + client.send_text(b"", fin=False) + with self.assertRaises(ProtocolError) as raised: + client.send_text(b"", fin=False) + self.assertEqual(str(raised.exception), "expected a continuation frame") + + def test_server_sends_unexpected_text(self): + server = Connection(Side.SERVER) + server.send_text(b"", fin=False) + with self.assertRaises(ProtocolError) as raised: + server.send_text(b"", fin=False) + self.assertEqual(str(raised.exception), "expected a continuation frame") + + def test_client_receives_unexpected_text(self): + client = Connection(Side.CLIENT) + client.receive_data(b"\x01\x00") + self.assertFrameReceived( + client, Frame(False, OP_TEXT, b""), + ) + with self.assertRaises(ProtocolError) as raised: + client.receive_data(b"\x01\x00") + self.assertEqual(str(raised.exception), "expected a continuation frame") + self.assertConnectionFailing(client, 1002, "expected a continuation frame") + + def test_server_receives_unexpected_text(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x01\x80\x00\x00\x00\x00") + self.assertFrameReceived( + server, Frame(False, OP_TEXT, b""), + ) + with self.assertRaises(ProtocolError) as raised: + server.receive_data(b"\x01\x80\x00\x00\x00\x00") + self.assertEqual(str(raised.exception), "expected a continuation frame") + self.assertConnectionFailing(server, 1002, "expected a continuation frame") + + def test_client_sends_text_after_sending_close(self): + client = Connection(Side.CLIENT) + with self.enforce_mask(b"\x00\x00\x00\x00"): + client.send_close(1001) + self.assertEqual(client.bytes_to_send(), [b"\x88\x82\x00\x00\x00\x00\x03\xe9"]) + with self.assertRaises(InvalidState): + client.send_text(b"") + + def test_server_sends_text_after_sending_close(self): + server = Connection(Side.SERVER) + server.send_close(1000) + self.assertEqual(server.bytes_to_send(), [b"\x88\x02\x03\xe8", b""]) + with self.assertRaises(InvalidState): + server.send_text(b"") + + def test_client_receives_text_after_receiving_close(self): + client = Connection(Side.CLIENT) + client.receive_data(b"\x88\x02\x03\xe8") + self.assertConnectionClosing(client, 1000) + with self.assertRaises(ProtocolError) as raised: + client.receive_data(b"\x81\x00") + self.assertEqual(str(raised.exception), "data frame after close frame") + + def test_server_receives_text_after_receiving_close(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x88\x82\x00\x00\x00\x00\x03\xe9") + self.assertConnectionClosing(server, 1001) + with self.assertRaises(ProtocolError) as raised: + 
server.receive_data(b"\x81\x80\x00\xff\x00\xff") + self.assertEqual(str(raised.exception), "data frame after close frame") + + +class BinaryTests(ConnectionTestCase): + """ + Test binary frames and continuation frames. + + """ + + def test_client_sends_binary(self): + client = Connection(Side.CLIENT) + with self.enforce_mask(b"\x00\x00\x00\x00"): + client.send_binary(b"\x01\x02\xfe\xff") + self.assertEqual( + client.bytes_to_send(), [b"\x82\x84\x00\x00\x00\x00\x01\x02\xfe\xff"] + ) + + def test_server_sends_binary(self): + server = Connection(Side.SERVER) + server.send_binary(b"\x01\x02\xfe\xff") + self.assertEqual(server.bytes_to_send(), [b"\x82\x04\x01\x02\xfe\xff"]) + + def test_client_receives_binary(self): + client = Connection(Side.CLIENT) + client.receive_data(b"\x82\x04\x01\x02\xfe\xff") + self.assertFrameReceived( + client, Frame(True, OP_BINARY, b"\x01\x02\xfe\xff"), + ) + + def test_server_receives_binary(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x82\x84\x00\x00\x00\x00\x01\x02\xfe\xff") + self.assertFrameReceived( + server, Frame(True, OP_BINARY, b"\x01\x02\xfe\xff"), + ) + + def test_client_receives_binary_over_size_limit(self): + client = Connection(Side.CLIENT, max_size=3) + with self.assertRaises(PayloadTooBig) as raised: + client.receive_data(b"\x82\x04\x01\x02\xfe\xff") + self.assertEqual(str(raised.exception), "over size limit (4 > 3 bytes)") + self.assertConnectionFailing(client, 1009, "over size limit (4 > 3 bytes)") + + def test_server_receives_binary_over_size_limit(self): + server = Connection(Side.SERVER, max_size=3) + with self.assertRaises(PayloadTooBig) as raised: + server.receive_data(b"\x82\x84\x00\x00\x00\x00\x01\x02\xfe\xff") + self.assertEqual(str(raised.exception), "over size limit (4 > 3 bytes)") + self.assertConnectionFailing(server, 1009, "over size limit (4 > 3 bytes)") + + def test_client_sends_fragmented_binary(self): + client = Connection(Side.CLIENT) + with self.enforce_mask(b"\x00\x00\x00\x00"): + client.send_binary(b"\x01\x02", fin=False) + self.assertEqual(client.bytes_to_send(), [b"\x02\x82\x00\x00\x00\x00\x01\x02"]) + with self.enforce_mask(b"\x00\x00\x00\x00"): + client.send_continuation(b"\xee\xff\x01\x02", fin=False) + self.assertEqual( + client.bytes_to_send(), [b"\x00\x84\x00\x00\x00\x00\xee\xff\x01\x02"] + ) + with self.enforce_mask(b"\x00\x00\x00\x00"): + client.send_continuation(b"\xee\xff", fin=True) + self.assertEqual(client.bytes_to_send(), [b"\x80\x82\x00\x00\x00\x00\xee\xff"]) + + def test_server_sends_fragmented_binary(self): + server = Connection(Side.SERVER) + server.send_binary(b"\x01\x02", fin=False) + self.assertEqual(server.bytes_to_send(), [b"\x02\x02\x01\x02"]) + server.send_continuation(b"\xee\xff\x01\x02", fin=False) + self.assertEqual(server.bytes_to_send(), [b"\x00\x04\xee\xff\x01\x02"]) + server.send_continuation(b"\xee\xff", fin=True) + self.assertEqual(server.bytes_to_send(), [b"\x80\x02\xee\xff"]) + + def test_client_receives_fragmented_binary(self): + client = Connection(Side.CLIENT) + client.receive_data(b"\x02\x02\x01\x02") + self.assertFrameReceived( + client, Frame(False, OP_BINARY, b"\x01\x02"), + ) + client.receive_data(b"\x00\x04\xfe\xff\x01\x02") + self.assertFrameReceived( + client, Frame(False, OP_CONT, b"\xfe\xff\x01\x02"), + ) + client.receive_data(b"\x80\x02\xfe\xff") + self.assertFrameReceived( + client, Frame(True, OP_CONT, b"\xfe\xff"), + ) + + def test_server_receives_fragmented_binary(self): + server = Connection(Side.SERVER) + 
server.receive_data(b"\x02\x82\x00\x00\x00\x00\x01\x02") + self.assertFrameReceived( + server, Frame(False, OP_BINARY, b"\x01\x02"), + ) + server.receive_data(b"\x00\x84\x00\x00\x00\x00\xee\xff\x01\x02") + self.assertFrameReceived( + server, Frame(False, OP_CONT, b"\xee\xff\x01\x02"), + ) + server.receive_data(b"\x80\x82\x00\x00\x00\x00\xfe\xff") + self.assertFrameReceived( + server, Frame(True, OP_CONT, b"\xfe\xff"), + ) + + def test_client_receives_fragmented_binary_over_size_limit(self): + client = Connection(Side.CLIENT, max_size=3) + client.receive_data(b"\x02\x02\x01\x02") + self.assertFrameReceived( + client, Frame(False, OP_BINARY, b"\x01\x02"), + ) + with self.assertRaises(PayloadTooBig) as raised: + client.receive_data(b"\x80\x02\xfe\xff") + self.assertEqual(str(raised.exception), "over size limit (2 > 1 bytes)") + self.assertConnectionFailing(client, 1009, "over size limit (2 > 1 bytes)") + + def test_server_receives_fragmented_binary_over_size_limit(self): + server = Connection(Side.SERVER, max_size=3) + server.receive_data(b"\x02\x82\x00\x00\x00\x00\x01\x02") + self.assertFrameReceived( + server, Frame(False, OP_BINARY, b"\x01\x02"), + ) + with self.assertRaises(PayloadTooBig) as raised: + server.receive_data(b"\x80\x82\x00\x00\x00\x00\xfe\xff") + self.assertEqual(str(raised.exception), "over size limit (2 > 1 bytes)") + self.assertConnectionFailing(server, 1009, "over size limit (2 > 1 bytes)") + + def test_client_sends_unexpected_binary(self): + client = Connection(Side.CLIENT) + client.send_binary(b"", fin=False) + with self.assertRaises(ProtocolError) as raised: + client.send_binary(b"", fin=False) + self.assertEqual(str(raised.exception), "expected a continuation frame") + + def test_server_sends_unexpected_binary(self): + server = Connection(Side.SERVER) + server.send_binary(b"", fin=False) + with self.assertRaises(ProtocolError) as raised: + server.send_binary(b"", fin=False) + self.assertEqual(str(raised.exception), "expected a continuation frame") + + def test_client_receives_unexpected_binary(self): + client = Connection(Side.CLIENT) + client.receive_data(b"\x02\x00") + self.assertFrameReceived( + client, Frame(False, OP_BINARY, b""), + ) + with self.assertRaises(ProtocolError) as raised: + client.receive_data(b"\x02\x00") + self.assertEqual(str(raised.exception), "expected a continuation frame") + self.assertConnectionFailing(client, 1002, "expected a continuation frame") + + def test_server_receives_unexpected_binary(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x02\x80\x00\x00\x00\x00") + self.assertFrameReceived( + server, Frame(False, OP_BINARY, b""), + ) + with self.assertRaises(ProtocolError) as raised: + server.receive_data(b"\x02\x80\x00\x00\x00\x00") + self.assertEqual(str(raised.exception), "expected a continuation frame") + self.assertConnectionFailing(server, 1002, "expected a continuation frame") + + def test_client_sends_binary_after_sending_close(self): + client = Connection(Side.CLIENT) + with self.enforce_mask(b"\x00\x00\x00\x00"): + client.send_close(1001) + self.assertEqual(client.bytes_to_send(), [b"\x88\x82\x00\x00\x00\x00\x03\xe9"]) + with self.assertRaises(InvalidState): + client.send_binary(b"") + + def test_server_sends_binary_after_sending_close(self): + server = Connection(Side.SERVER) + server.send_close(1000) + self.assertEqual(server.bytes_to_send(), [b"\x88\x02\x03\xe8", b""]) + with self.assertRaises(InvalidState): + server.send_binary(b"") + + def test_client_receives_binary_after_receiving_close(self): + client 
= Connection(Side.CLIENT) + client.receive_data(b"\x88\x02\x03\xe8") + self.assertConnectionClosing(client, 1000) + with self.assertRaises(ProtocolError) as raised: + client.receive_data(b"\x82\x00") + self.assertEqual(str(raised.exception), "data frame after close frame") + + def test_server_receives_binary_after_receiving_close(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x88\x82\x00\x00\x00\x00\x03\xe9") + self.assertConnectionClosing(server, 1001) + with self.assertRaises(ProtocolError) as raised: + server.receive_data(b"\x82\x80\x00\xff\x00\xff") + self.assertEqual(str(raised.exception), "data frame after close frame") + + +class CloseTests(ConnectionTestCase): + """ + Test close frames. See 5.5.1. Close in RFC 6544. + + """ + + def test_client_sends_close(self): + client = Connection(Side.CLIENT) + with self.enforce_mask(b"\x3c\x3c\x3c\x3c"): + client.send_close() + self.assertEqual(client.bytes_to_send(), [b"\x88\x80\x3c\x3c\x3c\x3c"]) + self.assertIs(client.state, State.CLOSING) + + def test_server_sends_close(self): + server = Connection(Side.SERVER) + server.send_close() + self.assertEqual(server.bytes_to_send(), [b"\x88\x00", b""]) + self.assertIs(server.state, State.CLOSING) + + def test_client_receives_close(self): + client = Connection(Side.CLIENT) + with self.enforce_mask(b"\x3c\x3c\x3c\x3c"): + client.receive_data(b"\x88\x00") + self.assertEqual(client.events_received(), [Frame(True, OP_CLOSE, b"")]) + self.assertEqual(client.bytes_to_send(), [b"\x88\x80\x3c\x3c\x3c\x3c"]) + self.assertIs(client.state, State.CLOSING) + + def test_server_receives_close(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x88\x80\x3c\x3c\x3c\x3c") + self.assertEqual(server.events_received(), [Frame(True, OP_CLOSE, b"")]) + self.assertEqual(server.bytes_to_send(), [b"\x88\x00", b""]) + self.assertIs(server.state, State.CLOSING) + + def test_client_sends_close_then_receives_close(self): + # Client-initiated close handshake on the client side. + client = Connection(Side.CLIENT) + + client.send_close() + self.assertFrameReceived(client, None) + self.assertFrameSent(client, Frame(True, OP_CLOSE, b"")) + + client.receive_data(b"\x88\x00") + self.assertFrameReceived(client, Frame(True, OP_CLOSE, b"")) + self.assertFrameSent(client, None) + + client.receive_eof() + self.assertFrameReceived(client, None) + self.assertFrameSent(client, None, eof=True) + + def test_server_sends_close_then_receives_close(self): + # Server-initiated close handshake on the server side. + server = Connection(Side.SERVER) + + server.send_close() + self.assertFrameReceived(server, None) + self.assertFrameSent(server, Frame(True, OP_CLOSE, b""), eof=True) + + server.receive_data(b"\x88\x80\x3c\x3c\x3c\x3c") + self.assertFrameReceived(server, Frame(True, OP_CLOSE, b"")) + self.assertFrameSent(server, None) + + server.receive_eof() + self.assertFrameReceived(server, None) + self.assertFrameSent(server, None) + + def test_client_receives_close_then_sends_close(self): + # Server-initiated close handshake on the client side. + client = Connection(Side.CLIENT) + + client.receive_data(b"\x88\x00") + self.assertFrameReceived(client, Frame(True, OP_CLOSE, b"")) + self.assertFrameSent(client, Frame(True, OP_CLOSE, b"")) + + client.receive_eof() + self.assertFrameReceived(client, None) + self.assertFrameSent(client, None, eof=True) + + def test_server_receives_close_then_sends_close(self): + # Client-initiated close handshake on the server side. 
+ server = Connection(Side.SERVER) + + server.receive_data(b"\x88\x80\x3c\x3c\x3c\x3c") + self.assertFrameReceived(server, Frame(True, OP_CLOSE, b"")) + self.assertFrameSent(server, Frame(True, OP_CLOSE, b""), eof=True) + + server.receive_eof() + self.assertFrameReceived(server, None) + self.assertFrameSent(server, None) + + def test_client_sends_close_with_code(self): + client = Connection(Side.CLIENT) + with self.enforce_mask(b"\x00\x00\x00\x00"): + client.send_close(1001) + self.assertEqual(client.bytes_to_send(), [b"\x88\x82\x00\x00\x00\x00\x03\xe9"]) + self.assertIs(client.state, State.CLOSING) + + def test_server_sends_close_with_code(self): + server = Connection(Side.SERVER) + server.send_close(1000) + self.assertEqual(server.bytes_to_send(), [b"\x88\x02\x03\xe8", b""]) + self.assertIs(server.state, State.CLOSING) + + def test_client_receives_close_with_code(self): + client = Connection(Side.CLIENT) + client.receive_data(b"\x88\x02\x03\xe8") + self.assertConnectionClosing(client, 1000, "") + self.assertIs(client.state, State.CLOSING) + + def test_server_receives_close_with_code(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x88\x82\x00\x00\x00\x00\x03\xe9") + self.assertConnectionClosing(server, 1001, "") + self.assertIs(server.state, State.CLOSING) + + def test_client_sends_close_with_code_and_reason(self): + client = Connection(Side.CLIENT) + with self.enforce_mask(b"\x00\x00\x00\x00"): + client.send_close(1001, "going away") + self.assertEqual( + client.bytes_to_send(), [b"\x88\x8c\x00\x00\x00\x00\x03\xe9going away"] + ) + self.assertIs(client.state, State.CLOSING) + + def test_server_sends_close_with_code_and_reason(self): + server = Connection(Side.SERVER) + server.send_close(1000, "OK") + self.assertEqual(server.bytes_to_send(), [b"\x88\x04\x03\xe8OK", b""]) + self.assertIs(server.state, State.CLOSING) + + def test_client_receives_close_with_code_and_reason(self): + client = Connection(Side.CLIENT) + client.receive_data(b"\x88\x04\x03\xe8OK") + self.assertConnectionClosing(client, 1000, "OK") + self.assertIs(client.state, State.CLOSING) + + def test_server_receives_close_with_code_and_reason(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x88\x8c\x00\x00\x00\x00\x03\xe9going away") + self.assertConnectionClosing(server, 1001, "going away") + self.assertIs(server.state, State.CLOSING) + + def test_client_sends_close_with_reason_only(self): + client = Connection(Side.CLIENT) + with self.assertRaises(ValueError) as raised: + client.send_close(reason="going away") + self.assertEqual(str(raised.exception), "cannot send a reason without a code") + + def test_server_sends_close_with_reason_only(self): + server = Connection(Side.SERVER) + with self.assertRaises(ValueError) as raised: + server.send_close(reason="OK") + self.assertEqual(str(raised.exception), "cannot send a reason without a code") + + def test_client_receives_close_with_truncated_code(self): + client = Connection(Side.CLIENT) + with self.assertRaises(ProtocolError) as raised: + client.receive_data(b"\x88\x01\x03") + self.assertEqual(str(raised.exception), "close frame too short") + self.assertConnectionFailing(client, 1002, "close frame too short") + self.assertIs(client.state, State.CLOSING) + + def test_server_receives_close_with_truncated_code(self): + server = Connection(Side.SERVER) + with self.assertRaises(ProtocolError) as raised: + server.receive_data(b"\x88\x81\x00\x00\x00\x00\x03") + self.assertEqual(str(raised.exception), "close frame too short") + 
self.assertConnectionFailing(server, 1002, "close frame too short") + self.assertIs(server.state, State.CLOSING) + + def test_client_receives_close_with_non_utf8_reason(self): + client = Connection(Side.CLIENT) + with self.assertRaises(UnicodeDecodeError) as raised: + client.receive_data(b"\x88\x04\x03\xe8\xff\xff") + self.assertEqual( + str(raised.exception), + "'utf-8' codec can't decode byte 0xff in position 0: invalid start byte", + ) + self.assertConnectionFailing(client, 1007, "invalid start byte at position 0") + self.assertIs(client.state, State.CLOSING) + + def test_server_receives_close_with_non_utf8_reason(self): + server = Connection(Side.SERVER) + with self.assertRaises(UnicodeDecodeError) as raised: + server.receive_data(b"\x88\x84\x00\x00\x00\x00\x03\xe9\xff\xff") + self.assertEqual( + str(raised.exception), + "'utf-8' codec can't decode byte 0xff in position 0: invalid start byte", + ) + self.assertConnectionFailing(server, 1007, "invalid start byte at position 0") + self.assertIs(server.state, State.CLOSING) + + +class PingTests(ConnectionTestCase): + """ + Test ping. See 5.5.2. Ping in RFC 6544. + + """ + + def test_client_sends_ping(self): + client = Connection(Side.CLIENT) + with self.enforce_mask(b"\x00\x44\x88\xcc"): + client.send_ping(b"") + self.assertEqual(client.bytes_to_send(), [b"\x89\x80\x00\x44\x88\xcc"]) + + def test_server_sends_ping(self): + server = Connection(Side.SERVER) + server.send_ping(b"") + self.assertEqual(server.bytes_to_send(), [b"\x89\x00"]) + + def test_client_receives_ping(self): + client = Connection(Side.CLIENT) + client.receive_data(b"\x89\x00") + self.assertFrameReceived( + client, Frame(True, OP_PING, b""), + ) + self.assertFrameSent( + client, Frame(True, OP_PONG, b""), + ) + + def test_server_receives_ping(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x89\x80\x00\x44\x88\xcc") + self.assertFrameReceived( + server, Frame(True, OP_PING, b""), + ) + self.assertFrameSent( + server, Frame(True, OP_PONG, b""), + ) + + def test_client_sends_ping_with_data(self): + client = Connection(Side.CLIENT) + with self.enforce_mask(b"\x00\x44\x88\xcc"): + client.send_ping(b"\x22\x66\xaa\xee") + self.assertEqual( + client.bytes_to_send(), [b"\x89\x84\x00\x44\x88\xcc\x22\x22\x22\x22"] + ) + + def test_server_sends_ping_with_data(self): + server = Connection(Side.SERVER) + server.send_ping(b"\x22\x66\xaa\xee") + self.assertEqual(server.bytes_to_send(), [b"\x89\x04\x22\x66\xaa\xee"]) + + def test_client_receives_ping_with_data(self): + client = Connection(Side.CLIENT) + client.receive_data(b"\x89\x04\x22\x66\xaa\xee") + self.assertFrameReceived( + client, Frame(True, OP_PING, b"\x22\x66\xaa\xee"), + ) + self.assertFrameSent( + client, Frame(True, OP_PONG, b"\x22\x66\xaa\xee"), + ) + + def test_server_receives_ping_with_data(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x89\x84\x00\x44\x88\xcc\x22\x22\x22\x22") + self.assertFrameReceived( + server, Frame(True, OP_PING, b"\x22\x66\xaa\xee"), + ) + self.assertFrameSent( + server, Frame(True, OP_PONG, b"\x22\x66\xaa\xee"), + ) + + def test_client_sends_fragmented_ping_frame(self): + client = Connection(Side.CLIENT) + # This is only possible through a private API. + with self.assertRaises(ProtocolError) as raised: + client.send_frame(Frame(False, OP_PING, b"")) + self.assertEqual(str(raised.exception), "fragmented control frame") + + def test_server_sends_fragmented_ping_frame(self): + server = Connection(Side.SERVER) + # This is only possible through a private API. 
+ with self.assertRaises(ProtocolError) as raised: + server.send_frame(Frame(False, OP_PING, b"")) + self.assertEqual(str(raised.exception), "fragmented control frame") + + def test_client_receives_fragmented_ping_frame(self): + client = Connection(Side.CLIENT) + with self.assertRaises(ProtocolError) as raised: + client.receive_data(b"\x09\x00") + self.assertEqual(str(raised.exception), "fragmented control frame") + self.assertConnectionFailing(client, 1002, "fragmented control frame") + + def test_server_receives_fragmented_ping_frame(self): + server = Connection(Side.SERVER) + with self.assertRaises(ProtocolError) as raised: + server.receive_data(b"\x09\x80\x3c\x3c\x3c\x3c") + self.assertEqual(str(raised.exception), "fragmented control frame") + self.assertConnectionFailing(server, 1002, "fragmented control frame") + + def test_client_sends_ping_after_sending_close(self): + client = Connection(Side.CLIENT) + with self.enforce_mask(b"\x00\x00\x00\x00"): + client.send_close(1001) + self.assertEqual(client.bytes_to_send(), [b"\x88\x82\x00\x00\x00\x00\x03\xe9"]) + # The spec says: "An endpoint MAY send a Ping frame any time (...) + # before the connection is closed" but websockets doesn't support + # sending a Ping frame after a Close frame. + with self.assertRaises(InvalidState) as raised: + client.send_ping(b"") + self.assertEqual( + str(raised.exception), "cannot write to a WebSocket in the CLOSING state" + ) + + def test_server_sends_ping_after_sending_close(self): + server = Connection(Side.SERVER) + server.send_close(1000) + self.assertEqual(server.bytes_to_send(), [b"\x88\x02\x03\xe8", b""]) + # The spec says: "An endpoint MAY send a Ping frame any time (...) + # before the connection is closed" but websockets doesn't support + # sending a Ping frame after a Close frame. + with self.assertRaises(InvalidState) as raised: + server.send_ping(b"") + self.assertEqual( + str(raised.exception), "cannot write to a WebSocket in the CLOSING state" + ) + + def test_client_receives_ping_after_receiving_close(self): + client = Connection(Side.CLIENT) + client.receive_data(b"\x88\x02\x03\xe8") + self.assertConnectionClosing(client, 1000) + client.receive_data(b"\x89\x04\x22\x66\xaa\xee") + self.assertFrameReceived( + client, Frame(True, OP_PING, b"\x22\x66\xaa\xee"), + ) + self.assertFrameSent(client, None) + + def test_server_receives_ping_after_receiving_close(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x88\x82\x00\x00\x00\x00\x03\xe9") + self.assertConnectionClosing(server, 1001) + server.receive_data(b"\x89\x84\x00\x44\x88\xcc\x22\x22\x22\x22") + self.assertFrameReceived( + server, Frame(True, OP_PING, b"\x22\x66\xaa\xee"), + ) + self.assertFrameSent(server, None) + + +class PongTests(ConnectionTestCase): + """ + Test pong frames. See 5.5.3. Pong in RFC 6544. 
+ + """ + + def test_client_sends_pong(self): + client = Connection(Side.CLIENT) + with self.enforce_mask(b"\x00\x44\x88\xcc"): + client.send_pong(b"") + self.assertEqual(client.bytes_to_send(), [b"\x8a\x80\x00\x44\x88\xcc"]) + + def test_server_sends_pong(self): + server = Connection(Side.SERVER) + server.send_pong(b"") + self.assertEqual(server.bytes_to_send(), [b"\x8a\x00"]) + + def test_client_receives_pong(self): + client = Connection(Side.CLIENT) + client.receive_data(b"\x8a\x00") + self.assertFrameReceived( + client, Frame(True, OP_PONG, b""), + ) + + def test_server_receives_pong(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x8a\x80\x00\x44\x88\xcc") + self.assertFrameReceived( + server, Frame(True, OP_PONG, b""), + ) + + def test_client_sends_pong_with_data(self): + client = Connection(Side.CLIENT) + with self.enforce_mask(b"\x00\x44\x88\xcc"): + client.send_pong(b"\x22\x66\xaa\xee") + self.assertEqual( + client.bytes_to_send(), [b"\x8a\x84\x00\x44\x88\xcc\x22\x22\x22\x22"] + ) + + def test_server_sends_pong_with_data(self): + server = Connection(Side.SERVER) + server.send_pong(b"\x22\x66\xaa\xee") + self.assertEqual(server.bytes_to_send(), [b"\x8a\x04\x22\x66\xaa\xee"]) + + def test_client_receives_pong_with_data(self): + client = Connection(Side.CLIENT) + client.receive_data(b"\x8a\x04\x22\x66\xaa\xee") + self.assertFrameReceived( + client, Frame(True, OP_PONG, b"\x22\x66\xaa\xee"), + ) + + def test_server_receives_pong_with_data(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x8a\x84\x00\x44\x88\xcc\x22\x22\x22\x22") + self.assertFrameReceived( + server, Frame(True, OP_PONG, b"\x22\x66\xaa\xee"), + ) + + def test_client_sends_fragmented_pong_frame(self): + client = Connection(Side.CLIENT) + # This is only possible through a private API. + with self.assertRaises(ProtocolError) as raised: + client.send_frame(Frame(False, OP_PONG, b"")) + self.assertEqual(str(raised.exception), "fragmented control frame") + + def test_server_sends_fragmented_pong_frame(self): + server = Connection(Side.SERVER) + # This is only possible through a private API. + with self.assertRaises(ProtocolError) as raised: + server.send_frame(Frame(False, OP_PONG, b"")) + self.assertEqual(str(raised.exception), "fragmented control frame") + + def test_client_receives_fragmented_pong_frame(self): + client = Connection(Side.CLIENT) + with self.assertRaises(ProtocolError) as raised: + client.receive_data(b"\x0a\x00") + self.assertEqual(str(raised.exception), "fragmented control frame") + self.assertConnectionFailing(client, 1002, "fragmented control frame") + + def test_server_receives_fragmented_pong_frame(self): + server = Connection(Side.SERVER) + with self.assertRaises(ProtocolError) as raised: + server.receive_data(b"\x0a\x80\x3c\x3c\x3c\x3c") + self.assertEqual(str(raised.exception), "fragmented control frame") + self.assertConnectionFailing(server, 1002, "fragmented control frame") + + def test_client_sends_pong_after_sending_close(self): + client = Connection(Side.CLIENT) + with self.enforce_mask(b"\x00\x00\x00\x00"): + client.send_close(1001) + self.assertEqual(client.bytes_to_send(), [b"\x88\x82\x00\x00\x00\x00\x03\xe9"]) + # websockets doesn't support sending a Pong frame after a Close frame. 
+ with self.assertRaises(InvalidState): + client.send_pong(b"") + + def test_server_sends_pong_after_sending_close(self): + server = Connection(Side.SERVER) + server.send_close(1000) + self.assertEqual(server.bytes_to_send(), [b"\x88\x02\x03\xe8", b""]) + # websockets doesn't support sending a Pong frame after a Close frame. + with self.assertRaises(InvalidState): + server.send_pong(b"") + + def test_client_receives_pong_after_receiving_close(self): + client = Connection(Side.CLIENT) + client.receive_data(b"\x88\x02\x03\xe8") + self.assertConnectionClosing(client, 1000) + client.receive_data(b"\x8a\x04\x22\x66\xaa\xee") + self.assertFrameReceived( + client, Frame(True, OP_PONG, b"\x22\x66\xaa\xee"), + ) + + def test_server_receives_pong_after_receiving_close(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x88\x82\x00\x00\x00\x00\x03\xe9") + self.assertConnectionClosing(server, 1001) + server.receive_data(b"\x8a\x84\x00\x44\x88\xcc\x22\x22\x22\x22") + self.assertFrameReceived( + server, Frame(True, OP_PONG, b"\x22\x66\xaa\xee"), + ) + + +class FragmentationTests(ConnectionTestCase): + """ + Test message fragmentation. + + See 5.4. Fragmentation in RFC 6544. + + """ + + def test_client_send_ping_pong_in_fragmented_message(self): + client = Connection(Side.CLIENT) + client.send_text(b"Spam", fin=False) + self.assertFrameSent(client, Frame(False, OP_TEXT, b"Spam")) + client.send_ping(b"Ping") + self.assertFrameSent(client, Frame(True, OP_PING, b"Ping")) + client.send_continuation(b"Ham", fin=False) + self.assertFrameSent(client, Frame(False, OP_CONT, b"Ham")) + client.send_pong(b"Pong") + self.assertFrameSent(client, Frame(True, OP_PONG, b"Pong")) + client.send_continuation(b"Eggs", fin=True) + self.assertFrameSent(client, Frame(True, OP_CONT, b"Eggs")) + + def test_server_send_ping_pong_in_fragmented_message(self): + server = Connection(Side.SERVER) + server.send_text(b"Spam", fin=False) + self.assertFrameSent(server, Frame(False, OP_TEXT, b"Spam")) + server.send_ping(b"Ping") + self.assertFrameSent(server, Frame(True, OP_PING, b"Ping")) + server.send_continuation(b"Ham", fin=False) + self.assertFrameSent(server, Frame(False, OP_CONT, b"Ham")) + server.send_pong(b"Pong") + self.assertFrameSent(server, Frame(True, OP_PONG, b"Pong")) + server.send_continuation(b"Eggs", fin=True) + self.assertFrameSent(server, Frame(True, OP_CONT, b"Eggs")) + + def test_client_receive_ping_pong_in_fragmented_message(self): + client = Connection(Side.CLIENT) + client.receive_data(b"\x01\x04Spam") + self.assertFrameReceived( + client, Frame(False, OP_TEXT, b"Spam"), + ) + client.receive_data(b"\x89\x04Ping") + self.assertFrameReceived( + client, Frame(True, OP_PING, b"Ping"), + ) + self.assertFrameSent( + client, Frame(True, OP_PONG, b"Ping"), + ) + client.receive_data(b"\x00\x03Ham") + self.assertFrameReceived( + client, Frame(False, OP_CONT, b"Ham"), + ) + client.receive_data(b"\x8a\x04Pong") + self.assertFrameReceived( + client, Frame(True, OP_PONG, b"Pong"), + ) + client.receive_data(b"\x80\x04Eggs") + self.assertFrameReceived( + client, Frame(True, OP_CONT, b"Eggs"), + ) + + def test_server_receive_ping_pong_in_fragmented_message(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x01\x84\x00\x00\x00\x00Spam") + self.assertFrameReceived( + server, Frame(False, OP_TEXT, b"Spam"), + ) + server.receive_data(b"\x89\x84\x00\x00\x00\x00Ping") + self.assertFrameReceived( + server, Frame(True, OP_PING, b"Ping"), + ) + self.assertFrameSent( + server, Frame(True, OP_PONG, b"Ping"), + ) 
+ server.receive_data(b"\x00\x83\x00\x00\x00\x00Ham") + self.assertFrameReceived( + server, Frame(False, OP_CONT, b"Ham"), + ) + server.receive_data(b"\x8a\x84\x00\x00\x00\x00Pong") + self.assertFrameReceived( + server, Frame(True, OP_PONG, b"Pong"), + ) + server.receive_data(b"\x80\x84\x00\x00\x00\x00Eggs") + self.assertFrameReceived( + server, Frame(True, OP_CONT, b"Eggs"), + ) + + def test_client_send_close_in_fragmented_message(self): + client = Connection(Side.CLIENT) + client.send_text(b"Spam", fin=False) + self.assertFrameSent(client, Frame(False, OP_TEXT, b"Spam")) + # The spec says: "An endpoint MUST be capable of handling control + # frames in the middle of a fragmented message." However, since the + # endpoint must not send a data frame after a close frame, a close + # frame can't be "in the middle" of a fragmented message. + with self.assertRaises(ProtocolError) as raised: + client.send_close(1001) + self.assertEqual(str(raised.exception), "expected a continuation frame") + client.send_continuation(b"Eggs", fin=True) + + def test_server_send_close_in_fragmented_message(self): + server = Connection(Side.CLIENT) + server.send_text(b"Spam", fin=False) + self.assertFrameSent(server, Frame(False, OP_TEXT, b"Spam")) + # The spec says: "An endpoint MUST be capable of handling control + # frames in the middle of a fragmented message." However, since the + # endpoint must not send a data frame after a close frame, a close + # frame can't be "in the middle" of a fragmented message. + with self.assertRaises(ProtocolError) as raised: + server.send_close(1000) + self.assertEqual(str(raised.exception), "expected a continuation frame") + + def test_client_receive_close_in_fragmented_message(self): + client = Connection(Side.CLIENT) + client.receive_data(b"\x01\x04Spam") + self.assertFrameReceived( + client, Frame(False, OP_TEXT, b"Spam"), + ) + # The spec says: "An endpoint MUST be capable of handling control + # frames in the middle of a fragmented message." However, since the + # endpoint must not send a data frame after a close frame, a close + # frame can't be "in the middle" of a fragmented message. + with self.assertRaises(ProtocolError) as raised: + client.receive_data(b"\x88\x02\x03\xe8") + self.assertEqual(str(raised.exception), "incomplete fragmented message") + self.assertConnectionFailing(client, 1002, "incomplete fragmented message") + + def test_server_receive_close_in_fragmented_message(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x01\x84\x00\x00\x00\x00Spam") + self.assertFrameReceived( + server, Frame(False, OP_TEXT, b"Spam"), + ) + # The spec says: "An endpoint MUST be capable of handling control + # frames in the middle of a fragmented message." However, since the + # endpoint must not send a data frame after a close frame, a close + # frame can't be "in the middle" of a fragmented message. + with self.assertRaises(ProtocolError) as raised: + server.receive_data(b"\x88\x82\x00\x00\x00\x00\x03\xe9") + self.assertEqual(str(raised.exception), "incomplete fragmented message") + self.assertConnectionFailing(server, 1002, "incomplete fragmented message") + + +class EOFTests(ConnectionTestCase): + """ + Test connection termination. 
+ + """ + + def test_client_receives_eof(self): + client = Connection(Side.CLIENT) + client.receive_data(b"\x88\x00") + self.assertConnectionClosing(client) + client.receive_eof() # does not raise an exception + + def test_server_receives_eof(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x88\x80\x3c\x3c\x3c\x3c") + self.assertConnectionClosing(server) + server.receive_eof() # does not raise an exception + + def test_client_receives_eof_between_frames(self): + client = Connection(Side.CLIENT) + with self.assertRaises(EOFError) as raised: + client.receive_eof() + self.assertEqual(str(raised.exception), "unexpected end of stream") + + def test_server_receives_eof_between_frames(self): + server = Connection(Side.SERVER) + with self.assertRaises(EOFError) as raised: + server.receive_eof() + self.assertEqual(str(raised.exception), "unexpected end of stream") + + def test_client_receives_eof_inside_frame(self): + client = Connection(Side.CLIENT) + client.receive_data(b"\x81") + with self.assertRaises(EOFError) as raised: + client.receive_eof() + self.assertEqual( + str(raised.exception), "stream ends after 1 bytes, expected 2 bytes" + ) + + def test_server_receives_eof_inside_frame(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x81") + with self.assertRaises(EOFError) as raised: + server.receive_eof() + self.assertEqual( + str(raised.exception), "stream ends after 1 bytes, expected 2 bytes" + ) + + def test_client_receives_data_after_exception(self): + client = Connection(Side.CLIENT) + with self.assertRaises(ProtocolError) as raised: + client.receive_data(b"\xff\xff") + self.assertEqual(str(raised.exception), "invalid opcode") + with self.assertRaises(RuntimeError) as raised: + client.receive_data(b"\x00\x00") + self.assertEqual( + str(raised.exception), "cannot receive data or EOF after an error" + ) + + def test_server_receives_data_after_exception(self): + server = Connection(Side.SERVER) + with self.assertRaises(ProtocolError) as raised: + server.receive_data(b"\xff\xff") + self.assertEqual(str(raised.exception), "invalid opcode") + with self.assertRaises(RuntimeError) as raised: + server.receive_data(b"\x00\x00") + self.assertEqual( + str(raised.exception), "cannot receive data or EOF after an error" + ) + + def test_client_receives_eof_after_exception(self): + client = Connection(Side.CLIENT) + with self.assertRaises(ProtocolError) as raised: + client.receive_data(b"\xff\xff") + self.assertEqual(str(raised.exception), "invalid opcode") + with self.assertRaises(RuntimeError) as raised: + client.receive_eof() + self.assertEqual( + str(raised.exception), "cannot receive data or EOF after an error" + ) + + def test_server_receives_eof_after_exception(self): + server = Connection(Side.SERVER) + with self.assertRaises(ProtocolError) as raised: + server.receive_data(b"\xff\xff") + self.assertEqual(str(raised.exception), "invalid opcode") + with self.assertRaises(RuntimeError) as raised: + server.receive_eof() + self.assertEqual( + str(raised.exception), "cannot receive data or EOF after an error" + ) + + def test_client_receives_data_after_eof(self): + client = Connection(Side.CLIENT) + client.receive_data(b"\x88\x00") + self.assertConnectionClosing(client) + client.receive_eof() + with self.assertRaises(EOFError) as raised: + client.receive_data(b"\x88\x00") + self.assertEqual(str(raised.exception), "stream ended") + + def test_server_receives_data_after_eof(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x88\x80\x3c\x3c\x3c\x3c") + 
self.assertConnectionClosing(server) + server.receive_eof() + with self.assertRaises(EOFError) as raised: + server.receive_data(b"\x88\x80\x00\x00\x00\x00") + self.assertEqual(str(raised.exception), "stream ended") + + def test_client_receives_eof_after_eof(self): + client = Connection(Side.CLIENT) + client.receive_data(b"\x88\x00") + self.assertConnectionClosing(client) + client.receive_eof() + with self.assertRaises(EOFError) as raised: + client.receive_eof() + self.assertEqual(str(raised.exception), "stream ended") + + def test_server_receives_eof_after_eof(self): + server = Connection(Side.SERVER) + server.receive_data(b"\x88\x80\x3c\x3c\x3c\x3c") + self.assertConnectionClosing(server) + server.receive_eof() + with self.assertRaises(EOFError) as raised: + server.receive_eof() + self.assertEqual(str(raised.exception), "stream ended") + + +class ErrorTests(ConnectionTestCase): + """ + Test other error cases. + + """ + + def test_client_hits_internal_error_reading_frame(self): + client = Connection(Side.CLIENT) + # This isn't supposed to happen, so we're simulating it. + with unittest.mock.patch("struct.unpack", side_effect=RuntimeError("BOOM")): + with self.assertRaises(RuntimeError) as raised: + client.receive_data(b"\x81\x00") + self.assertEqual(str(raised.exception), "BOOM") + self.assertConnectionFailing(client, 1011, "") + + def test_server_hits_internal_error_reading_frame(self): + server = Connection(Side.SERVER) + # This isn't supposed to happen, so we're simulating it. + with unittest.mock.patch("struct.unpack", side_effect=RuntimeError("BOOM")): + with self.assertRaises(RuntimeError) as raised: + server.receive_data(b"\x81\x80\x00\x00\x00\x00") + self.assertEqual(str(raised.exception), "BOOM") + self.assertConnectionFailing(server, 1011, "") + + +class ExtensionsTests(ConnectionTestCase): + """ + Test how extensions affect frames. + + """ + + def test_client_extension_encodes_frame(self): + client = Connection(Side.CLIENT) + client.extensions = [Rsv2Extension()] + with self.enforce_mask(b"\x00\x44\x88\xcc"): + client.send_ping(b"") + self.assertEqual(client.bytes_to_send(), [b"\xa9\x80\x00\x44\x88\xcc"]) + + def test_server_extension_encodes_frame(self): + server = Connection(Side.SERVER) + server.extensions = [Rsv2Extension()] + server.send_ping(b"") + self.assertEqual(server.bytes_to_send(), [b"\xa9\x00"]) + + def test_client_extension_decodes_frame(self): + client = Connection(Side.CLIENT) + client.extensions = [Rsv2Extension()] + client.receive_data(b"\xaa\x00") + self.assertEqual(client.events_received(), [Frame(True, OP_PONG, b"")]) + + def test_server_extension_decodes_frame(self): + server = Connection(Side.SERVER) + server.extensions = [Rsv2Extension()] + server.receive_data(b"\xaa\x80\x00\x44\x88\xcc") + self.assertEqual(server.events_received(), [Frame(True, OP_PONG, b"")]) diff --git a/tests/test_frames.py b/tests/test_frames.py index 37a73b2df..514fe7c54 100644 --- a/tests/test_frames.py +++ b/tests/test_frames.py @@ -9,8 +9,15 @@ from .utils import GeneratorTestCase -class FrameTests(GeneratorTestCase): - def parse(self, data, mask=False, max_size=None, extensions=None): +class FramesTestCase(GeneratorTestCase): + def enforce_mask(self, mask): + return unittest.mock.patch("secrets.token_bytes", return_value=mask) + + def parse(self, data, mask, max_size=None, extensions=None): + """ + Parse a frame from a bytestring. 
+ + """ reader = StreamReader() reader.feed_data(data) reader.feed_eof() @@ -19,117 +26,134 @@ def parse(self, data, mask=False, max_size=None, extensions=None): ) return self.assertGeneratorReturns(parser) - def round_trip(self, data, frame, mask=False, extensions=None): + def assertFrameData(self, frame, data, mask, extensions=None): + """ + Serializing frame yields data. Parsing data yields frame. + + """ + # Compare frames first, because test failures are easier to read, + # especially when mask = True. parsed = self.parse(data, mask=mask, extensions=extensions) self.assertEqual(parsed, frame) # Make masking deterministic by reusing the same "random" mask. # This has an effect only when mask is True. mask_bytes = data[2:6] if mask else b"" - with unittest.mock.patch("secrets.token_bytes", return_value=mask_bytes): - serialized = parsed.serialize(mask=mask, extensions=extensions) + with self.enforce_mask(mask_bytes): + serialized = frame.serialize(mask=mask, extensions=extensions) self.assertEqual(serialized, data) - def test_text(self): - self.round_trip(b"\x81\x04Spam", Frame(True, OP_TEXT, b"Spam")) + +class FrameTests(FramesTestCase): + def test_text_unmasked(self): + self.assertFrameData( + Frame(True, OP_TEXT, b"Spam"), b"\x81\x04Spam", mask=False, + ) def test_text_masked(self): - self.round_trip( - b"\x81\x84\x5b\xfb\xe1\xa8\x08\x8b\x80\xc5", + self.assertFrameData( Frame(True, OP_TEXT, b"Spam"), + b"\x81\x84\x5b\xfb\xe1\xa8\x08\x8b\x80\xc5", mask=True, ) - def test_binary(self): - self.round_trip(b"\x82\x04Eggs", Frame(True, OP_BINARY, b"Eggs")) + def test_binary_unmasked(self): + self.assertFrameData( + Frame(True, OP_BINARY, b"Eggs"), b"\x82\x04Eggs", mask=False, + ) def test_binary_masked(self): - self.round_trip( - b"\x82\x84\x53\xcd\xe2\x89\x16\xaa\x85\xfa", + self.assertFrameData( Frame(True, OP_BINARY, b"Eggs"), + b"\x82\x84\x53\xcd\xe2\x89\x16\xaa\x85\xfa", mask=True, ) - def test_non_ascii_text(self): - self.round_trip( - b"\x81\x05caf\xc3\xa9", Frame(True, OP_TEXT, "café".encode("utf-8")) + def test_non_ascii_text_unmasked(self): + self.assertFrameData( + Frame(True, OP_TEXT, "café".encode("utf-8")), + b"\x81\x05caf\xc3\xa9", + mask=False, ) def test_non_ascii_text_masked(self): - self.round_trip( - b"\x81\x85\x64\xbe\xee\x7e\x07\xdf\x88\xbd\xcd", + self.assertFrameData( Frame(True, OP_TEXT, "café".encode("utf-8")), + b"\x81\x85\x64\xbe\xee\x7e\x07\xdf\x88\xbd\xcd", mask=True, ) def test_close(self): - self.round_trip(b"\x88\x00", Frame(True, OP_CLOSE, b"")) + self.assertFrameData(Frame(True, OP_CLOSE, b""), b"\x88\x00", mask=False) def test_ping(self): - self.round_trip(b"\x89\x04ping", Frame(True, OP_PING, b"ping")) + self.assertFrameData(Frame(True, OP_PING, b"ping"), b"\x89\x04ping", mask=False) def test_pong(self): - self.round_trip(b"\x8a\x04pong", Frame(True, OP_PONG, b"pong")) + self.assertFrameData(Frame(True, OP_PONG, b"pong"), b"\x8a\x04pong", mask=False) def test_long(self): - self.round_trip( - b"\x82\x7e\x00\x7e" + 126 * b"a", Frame(True, OP_BINARY, 126 * b"a") + self.assertFrameData( + Frame(True, OP_BINARY, 126 * b"a"), + b"\x82\x7e\x00\x7e" + 126 * b"a", + mask=False, ) def test_very_long(self): - self.round_trip( - b"\x82\x7f\x00\x00\x00\x00\x00\x01\x00\x00" + 65536 * b"a", + self.assertFrameData( Frame(True, OP_BINARY, 65536 * b"a"), + b"\x82\x7f\x00\x00\x00\x00\x00\x01\x00\x00" + 65536 * b"a", + mask=False, ) def test_payload_too_big(self): with self.assertRaises(PayloadTooBig): - self.parse(b"\x82\x7e\x04\x01" + 1025 * b"a", max_size=1024) + 
self.parse(b"\x82\x7e\x04\x01" + 1025 * b"a", mask=False, max_size=1024) def test_bad_reserved_bits(self): for data in [b"\xc0\x00", b"\xa0\x00", b"\x90\x00"]: with self.subTest(data=data): with self.assertRaises(ProtocolError): - self.parse(data) + self.parse(data, mask=False) def test_good_opcode(self): for opcode in list(range(0x00, 0x03)) + list(range(0x08, 0x0B)): data = bytes([0x80 | opcode, 0]) with self.subTest(data=data): - self.parse(data) # does not raise an exception + self.parse(data, mask=False) # does not raise an exception def test_bad_opcode(self): for opcode in list(range(0x03, 0x08)) + list(range(0x0B, 0x10)): data = bytes([0x80 | opcode, 0]) with self.subTest(data=data): with self.assertRaises(ProtocolError): - self.parse(data) + self.parse(data, mask=False) def test_mask_flag(self): # Mask flag correctly set. self.parse(b"\x80\x80\x00\x00\x00\x00", mask=True) # Mask flag incorrectly unset. with self.assertRaises(ProtocolError): - self.parse(b"\x80\x80\x00\x00\x00\x00") + self.parse(b"\x80\x80\x00\x00\x00\x00", mask=False) # Mask flag correctly unset. - self.parse(b"\x80\x00") + self.parse(b"\x80\x00", mask=False) # Mask flag incorrectly set. with self.assertRaises(ProtocolError): self.parse(b"\x80\x00", mask=True) def test_control_frame_max_length(self): # At maximum allowed length. - self.parse(b"\x88\x7e\x00\x7d" + 125 * b"a") + self.parse(b"\x88\x7e\x00\x7d" + 125 * b"a", mask=False) # Above maximum allowed length. with self.assertRaises(ProtocolError): - self.parse(b"\x88\x7e\x00\x7e" + 126 * b"a") + self.parse(b"\x88\x7e\x00\x7e" + 126 * b"a", mask=False) def test_fragmented_control_frame(self): # Fin bit correctly set. - self.parse(b"\x88\x00") + self.parse(b"\x88\x00", mask=False) # Fin bit incorrectly unset. with self.assertRaises(ProtocolError): - self.parse(b"\x08\x00") + self.parse(b"\x08\x00", mask=False) def test_extensions(self): class Rot13: @@ -145,8 +169,11 @@ def encode(frame): def decode(frame, *, max_size=None): return Rot13.encode(frame) - self.round_trip( - b"\x81\x05uryyb", Frame(True, OP_TEXT, b"hello"), extensions=[Rot13()] + self.assertFrameData( + Frame(True, OP_TEXT, b"hello"), + b"\x81\x05uryyb", + mask=False, + extensions=[Rot13()], ) @@ -205,15 +232,19 @@ def test_prepare_ctrl_none(self): class ParseAndSerializeCloseTests(unittest.TestCase): - def round_trip(self, data, code, reason): - parsed = parse_close(data) - self.assertEqual(parsed, (code, reason)) + def assertCloseData(self, code, reason, data): + """ + Serializing code / reason yields data. Parsing data yields code / reason. 
+ + """ serialized = serialize_close(code, reason) self.assertEqual(serialized, data) + parsed = parse_close(data) + self.assertEqual(parsed, (code, reason)) def test_parse_close_and_serialize_close(self): - self.round_trip(b"\x03\xe8", 1000, "") - self.round_trip(b"\x03\xe8OK", 1000, "OK") + self.assertCloseData(1000, "", b"\x03\xe8") + self.assertCloseData(1000, "OK", b"\x03\xe8OK") def test_parse_close_empty(self): self.assertEqual(parse_close(b""), (1005, "")) From fad4c57d4d84cb884bd30ebe44e07ace4d5f4cfb Mon Sep 17 00:00:00 2001 From: akgnah <1024@setq.me> Date: Mon, 2 Mar 2020 13:01:51 +0800 Subject: [PATCH 210/281] fix typo in example/counter.py --- example/counter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/example/counter.py b/example/counter.py index dbbbe5935..239ec203a 100755 --- a/example/counter.py +++ b/example/counter.py @@ -58,7 +58,7 @@ async def counter(websocket, path): STATE["value"] += 1 await notify_state() else: - logging.error("unsupported event: {}", data) + logging.error("unsupported event: %s", data) finally: await unregister(websocket) From 458c4d67faaaf52359f713aafc3eda26afb1de3d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Thomas=20L=C3=89VEIL?= Date: Thu, 9 Apr 2020 01:09:36 +0200 Subject: [PATCH 211/281] support request lines of 4107 bytes fix #743 avoid sending a `HTTP 400` response when popular browsers send a request with cookies maxing up the user-agent limit --- src/websockets/http11.py | 2 +- src/websockets/http_legacy.py | 2 +- tests/test_http11.py | 4 ++-- tests/test_http_legacy.py | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/websockets/http11.py b/src/websockets/http11.py index 58ee09253..693a20e54 100644 --- a/src/websockets/http11.py +++ b/src/websockets/http11.py @@ -6,7 +6,7 @@ MAX_HEADERS = 256 -MAX_LINE = 4096 +MAX_LINE = 4107 def d(value: bytes) -> str: diff --git a/src/websockets/http_legacy.py b/src/websockets/http_legacy.py index 3630d3593..0bc548b31 100644 --- a/src/websockets/http_legacy.py +++ b/src/websockets/http_legacy.py @@ -9,7 +9,7 @@ __all__ = ["read_request", "read_response"] MAX_HEADERS = 256 -MAX_LINE = 4096 +MAX_LINE = 4107 def d(value: bytes) -> str: diff --git a/tests/test_http11.py b/tests/test_http11.py index 4574cf97e..87be6e486 100644 --- a/tests/test_http11.py +++ b/tests/test_http11.py @@ -260,8 +260,8 @@ def test_parse_too_long_value(self): next(self.parse_headers()) def test_parse_too_long_line(self): - # Header line contains 5 + 4090 + 2 = 4097 bytes. - self.reader.feed_data(b"foo: " + b"a" * 4090 + b"\r\n\r\n") + # Header line contains 5 + 4101 + 2 = 4108 bytes. + self.reader.feed_data(b"foo: " + b"a" * 4101 + b"\r\n\r\n") with self.assertRaises(SecurityError): next(self.parse_headers()) diff --git a/tests/test_http_legacy.py b/tests/test_http_legacy.py index 3b43a6274..667aff52a 100644 --- a/tests/test_http_legacy.py +++ b/tests/test_http_legacy.py @@ -124,8 +124,8 @@ async def test_headers_limit(self): await read_headers(self.stream) async def test_line_limit(self): - # Header line contains 5 + 4090 + 2 = 4097 bytes. - self.stream.feed_data(b"foo: " + b"a" * 4090 + b"\r\n\r\n") + # Header line contains 5 + 4101 + 2 = 4108 bytes. + self.stream.feed_data(b"foo: " + b"a" * 4101 + b"\r\n\r\n") with self.assertRaises(SecurityError): await read_headers(self.stream) From f056c1cfb8ef417180bf337308aa73e49c9469b4 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 26 Jul 2020 21:10:56 +0200 Subject: [PATCH 212/281] Adjust max header size (again). 
See #743 for the rationale. --- src/websockets/http11.py | 2 +- src/websockets/http_legacy.py | 2 +- tests/test_http11.py | 4 ++-- tests/test_http_legacy.py | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/websockets/http11.py b/src/websockets/http11.py index 693a20e54..0754ddabb 100644 --- a/src/websockets/http11.py +++ b/src/websockets/http11.py @@ -6,7 +6,7 @@ MAX_HEADERS = 256 -MAX_LINE = 4107 +MAX_LINE = 4110 def d(value: bytes) -> str: diff --git a/src/websockets/http_legacy.py b/src/websockets/http_legacy.py index 0bc548b31..5afe5f898 100644 --- a/src/websockets/http_legacy.py +++ b/src/websockets/http_legacy.py @@ -9,7 +9,7 @@ __all__ = ["read_request", "read_response"] MAX_HEADERS = 256 -MAX_LINE = 4107 +MAX_LINE = 4110 def d(value: bytes) -> str: diff --git a/tests/test_http11.py b/tests/test_http11.py index 87be6e486..9e4d70620 100644 --- a/tests/test_http11.py +++ b/tests/test_http11.py @@ -260,8 +260,8 @@ def test_parse_too_long_value(self): next(self.parse_headers()) def test_parse_too_long_line(self): - # Header line contains 5 + 4101 + 2 = 4108 bytes. - self.reader.feed_data(b"foo: " + b"a" * 4101 + b"\r\n\r\n") + # Header line contains 5 + 4104 + 2 = 4111 bytes. + self.reader.feed_data(b"foo: " + b"a" * 4104 + b"\r\n\r\n") with self.assertRaises(SecurityError): next(self.parse_headers()) diff --git a/tests/test_http_legacy.py b/tests/test_http_legacy.py index 667aff52a..e4c75315e 100644 --- a/tests/test_http_legacy.py +++ b/tests/test_http_legacy.py @@ -124,8 +124,8 @@ async def test_headers_limit(self): await read_headers(self.stream) async def test_line_limit(self): - # Header line contains 5 + 4101 + 2 = 4108 bytes. - self.stream.feed_data(b"foo: " + b"a" * 4101 + b"\r\n\r\n") + # Header line contains 5 + 4104 + 2 = 4111 bytes. + self.stream.feed_data(b"foo: " + b"a" * 4104 + b"\r\n\r\n") with self.assertRaises(SecurityError): await read_headers(self.stream) From 639b993a236107f22d529cde488d1e1eb6645228 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 26 Jul 2020 21:38:15 +0200 Subject: [PATCH 213/281] Create correct Host header for IPv6. Fix #802. 
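(For context only, not part of the patch: the build_host() helper added below
is expected to behave as follows, judging from the tests added in the same
change -- default ports are omitted and IPv6 literals are bracketed per
RFC 3986.)

    >>> from websockets.http import build_host
    >>> build_host("example.com", 443, True)
    'example.com'
    >>> build_host("::1", 8000, False)
    '[::1]:8000'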
--- src/websockets/asyncio_client.py | 7 ++----- src/websockets/client.py | 9 ++++----- src/websockets/http.py | 26 +++++++++++++++++++++++++- tests/test_http.py | 29 +++++++++++++++++++++++++++-- 4 files changed, 58 insertions(+), 13 deletions(-) diff --git a/src/websockets/asyncio_client.py b/src/websockets/asyncio_client.py index f95dae060..e01a641cb 100644 --- a/src/websockets/asyncio_client.py +++ b/src/websockets/asyncio_client.py @@ -31,7 +31,7 @@ parse_extension, parse_subprotocol, ) -from .http import USER_AGENT +from .http import USER_AGENT, build_host from .http_legacy import read_response from .protocol import WebSocketCommonProtocol from .typing import ExtensionHeader, Origin, Subprotocol @@ -251,10 +251,7 @@ async def handshake( """ request_headers = Headers() - if wsuri.port == (443 if wsuri.secure else 80): # pragma: no cover - request_headers["Host"] = wsuri.host - else: - request_headers["Host"] = f"{wsuri.host}:{wsuri.port}" + request_headers["Host"] = build_host(wsuri.host, wsuri.port, wsuri.secure) if wsuri.user_info: request_headers["Authorization"] = build_authorization_basic( diff --git a/src/websockets/client.py b/src/websockets/client.py index 3f9777b94..a7bfcc4ee 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -23,7 +23,7 @@ parse_subprotocol, parse_upgrade, ) -from .http import USER_AGENT +from .http import USER_AGENT, build_host from .http11 import Request, Response from .typing import ( ConnectionOption, @@ -71,10 +71,9 @@ def connect(self) -> Request: """ headers = Headers() - if self.wsuri.port == (443 if self.wsuri.secure else 80): - headers["Host"] = self.wsuri.host - else: - headers["Host"] = f"{self.wsuri.host}:{self.wsuri.port}" + headers["Host"] = build_host( + self.wsuri.host, self.wsuri.port, self.wsuri.secure + ) if self.wsuri.user_info: headers["Authorization"] = build_authorization_basic(*self.wsuri.user_info) diff --git a/src/websockets/http.py b/src/websockets/http.py index 850b9beaa..ed3fe48d0 100644 --- a/src/websockets/http.py +++ b/src/websockets/http.py @@ -1,4 +1,5 @@ import asyncio +import ipaddress import sys import warnings from typing import Tuple @@ -9,13 +10,36 @@ from .version import version as websockets_version -__all__ = ["USER_AGENT"] +__all__ = ["USER_AGENT", "build_host"] PYTHON_VERSION = "{}.{}".format(*sys.version_info) USER_AGENT = f"Python/{PYTHON_VERSION} websockets/{websockets_version}" +def build_host(host: str, port: int, secure: bool) -> str: + """ + Build a ``Host`` header. + + """ + # https://tools.ietf.org/html/rfc3986#section-3.2.2 + # IPv6 addresses must be enclosed in brackets. + try: + address = ipaddress.ip_address(host) + except ValueError: + # host is a hostname + pass + else: + # host is an IP address + if address.version == 6: + host = f"[{host}]" + + if port != (443 if secure else 80): + host = f"{host}:{port}" + + return host + + # Backwards compatibility with previously documented public APIs diff --git a/tests/test_http.py b/tests/test_http.py index 322650354..ca7c1c0a4 100644 --- a/tests/test_http.py +++ b/tests/test_http.py @@ -1,2 +1,27 @@ -# Check that the legacy http module imports without an exception. 
-from websockets.http import * # noqa +import unittest + +from websockets.http import * + + +class HTTPTests(unittest.TestCase): + def test_build_host(self): + for (host, port, secure), result in [ + (("localhost", 80, False), "localhost"), + (("localhost", 8000, False), "localhost:8000"), + (("localhost", 443, True), "localhost"), + (("localhost", 8443, True), "localhost:8443"), + (("example.com", 80, False), "example.com"), + (("example.com", 8000, False), "example.com:8000"), + (("example.com", 443, True), "example.com"), + (("example.com", 8443, True), "example.com:8443"), + (("127.0.0.1", 80, False), "127.0.0.1"), + (("127.0.0.1", 8000, False), "127.0.0.1:8000"), + (("127.0.0.1", 443, True), "127.0.0.1"), + (("127.0.0.1", 8443, True), "127.0.0.1:8443"), + (("::1", 80, False), "[::1]"), + (("::1", 8000, False), "[::1]:8000"), + (("::1", 443, True), "[::1]"), + (("::1", 8443, True), "[::1]:8443"), + ]: + with self.subTest(host=host, port=port, secure=secure): + self.assertEqual(build_host(host, port, secure), result) From 6466e238f4809e81579f70460563fa0d00b7905a Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 26 Jul 2020 21:49:05 +0200 Subject: [PATCH 214/281] Raise a good error when sending a dict. This must be a common mistake. Fix #734. --- src/websockets/protocol.py | 10 ++++++++++ tests/test_protocol.py | 5 +++++ 2 files changed, 15 insertions(+) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 2e5d95e06..92ce8e305 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -25,6 +25,7 @@ Dict, Iterable, List, + Mapping, Optional, Union, cast, @@ -548,6 +549,10 @@ async def send( :meth:`send` will raise a :exc:`TypeError` and the connection will be closed. + :meth:`send` rejects dict-like objects because this is often an error. + If you wish to send the keys of a dict-like object as fragments, call + its :meth:`~dict.keys` method and pass the result to :meth:`send`. + Canceling :meth:`send` is discouraged. Instead, you should close the connection with :meth:`close`. Indeed, there only two situations where :meth:`send` yields control to the event loop: @@ -576,6 +581,11 @@ async def send( opcode, data = prepare_data(message) await self.write_frame(True, opcode, data) + # Catch a common mistake -- passing a dict to send(). + + elif isinstance(message, Mapping): + raise TypeError("data is a dict-like object") + # Fragmented message -- regular iterator. elif isinstance(message, Iterable): diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 3054600e1..432c31ef5 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -584,6 +584,11 @@ def test_send_binary_from_non_contiguous_memoryview(self): self.loop.run_until_complete(self.protocol.send(memoryview(b"tteeaa")[::2])) self.assertOneFrameSent(True, OP_BINARY, b"tea") + def test_send_dict(self): + with self.assertRaises(TypeError): + self.loop.run_until_complete(self.protocol.send({"not": "encoded"})) + self.assertNoFrameSent() + def test_send_type_error(self): with self.assertRaises(TypeError): self.loop.run_until_complete(self.protocol.send(42)) From 97ae02b4560516f577b265ef222fff5fb3e950b6 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 26 Jul 2020 22:05:31 +0200 Subject: [PATCH 215/281] Document pitfall. Fix #335. 
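(Illustration only, not part of the patch: the pitfall documented below comes
from a local file shadowing the installed package, so attribute lookups fail.)

    # With a file named websockets.py in the current working directory,
    # "import websockets" picks up that file instead of the library:
    import websockets
    websockets.connect  # AttributeError: module 'websockets' has no attribute 'connect'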
--- docs/faq.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/faq.rst b/docs/faq.rst index cea3f5358..5e6439055 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -204,6 +204,13 @@ There are several reasons why long-lived connections may be lost: If you're facing a reproducible issue, :ref:`enable debug logs ` to see when and how connections are closed. +Why do I get the error: ``module 'websockets' has no attribute '...'``? +....................................................................... + +Often, this is because you created a script called ``websockets.py`` in your +current working directory. Then ``import websockets`` imports this module +instead of the websockets library. + Are there ``onopen``, ``onmessage``, ``onerror``, and ``onclose`` callbacks? ............................................................................ From 0a1195eed14eddb3f27929ef49af4024814c3f37 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 26 Jul 2020 22:48:28 +0200 Subject: [PATCH 216/281] Type create_protocol arguments as callables. Fix #764. --- src/websockets/asyncio_client.py | 4 ++-- src/websockets/asyncio_server.py | 2 +- src/websockets/auth.py | 13 +++++++++---- tests/test_auth.py | 22 +++++++++++++++++++++- 4 files changed, 33 insertions(+), 8 deletions(-) diff --git a/src/websockets/asyncio_client.py b/src/websockets/asyncio_client.py index e01a641cb..efa29b69a 100644 --- a/src/websockets/asyncio_client.py +++ b/src/websockets/asyncio_client.py @@ -9,7 +9,7 @@ import logging import warnings from types import TracebackType -from typing import Any, Generator, List, Optional, Sequence, Tuple, Type, cast +from typing import Any, Callable, Generator, List, Optional, Sequence, Tuple, Type, cast from .datastructures import Headers, HeadersLike from .exceptions import ( @@ -373,7 +373,7 @@ def __init__( uri: str, *, path: Optional[str] = None, - create_protocol: Optional[Type[WebSocketClientProtocol]] = None, + create_protocol: Optional[Callable[[Any], WebSocketClientProtocol]] = None, ping_interval: Optional[float] = 20, ping_timeout: Optional[float] = 20, close_timeout: Optional[float] = None, diff --git a/src/websockets/asyncio_server.py b/src/websockets/asyncio_server.py index 89ddf6c7d..fe61c7ddc 100644 --- a/src/websockets/asyncio_server.py +++ b/src/websockets/asyncio_server.py @@ -850,7 +850,7 @@ def __init__( port: Optional[int] = None, *, path: Optional[str] = None, - create_protocol: Optional[Type[WebSocketServerProtocol]] = None, + create_protocol: Optional[Callable[[Any], WebSocketServerProtocol]] = None, ping_interval: Optional[float] = 20, ping_timeout: Optional[float] = 20, close_timeout: Optional[float] = None, diff --git a/src/websockets/auth.py b/src/websockets/auth.py index 03e8536c5..c1b7a0b1a 100644 --- a/src/websockets/auth.py +++ b/src/websockets/auth.py @@ -7,7 +7,7 @@ import functools import http -from typing import Any, Awaitable, Callable, Iterable, Optional, Tuple, Type, Union +from typing import Any, Awaitable, Callable, Iterable, Optional, Tuple, Union, cast from .asyncio_server import HTTPResponse, WebSocketServerProtocol from .datastructures import Headers @@ -90,9 +90,7 @@ def basic_auth_protocol_factory( realm: str, credentials: Optional[Union[Credentials, Iterable[Credentials]]] = None, check_credentials: Optional[Callable[[str, str], Awaitable[bool]]] = None, - create_protocol: Type[ - BasicAuthWebSocketServerProtocol - ] = BasicAuthWebSocketServerProtocol, + create_protocol: Optional[Callable[[Any], BasicAuthWebSocketServerProtocol]] = 
None, ) -> Callable[[Any], BasicAuthWebSocketServerProtocol]: """ Protocol factory that enforces HTTP Basic Auth. @@ -155,6 +153,13 @@ async def check_credentials(username: str, password: str) -> bool: else: raise TypeError(f"invalid credentials argument: {credentials}") + if create_protocol is None: + # Not sure why mypy cannot figure this out. + create_protocol = cast( + Callable[[Any], BasicAuthWebSocketServerProtocol], + BasicAuthWebSocketServerProtocol, + ) + return functools.partial( create_protocol, realm=realm, check_credentials=check_credentials ) diff --git a/tests/test_auth.py b/tests/test_auth.py index c693c9f45..68642389e 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -19,6 +19,12 @@ def test_is_not_credentials(self): self.assertFalse(is_credentials("username")) +class CustomWebSocketServerProtocol(BasicAuthWebSocketServerProtocol): + async def process_request(self, path, request_headers): + type(self).used = True + return await super().process_request(path, request_headers) + + class AuthClientServerTests(ClientServerTestsMixin, AsyncioTestCase): create_protocol = basic_auth_protocol_factory( @@ -73,7 +79,7 @@ async def check_credentials(username, password): return password == "iloveyou" create_protocol_check_credentials = basic_auth_protocol_factory( - realm="auth-tests", check_credentials=check_credentials + realm="auth-tests", check_credentials=check_credentials, ) @with_server(create_protocol=create_protocol_check_credentials) @@ -82,6 +88,20 @@ def test_basic_auth_check_credentials(self): self.loop.run_until_complete(self.client.send("Hello!")) self.loop.run_until_complete(self.client.recv()) + create_protocol_custom_protocol = basic_auth_protocol_factory( + realm="auth-tests", + credentials=[("hello", "iloveyou")], + create_protocol=CustomWebSocketServerProtocol, + ) + + @with_server(create_protocol=create_protocol_custom_protocol) + @with_client(user_info=("hello", "iloveyou")) + def test_basic_auth_custom_protocol(self): + self.assertTrue(CustomWebSocketServerProtocol.used) + del CustomWebSocketServerProtocol.used + self.loop.run_until_complete(self.client.send("Hello!")) + self.loop.run_until_complete(self.client.recv()) + @with_server(create_protocol=create_protocol) def test_basic_auth_missing_credentials(self): with self.assertRaises(InvalidStatusCode) as raised: From cb91aa1575066f6624944cb75bb41d68a45d1b45 Mon Sep 17 00:00:00 2001 From: Janakarajan Natarajan Date: Tue, 18 Aug 2020 22:52:03 +0000 Subject: [PATCH 217/281] Add aarch64 wheel build --- .travis.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.travis.yml b/.travis.yml index 26e1de60e..e31c9ea0b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,6 +13,13 @@ matrix: python: "3.7" services: - docker + - language: python + dist: xenial + sudo: required + python: "3.7" + arch: arm64 + services: + - docker - os: osx osx_image: xcode8.3 From c39268c4867e41d11c20f7859583761d52a04012 Mon Sep 17 00:00:00 2001 From: Ram Rachum Date: Mon, 27 Jul 2020 14:06:08 +0300 Subject: [PATCH 218/281] Fix exception causes in handshake_legacy.py --- src/websockets/handshake_legacy.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/src/websockets/handshake_legacy.py b/src/websockets/handshake_legacy.py index 7e6acc77d..d34ca5f7f 100644 --- a/src/websockets/handshake_legacy.py +++ b/src/websockets/handshake_legacy.py @@ -91,28 +91,28 @@ def check_request(headers: Headers) -> str: try: s_w_key = headers["Sec-WebSocket-Key"] - except KeyError: - raise 
InvalidHeader("Sec-WebSocket-Key") - except MultipleValuesError: + except KeyError as exc: + raise InvalidHeader("Sec-WebSocket-Key") from exc + except MultipleValuesError as exc: raise InvalidHeader( "Sec-WebSocket-Key", "more than one Sec-WebSocket-Key header found" - ) + ) from exc try: raw_key = base64.b64decode(s_w_key.encode(), validate=True) - except binascii.Error: - raise InvalidHeaderValue("Sec-WebSocket-Key", s_w_key) + except binascii.Error as exc: + raise InvalidHeaderValue("Sec-WebSocket-Key", s_w_key) from exc if len(raw_key) != 16: raise InvalidHeaderValue("Sec-WebSocket-Key", s_w_key) try: s_w_version = headers["Sec-WebSocket-Version"] - except KeyError: - raise InvalidHeader("Sec-WebSocket-Version") - except MultipleValuesError: + except KeyError as exc: + raise InvalidHeader("Sec-WebSocket-Version") from exc + except MultipleValuesError as exc: raise InvalidHeader( "Sec-WebSocket-Version", "more than one Sec-WebSocket-Version header found" - ) + ) from exc if s_w_version != "13": raise InvalidHeaderValue("Sec-WebSocket-Version", s_w_version) @@ -168,12 +168,12 @@ def check_response(headers: Headers, key: str) -> None: try: s_w_accept = headers["Sec-WebSocket-Accept"] - except KeyError: - raise InvalidHeader("Sec-WebSocket-Accept") - except MultipleValuesError: + except KeyError as exc: + raise InvalidHeader("Sec-WebSocket-Accept") from exc + except MultipleValuesError as exc: raise InvalidHeader( "Sec-WebSocket-Accept", "more than one Sec-WebSocket-Accept header found" - ) + ) from exc if s_w_accept != accept(key): raise InvalidHeaderValue("Sec-WebSocket-Accept", s_w_accept) From 69cf86724dc2a86f7e57f6393dd322a249dbee17 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 21 Nov 2020 13:29:09 +0100 Subject: [PATCH 219/281] Move question to the FAQ. It was written in the cheatsheet before there was a FAQ. --- docs/cheatsheet.rst | 22 ---------------------- docs/faq.rst | 20 ++++++++++++++++++++ 2 files changed, 20 insertions(+), 22 deletions(-) diff --git a/docs/cheatsheet.rst b/docs/cheatsheet.rst index f897326a6..4b95c9eea 100644 --- a/docs/cheatsheet.rst +++ b/docs/cheatsheet.rst @@ -85,25 +85,3 @@ in particular. Fortunately Python's official documentation provides advice to .. _develop with asyncio: https://docs.python.org/3/library/asyncio-dev.html -Passing additional arguments to the connection handler ------------------------------------------------------- - -When writing a server, if you need to pass additional arguments to the -connection handler, you can bind them with :func:`functools.partial`:: - - import asyncio - import functools - import websockets - - async def handler(websocket, path, extra_argument): - ... - - bound_handler = functools.partial(handler, extra_argument='spam') - start_server = websockets.serve(bound_handler, '127.0.0.1', 8765) - - asyncio.get_event_loop().run_until_complete(start_server) - asyncio.get_event_loop().run_forever() - -Another way to achieve this result is to define the ``handler`` coroutine in -a scope where the ``extra_argument`` variable exists instead of injecting it -through an argument. diff --git a/docs/faq.rst b/docs/faq.rst index 5e6439055..5748521f0 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -56,6 +56,26 @@ See also Python's documentation about `running blocking code`_. .. _running blocking code: https://docs.python.org/3/library/asyncio-dev.html#running-blocking-code +How can I pass additional arguments to the connection handler? +.............................................................. 
+ +You can bind additional arguments to the connection handler with +:func:`functools.partial`:: + + import asyncio + import functools + import websockets + + async def handler(websocket, path, extra_argument): + ... + + bound_handler = functools.partial(handler, extra_argument='spam') + start_server = websockets.serve(bound_handler, ...) + +Another way to achieve this result is to define the ``handler`` coroutine in +a scope where the ``extra_argument`` variable exists instead of injecting it +through an argument. + How do I get access HTTP headers, for example cookies? ...................................................... From a64136c869c527808c337b13e6dace43ad9d674e Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 21 Nov 2020 13:31:38 +0100 Subject: [PATCH 220/281] Remove unfinished sentence. --- docs/faq.rst | 2 -- 1 file changed, 2 deletions(-) diff --git a/docs/faq.rst b/docs/faq.rst index 5748521f0..cd0033734 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -85,8 +85,6 @@ To access HTTP headers during the WebSocket handshake, you can override async def process_request(self, path, request_headers): cookies = request_header["Cookie"] -See - Once the connection is established, they're available in :attr:`~protocol.WebSocketServerProtocol.request_headers`:: From b331e6c9c3d2cfd3d768aa81e396a9e2f977cf88 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 21 Nov 2020 13:37:53 +0100 Subject: [PATCH 221/281] Document how to pass arguments to protocol factory. Fix #851. --- docs/faq.rst | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/docs/faq.rst b/docs/faq.rst index cd0033734..4a083e2d0 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -222,6 +222,26 @@ There are several reasons why long-lived connections may be lost: If you're facing a reproducible issue, :ref:`enable debug logs ` to see when and how connections are closed. +How can I pass additional arguments to a custom protocol subclass? +.................................................................. + +You can bind additional arguments to the protocol factory with +:func:`functools.partial`:: + + import asyncio + import functools + import websockets + + class MyServerProtocol(websockets.WebSocketServerProtocol): + def __init__(self, extra_argument, *args, **kwargs): + super().__init__(*args, **kwargs) + # do something with extra_argument + + create_protocol = functools.partial(MyServerProtocol, extra_argument='spam') + start_server = websockets.serve(..., create_protocol=create_protocol) + +This example was for a server. The same pattern applies on a client. + Why do I get the error: ``module 'websockets' has no attribute '...'``? ....................................................................... From 988572074edbde4dce1e49573e9dca05498bb159 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 21 Nov 2020 13:50:45 +0100 Subject: [PATCH 222/281] Brag with # stargazers. Fix #844. 
--- docs/conf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/conf.py b/docs/conf.py index 064c657bf..0c00b96fb 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -111,6 +111,7 @@ 'logo': 'websockets.svg', 'description': 'A library for building WebSocket servers and clients in Python with a focus on correctness and simplicity.', 'github_button': True, + 'github_type': 'star', 'github_user': 'aaugustin', 'github_repo': 'websockets', 'tidelift_url': 'https://tidelift.com/subscription/pkg/pypi-websockets?utm_source=pypi-websockets&utm_medium=referral&utm_campaign=docs', From e6d5da9b94167d875e2fb3936e44665fe0f562bc Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 21 Nov 2020 16:25:33 +0100 Subject: [PATCH 223/281] Include "broadcast" as a search term. Fix #841. --- docs/intro.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/intro.rst b/docs/intro.rst index 8be700239..8aaaeddca 100644 --- a/docs/intro.rst +++ b/docs/intro.rst @@ -180,7 +180,7 @@ unregister them when they disconnect. # Register. connected.add(websocket) try: - # Implement logic here. + # Broadcast a message to all connected clients. await asyncio.wait([ws.send("Hello!") for ws in connected]) await asyncio.sleep(10) finally: From f6e03bbd1f0e1affdda16488e46ae488ab0ccfcb Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 21 Nov 2020 17:38:30 +0100 Subject: [PATCH 224/281] Run new version of black. --- src/websockets/client.py | 2 +- src/websockets/connection.py | 5 +- src/websockets/exceptions.py | 5 +- src/websockets/server.py | 3 +- tests/test_auth.py | 3 +- tests/test_connection.py | 200 +++++++++++++++++++++++------------ tests/test_frames.py | 49 +++++++-- tests/test_http11.py | 60 ++++++++--- 8 files changed, 232 insertions(+), 95 deletions(-) diff --git a/src/websockets/client.py b/src/websockets/client.py index a7bfcc4ee..b7e407a45 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -280,7 +280,7 @@ def send_request(self, request: Request) -> None: def parse(self) -> Generator[None, None, None]: response = yield from Response.parse( - self.reader.read_line, self.reader.read_exact, self.reader.read_to_eof, + self.reader.read_line, self.reader.read_exact, self.reader.read_to_eof ) assert self.state == CONNECTING try: diff --git a/src/websockets/connection.py b/src/websockets/connection.py index ac30802db..a98d0b1e7 100644 --- a/src/websockets/connection.py +++ b/src/websockets/connection.py @@ -63,7 +63,10 @@ class State(enum.IntEnum): class Connection: def __init__( - self, side: Side, state: State = OPEN, max_size: Optional[int] = 2 ** 20, + self, + side: Side, + state: State = OPEN, + max_size: Optional[int] = 2 ** 20, ) -> None: # Connection side. CLIENT or SERVER. 
self.side = side diff --git a/src/websockets/exceptions.py b/src/websockets/exceptions.py index c60a3e10e..84c27692c 100644 --- a/src/websockets/exceptions.py +++ b/src/websockets/exceptions.py @@ -302,7 +302,10 @@ class AbortHandshake(InvalidHandshake): """ def __init__( - self, status: http.HTTPStatus, headers: HeadersLike, body: bytes = b"" + self, + status: http.HTTPStatus, + headers: HeadersLike, + body: bytes = b"", ) -> None: self.status = status self.headers = Headers(headers) diff --git a/src/websockets/server.py b/src/websockets/server.py index 1b03eabee..c2c818ce9 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -242,7 +242,8 @@ def process_origin(self, headers: Headers) -> Optional[Origin]: return origin def process_extensions( - self, headers: Headers, + self, + headers: Headers, ) -> Tuple[Optional[str], List[Extension]]: """ Handle the Sec-WebSocket-Extensions HTTP request header. diff --git a/tests/test_auth.py b/tests/test_auth.py index 68642389e..ce23f913d 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -79,7 +79,8 @@ async def check_credentials(username, password): return password == "iloveyou" create_protocol_check_credentials = basic_auth_protocol_factory( - realm="auth-tests", check_credentials=check_credentials, + realm="auth-tests", + check_credentials=check_credentials, ) @with_server(create_protocol=create_protocol_check_credentials) diff --git a/tests/test_connection.py b/tests/test_connection.py index 5c0f7302f..d47147d64 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -59,7 +59,9 @@ def assertConnectionClosing(self, connection, code=None, reason=""): """ close_frame = Frame( - True, OP_CLOSE, b"" if code is None else serialize_close(code, reason), + True, + OP_CLOSE, + b"" if code is None else serialize_close(code, reason), ) # A close frame was received. self.assertFrameReceived(connection, close_frame) @@ -74,7 +76,9 @@ def assertConnectionFailing(self, connection, code=None, reason=""): """ close_frame = Frame( - True, OP_CLOSE, b"" if code is None else serialize_close(code, reason), + True, + OP_CLOSE, + b"" if code is None else serialize_close(code, reason), ) # No frame was received. 
self.assertFrameReceived(connection, None) @@ -108,14 +112,16 @@ def test_client_receives_unmasked_frame(self): client = Connection(Side.CLIENT) client.receive_data(self.unmasked_text_frame_date) self.assertFrameReceived( - client, Frame(True, OP_TEXT, b"Spam"), + client, + Frame(True, OP_TEXT, b"Spam"), ) def test_server_receives_masked_frame(self): server = Connection(Side.SERVER) server.receive_data(self.masked_text_frame_data) self.assertFrameReceived( - server, Frame(True, OP_TEXT, b"Spam"), + server, + Frame(True, OP_TEXT, b"Spam"), ) def test_client_receives_masked_frame(self): @@ -228,14 +234,16 @@ def test_client_receives_text(self): client = Connection(Side.CLIENT) client.receive_data(b"\x81\x04\xf0\x9f\x98\x80") self.assertFrameReceived( - client, Frame(True, OP_TEXT, "😀".encode()), + client, + Frame(True, OP_TEXT, "😀".encode()), ) def test_server_receives_text(self): server = Connection(Side.SERVER) server.receive_data(b"\x81\x84\x00\x00\x00\x00\xf0\x9f\x98\x80") self.assertFrameReceived( - server, Frame(True, OP_TEXT, "😀".encode()), + server, + Frame(True, OP_TEXT, "😀".encode()), ) def test_client_receives_text_over_size_limit(self): @@ -256,14 +264,16 @@ def test_client_receives_text_without_size_limit(self): client = Connection(Side.CLIENT, max_size=None) client.receive_data(b"\x81\x04\xf0\x9f\x98\x80") self.assertFrameReceived( - client, Frame(True, OP_TEXT, "😀".encode()), + client, + Frame(True, OP_TEXT, "😀".encode()), ) def test_server_receives_text_without_size_limit(self): server = Connection(Side.SERVER, max_size=None) server.receive_data(b"\x81\x84\x00\x00\x00\x00\xf0\x9f\x98\x80") self.assertFrameReceived( - server, Frame(True, OP_TEXT, "😀".encode()), + server, + Frame(True, OP_TEXT, "😀".encode()), ) def test_client_sends_fragmented_text(self): @@ -293,37 +303,44 @@ def test_client_receives_fragmented_text(self): client = Connection(Side.CLIENT) client.receive_data(b"\x01\x02\xf0\x9f") self.assertFrameReceived( - client, Frame(False, OP_TEXT, "😀".encode()[:2]), + client, + Frame(False, OP_TEXT, "😀".encode()[:2]), ) client.receive_data(b"\x00\x04\x98\x80\xf0\x9f") self.assertFrameReceived( - client, Frame(False, OP_CONT, "😀😀".encode()[2:6]), + client, + Frame(False, OP_CONT, "😀😀".encode()[2:6]), ) client.receive_data(b"\x80\x02\x98\x80") self.assertFrameReceived( - client, Frame(True, OP_CONT, "😀".encode()[2:]), + client, + Frame(True, OP_CONT, "😀".encode()[2:]), ) def test_server_receives_fragmented_text(self): server = Connection(Side.SERVER) server.receive_data(b"\x01\x82\x00\x00\x00\x00\xf0\x9f") self.assertFrameReceived( - server, Frame(False, OP_TEXT, "😀".encode()[:2]), + server, + Frame(False, OP_TEXT, "😀".encode()[:2]), ) server.receive_data(b"\x00\x84\x00\x00\x00\x00\x98\x80\xf0\x9f") self.assertFrameReceived( - server, Frame(False, OP_CONT, "😀😀".encode()[2:6]), + server, + Frame(False, OP_CONT, "😀😀".encode()[2:6]), ) server.receive_data(b"\x80\x82\x00\x00\x00\x00\x98\x80") self.assertFrameReceived( - server, Frame(True, OP_CONT, "😀".encode()[2:]), + server, + Frame(True, OP_CONT, "😀".encode()[2:]), ) def test_client_receives_fragmented_text_over_size_limit(self): client = Connection(Side.CLIENT, max_size=3) client.receive_data(b"\x01\x02\xf0\x9f") self.assertFrameReceived( - client, Frame(False, OP_TEXT, "😀".encode()[:2]), + client, + Frame(False, OP_TEXT, "😀".encode()[:2]), ) with self.assertRaises(PayloadTooBig) as raised: client.receive_data(b"\x80\x02\x98\x80") @@ -334,7 +351,8 @@ def test_server_receives_fragmented_text_over_size_limit(self): server = 
Connection(Side.SERVER, max_size=3) server.receive_data(b"\x01\x82\x00\x00\x00\x00\xf0\x9f") self.assertFrameReceived( - server, Frame(False, OP_TEXT, "😀".encode()[:2]), + server, + Frame(False, OP_TEXT, "😀".encode()[:2]), ) with self.assertRaises(PayloadTooBig) as raised: server.receive_data(b"\x80\x82\x00\x00\x00\x00\x98\x80") @@ -345,30 +363,36 @@ def test_client_receives_fragmented_text_without_size_limit(self): client = Connection(Side.CLIENT, max_size=None) client.receive_data(b"\x01\x02\xf0\x9f") self.assertFrameReceived( - client, Frame(False, OP_TEXT, "😀".encode()[:2]), + client, + Frame(False, OP_TEXT, "😀".encode()[:2]), ) client.receive_data(b"\x00\x04\x98\x80\xf0\x9f") self.assertFrameReceived( - client, Frame(False, OP_CONT, "😀😀".encode()[2:6]), + client, + Frame(False, OP_CONT, "😀😀".encode()[2:6]), ) client.receive_data(b"\x80\x02\x98\x80") self.assertFrameReceived( - client, Frame(True, OP_CONT, "😀".encode()[2:]), + client, + Frame(True, OP_CONT, "😀".encode()[2:]), ) def test_server_receives_fragmented_text_without_size_limit(self): server = Connection(Side.SERVER, max_size=None) server.receive_data(b"\x01\x82\x00\x00\x00\x00\xf0\x9f") self.assertFrameReceived( - server, Frame(False, OP_TEXT, "😀".encode()[:2]), + server, + Frame(False, OP_TEXT, "😀".encode()[:2]), ) server.receive_data(b"\x00\x84\x00\x00\x00\x00\x98\x80\xf0\x9f") self.assertFrameReceived( - server, Frame(False, OP_CONT, "😀😀".encode()[2:6]), + server, + Frame(False, OP_CONT, "😀😀".encode()[2:6]), ) server.receive_data(b"\x80\x82\x00\x00\x00\x00\x98\x80") self.assertFrameReceived( - server, Frame(True, OP_CONT, "😀".encode()[2:]), + server, + Frame(True, OP_CONT, "😀".encode()[2:]), ) def test_client_sends_unexpected_text(self): @@ -389,7 +413,8 @@ def test_client_receives_unexpected_text(self): client = Connection(Side.CLIENT) client.receive_data(b"\x01\x00") self.assertFrameReceived( - client, Frame(False, OP_TEXT, b""), + client, + Frame(False, OP_TEXT, b""), ) with self.assertRaises(ProtocolError) as raised: client.receive_data(b"\x01\x00") @@ -400,7 +425,8 @@ def test_server_receives_unexpected_text(self): server = Connection(Side.SERVER) server.receive_data(b"\x01\x80\x00\x00\x00\x00") self.assertFrameReceived( - server, Frame(False, OP_TEXT, b""), + server, + Frame(False, OP_TEXT, b""), ) with self.assertRaises(ProtocolError) as raised: server.receive_data(b"\x01\x80\x00\x00\x00\x00") @@ -462,14 +488,16 @@ def test_client_receives_binary(self): client = Connection(Side.CLIENT) client.receive_data(b"\x82\x04\x01\x02\xfe\xff") self.assertFrameReceived( - client, Frame(True, OP_BINARY, b"\x01\x02\xfe\xff"), + client, + Frame(True, OP_BINARY, b"\x01\x02\xfe\xff"), ) def test_server_receives_binary(self): server = Connection(Side.SERVER) server.receive_data(b"\x82\x84\x00\x00\x00\x00\x01\x02\xfe\xff") self.assertFrameReceived( - server, Frame(True, OP_BINARY, b"\x01\x02\xfe\xff"), + server, + Frame(True, OP_BINARY, b"\x01\x02\xfe\xff"), ) def test_client_receives_binary_over_size_limit(self): @@ -513,37 +541,44 @@ def test_client_receives_fragmented_binary(self): client = Connection(Side.CLIENT) client.receive_data(b"\x02\x02\x01\x02") self.assertFrameReceived( - client, Frame(False, OP_BINARY, b"\x01\x02"), + client, + Frame(False, OP_BINARY, b"\x01\x02"), ) client.receive_data(b"\x00\x04\xfe\xff\x01\x02") self.assertFrameReceived( - client, Frame(False, OP_CONT, b"\xfe\xff\x01\x02"), + client, + Frame(False, OP_CONT, b"\xfe\xff\x01\x02"), ) client.receive_data(b"\x80\x02\xfe\xff") self.assertFrameReceived( - 
client, Frame(True, OP_CONT, b"\xfe\xff"), + client, + Frame(True, OP_CONT, b"\xfe\xff"), ) def test_server_receives_fragmented_binary(self): server = Connection(Side.SERVER) server.receive_data(b"\x02\x82\x00\x00\x00\x00\x01\x02") self.assertFrameReceived( - server, Frame(False, OP_BINARY, b"\x01\x02"), + server, + Frame(False, OP_BINARY, b"\x01\x02"), ) server.receive_data(b"\x00\x84\x00\x00\x00\x00\xee\xff\x01\x02") self.assertFrameReceived( - server, Frame(False, OP_CONT, b"\xee\xff\x01\x02"), + server, + Frame(False, OP_CONT, b"\xee\xff\x01\x02"), ) server.receive_data(b"\x80\x82\x00\x00\x00\x00\xfe\xff") self.assertFrameReceived( - server, Frame(True, OP_CONT, b"\xfe\xff"), + server, + Frame(True, OP_CONT, b"\xfe\xff"), ) def test_client_receives_fragmented_binary_over_size_limit(self): client = Connection(Side.CLIENT, max_size=3) client.receive_data(b"\x02\x02\x01\x02") self.assertFrameReceived( - client, Frame(False, OP_BINARY, b"\x01\x02"), + client, + Frame(False, OP_BINARY, b"\x01\x02"), ) with self.assertRaises(PayloadTooBig) as raised: client.receive_data(b"\x80\x02\xfe\xff") @@ -554,7 +589,8 @@ def test_server_receives_fragmented_binary_over_size_limit(self): server = Connection(Side.SERVER, max_size=3) server.receive_data(b"\x02\x82\x00\x00\x00\x00\x01\x02") self.assertFrameReceived( - server, Frame(False, OP_BINARY, b"\x01\x02"), + server, + Frame(False, OP_BINARY, b"\x01\x02"), ) with self.assertRaises(PayloadTooBig) as raised: server.receive_data(b"\x80\x82\x00\x00\x00\x00\xfe\xff") @@ -579,7 +615,8 @@ def test_client_receives_unexpected_binary(self): client = Connection(Side.CLIENT) client.receive_data(b"\x02\x00") self.assertFrameReceived( - client, Frame(False, OP_BINARY, b""), + client, + Frame(False, OP_BINARY, b""), ) with self.assertRaises(ProtocolError) as raised: client.receive_data(b"\x02\x00") @@ -590,7 +627,8 @@ def test_server_receives_unexpected_binary(self): server = Connection(Side.SERVER) server.receive_data(b"\x02\x80\x00\x00\x00\x00") self.assertFrameReceived( - server, Frame(False, OP_BINARY, b""), + server, + Frame(False, OP_BINARY, b""), ) with self.assertRaises(ProtocolError) as raised: server.receive_data(b"\x02\x80\x00\x00\x00\x00") @@ -843,20 +881,24 @@ def test_client_receives_ping(self): client = Connection(Side.CLIENT) client.receive_data(b"\x89\x00") self.assertFrameReceived( - client, Frame(True, OP_PING, b""), + client, + Frame(True, OP_PING, b""), ) self.assertFrameSent( - client, Frame(True, OP_PONG, b""), + client, + Frame(True, OP_PONG, b""), ) def test_server_receives_ping(self): server = Connection(Side.SERVER) server.receive_data(b"\x89\x80\x00\x44\x88\xcc") self.assertFrameReceived( - server, Frame(True, OP_PING, b""), + server, + Frame(True, OP_PING, b""), ) self.assertFrameSent( - server, Frame(True, OP_PONG, b""), + server, + Frame(True, OP_PONG, b""), ) def test_client_sends_ping_with_data(self): @@ -876,20 +918,24 @@ def test_client_receives_ping_with_data(self): client = Connection(Side.CLIENT) client.receive_data(b"\x89\x04\x22\x66\xaa\xee") self.assertFrameReceived( - client, Frame(True, OP_PING, b"\x22\x66\xaa\xee"), + client, + Frame(True, OP_PING, b"\x22\x66\xaa\xee"), ) self.assertFrameSent( - client, Frame(True, OP_PONG, b"\x22\x66\xaa\xee"), + client, + Frame(True, OP_PONG, b"\x22\x66\xaa\xee"), ) def test_server_receives_ping_with_data(self): server = Connection(Side.SERVER) server.receive_data(b"\x89\x84\x00\x44\x88\xcc\x22\x22\x22\x22") self.assertFrameReceived( - server, Frame(True, OP_PING, b"\x22\x66\xaa\xee"), + 
server, + Frame(True, OP_PING, b"\x22\x66\xaa\xee"), ) self.assertFrameSent( - server, Frame(True, OP_PONG, b"\x22\x66\xaa\xee"), + server, + Frame(True, OP_PONG, b"\x22\x66\xaa\xee"), ) def test_client_sends_fragmented_ping_frame(self): @@ -953,7 +999,8 @@ def test_client_receives_ping_after_receiving_close(self): self.assertConnectionClosing(client, 1000) client.receive_data(b"\x89\x04\x22\x66\xaa\xee") self.assertFrameReceived( - client, Frame(True, OP_PING, b"\x22\x66\xaa\xee"), + client, + Frame(True, OP_PING, b"\x22\x66\xaa\xee"), ) self.assertFrameSent(client, None) @@ -963,7 +1010,8 @@ def test_server_receives_ping_after_receiving_close(self): self.assertConnectionClosing(server, 1001) server.receive_data(b"\x89\x84\x00\x44\x88\xcc\x22\x22\x22\x22") self.assertFrameReceived( - server, Frame(True, OP_PING, b"\x22\x66\xaa\xee"), + server, + Frame(True, OP_PING, b"\x22\x66\xaa\xee"), ) self.assertFrameSent(server, None) @@ -989,14 +1037,16 @@ def test_client_receives_pong(self): client = Connection(Side.CLIENT) client.receive_data(b"\x8a\x00") self.assertFrameReceived( - client, Frame(True, OP_PONG, b""), + client, + Frame(True, OP_PONG, b""), ) def test_server_receives_pong(self): server = Connection(Side.SERVER) server.receive_data(b"\x8a\x80\x00\x44\x88\xcc") self.assertFrameReceived( - server, Frame(True, OP_PONG, b""), + server, + Frame(True, OP_PONG, b""), ) def test_client_sends_pong_with_data(self): @@ -1016,14 +1066,16 @@ def test_client_receives_pong_with_data(self): client = Connection(Side.CLIENT) client.receive_data(b"\x8a\x04\x22\x66\xaa\xee") self.assertFrameReceived( - client, Frame(True, OP_PONG, b"\x22\x66\xaa\xee"), + client, + Frame(True, OP_PONG, b"\x22\x66\xaa\xee"), ) def test_server_receives_pong_with_data(self): server = Connection(Side.SERVER) server.receive_data(b"\x8a\x84\x00\x44\x88\xcc\x22\x22\x22\x22") self.assertFrameReceived( - server, Frame(True, OP_PONG, b"\x22\x66\xaa\xee"), + server, + Frame(True, OP_PONG, b"\x22\x66\xaa\xee"), ) def test_client_sends_fragmented_pong_frame(self): @@ -1077,7 +1129,8 @@ def test_client_receives_pong_after_receiving_close(self): self.assertConnectionClosing(client, 1000) client.receive_data(b"\x8a\x04\x22\x66\xaa\xee") self.assertFrameReceived( - client, Frame(True, OP_PONG, b"\x22\x66\xaa\xee"), + client, + Frame(True, OP_PONG, b"\x22\x66\xaa\xee"), ) def test_server_receives_pong_after_receiving_close(self): @@ -1086,7 +1139,8 @@ def test_server_receives_pong_after_receiving_close(self): self.assertConnectionClosing(server, 1001) server.receive_data(b"\x8a\x84\x00\x44\x88\xcc\x22\x22\x22\x22") self.assertFrameReceived( - server, Frame(True, OP_PONG, b"\x22\x66\xaa\xee"), + server, + Frame(True, OP_PONG, b"\x22\x66\xaa\xee"), ) @@ -1128,52 +1182,64 @@ def test_client_receive_ping_pong_in_fragmented_message(self): client = Connection(Side.CLIENT) client.receive_data(b"\x01\x04Spam") self.assertFrameReceived( - client, Frame(False, OP_TEXT, b"Spam"), + client, + Frame(False, OP_TEXT, b"Spam"), ) client.receive_data(b"\x89\x04Ping") self.assertFrameReceived( - client, Frame(True, OP_PING, b"Ping"), + client, + Frame(True, OP_PING, b"Ping"), ) self.assertFrameSent( - client, Frame(True, OP_PONG, b"Ping"), + client, + Frame(True, OP_PONG, b"Ping"), ) client.receive_data(b"\x00\x03Ham") self.assertFrameReceived( - client, Frame(False, OP_CONT, b"Ham"), + client, + Frame(False, OP_CONT, b"Ham"), ) client.receive_data(b"\x8a\x04Pong") self.assertFrameReceived( - client, Frame(True, OP_PONG, b"Pong"), + client, + Frame(True, 
OP_PONG, b"Pong"), ) client.receive_data(b"\x80\x04Eggs") self.assertFrameReceived( - client, Frame(True, OP_CONT, b"Eggs"), + client, + Frame(True, OP_CONT, b"Eggs"), ) def test_server_receive_ping_pong_in_fragmented_message(self): server = Connection(Side.SERVER) server.receive_data(b"\x01\x84\x00\x00\x00\x00Spam") self.assertFrameReceived( - server, Frame(False, OP_TEXT, b"Spam"), + server, + Frame(False, OP_TEXT, b"Spam"), ) server.receive_data(b"\x89\x84\x00\x00\x00\x00Ping") self.assertFrameReceived( - server, Frame(True, OP_PING, b"Ping"), + server, + Frame(True, OP_PING, b"Ping"), ) self.assertFrameSent( - server, Frame(True, OP_PONG, b"Ping"), + server, + Frame(True, OP_PONG, b"Ping"), ) server.receive_data(b"\x00\x83\x00\x00\x00\x00Ham") self.assertFrameReceived( - server, Frame(False, OP_CONT, b"Ham"), + server, + Frame(False, OP_CONT, b"Ham"), ) server.receive_data(b"\x8a\x84\x00\x00\x00\x00Pong") self.assertFrameReceived( - server, Frame(True, OP_PONG, b"Pong"), + server, + Frame(True, OP_PONG, b"Pong"), ) server.receive_data(b"\x80\x84\x00\x00\x00\x00Eggs") self.assertFrameReceived( - server, Frame(True, OP_CONT, b"Eggs"), + server, + Frame(True, OP_CONT, b"Eggs"), ) def test_client_send_close_in_fragmented_message(self): @@ -1205,7 +1271,8 @@ def test_client_receive_close_in_fragmented_message(self): client = Connection(Side.CLIENT) client.receive_data(b"\x01\x04Spam") self.assertFrameReceived( - client, Frame(False, OP_TEXT, b"Spam"), + client, + Frame(False, OP_TEXT, b"Spam"), ) # The spec says: "An endpoint MUST be capable of handling control # frames in the middle of a fragmented message." However, since the @@ -1220,7 +1287,8 @@ def test_server_receive_close_in_fragmented_message(self): server = Connection(Side.SERVER) server.receive_data(b"\x01\x84\x00\x00\x00\x00Spam") self.assertFrameReceived( - server, Frame(False, OP_TEXT, b"Spam"), + server, + Frame(False, OP_TEXT, b"Spam"), ) # The spec says: "An endpoint MUST be capable of handling control # frames in the middle of a fragmented message." 
However, since the diff --git a/tests/test_frames.py b/tests/test_frames.py index 514fe7c54..4d10c6ef2 100644 --- a/tests/test_frames.py +++ b/tests/test_frames.py @@ -22,7 +22,7 @@ def parse(self, data, mask, max_size=None, extensions=None): reader.feed_data(data) reader.feed_eof() parser = Frame.parse( - reader.read_exact, mask=mask, max_size=max_size, extensions=extensions, + reader.read_exact, mask=mask, max_size=max_size, extensions=extensions ) return self.assertGeneratorReturns(parser) @@ -47,7 +47,9 @@ def assertFrameData(self, frame, data, mask, extensions=None): class FrameTests(FramesTestCase): def test_text_unmasked(self): self.assertFrameData( - Frame(True, OP_TEXT, b"Spam"), b"\x81\x04Spam", mask=False, + Frame(True, OP_TEXT, b"Spam"), + b"\x81\x04Spam", + mask=False, ) def test_text_masked(self): @@ -59,7 +61,9 @@ def test_text_masked(self): def test_binary_unmasked(self): self.assertFrameData( - Frame(True, OP_BINARY, b"Eggs"), b"\x82\x04Eggs", mask=False, + Frame(True, OP_BINARY, b"Eggs"), + b"\x82\x04Eggs", + mask=False, ) def test_binary_masked(self): @@ -84,13 +88,25 @@ def test_non_ascii_text_masked(self): ) def test_close(self): - self.assertFrameData(Frame(True, OP_CLOSE, b""), b"\x88\x00", mask=False) + self.assertFrameData( + Frame(True, OP_CLOSE, b""), + b"\x88\x00", + mask=False, + ) def test_ping(self): - self.assertFrameData(Frame(True, OP_PING, b"ping"), b"\x89\x04ping", mask=False) + self.assertFrameData( + Frame(True, OP_PING, b"ping"), + b"\x89\x04ping", + mask=False, + ) def test_pong(self): - self.assertFrameData(Frame(True, OP_PONG, b"pong"), b"\x8a\x04pong", mask=False) + self.assertFrameData( + Frame(True, OP_PONG, b"pong"), + b"\x8a\x04pong", + mask=False, + ) def test_long(self): self.assertFrameData( @@ -179,23 +195,34 @@ def decode(frame, *, max_size=None): class PrepareDataTests(unittest.TestCase): def test_prepare_data_str(self): - self.assertEqual(prepare_data("café"), (OP_TEXT, b"caf\xc3\xa9")) + self.assertEqual( + prepare_data("café"), + (OP_TEXT, b"caf\xc3\xa9"), + ) def test_prepare_data_bytes(self): - self.assertEqual(prepare_data(b"tea"), (OP_BINARY, b"tea")) + self.assertEqual( + prepare_data(b"tea"), + (OP_BINARY, b"tea"), + ) def test_prepare_data_bytearray(self): self.assertEqual( - prepare_data(bytearray(b"tea")), (OP_BINARY, bytearray(b"tea")) + prepare_data(bytearray(b"tea")), + (OP_BINARY, bytearray(b"tea")), ) def test_prepare_data_memoryview(self): self.assertEqual( - prepare_data(memoryview(b"tea")), (OP_BINARY, memoryview(b"tea")) + prepare_data(memoryview(b"tea")), + (OP_BINARY, memoryview(b"tea")), ) def test_prepare_data_non_contiguous_memoryview(self): - self.assertEqual(prepare_data(memoryview(b"tteeaa")[::2]), (OP_BINARY, b"tea")) + self.assertEqual( + prepare_data(memoryview(b"tteeaa")[::2]), + (OP_BINARY, b"tea"), + ) def test_prepare_data_list(self): with self.assertRaises(TypeError): diff --git a/tests/test_http11.py b/tests/test_http11.py index 9e4d70620..1cca2053f 100644 --- a/tests/test_http11.py +++ b/tests/test_http11.py @@ -37,32 +37,45 @@ def test_parse_empty(self): with self.assertRaises(EOFError) as raised: next(self.parse()) self.assertEqual( - str(raised.exception), "connection closed while reading HTTP request line" + str(raised.exception), + "connection closed while reading HTTP request line", ) def test_parse_invalid_request_line(self): self.reader.feed_data(b"GET /\r\n\r\n") with self.assertRaises(ValueError) as raised: next(self.parse()) - self.assertEqual(str(raised.exception), "invalid HTTP request 
line: GET /") + self.assertEqual( + str(raised.exception), + "invalid HTTP request line: GET /", + ) def test_parse_unsupported_method(self): self.reader.feed_data(b"OPTIONS * HTTP/1.1\r\n\r\n") with self.assertRaises(ValueError) as raised: next(self.parse()) - self.assertEqual(str(raised.exception), "unsupported HTTP method: OPTIONS") + self.assertEqual( + str(raised.exception), + "unsupported HTTP method: OPTIONS", + ) def test_parse_unsupported_version(self): self.reader.feed_data(b"GET /chat HTTP/1.0\r\n\r\n") with self.assertRaises(ValueError) as raised: next(self.parse()) - self.assertEqual(str(raised.exception), "unsupported HTTP version: HTTP/1.0") + self.assertEqual( + str(raised.exception), + "unsupported HTTP version: HTTP/1.0", + ) def test_parse_invalid_header(self): self.reader.feed_data(b"GET /chat HTTP/1.1\r\nOops\r\n") with self.assertRaises(ValueError) as raised: next(self.parse()) - self.assertEqual(str(raised.exception), "invalid HTTP header line: Oops") + self.assertEqual( + str(raised.exception), + "invalid HTTP header line: Oops", + ) def test_serialize(self): # Example from the protocol overview in RFC 6455 @@ -101,7 +114,7 @@ def setUp(self): def parse(self): return Response.parse( - self.reader.read_line, self.reader.read_exact, self.reader.read_to_eof, + self.reader.read_line, self.reader.read_exact, self.reader.read_to_eof ) def test_parse(self): @@ -132,37 +145,55 @@ def test_parse_invalid_status_line(self): self.reader.feed_data(b"Hello!\r\n") with self.assertRaises(ValueError) as raised: next(self.parse()) - self.assertEqual(str(raised.exception), "invalid HTTP status line: Hello!") + self.assertEqual( + str(raised.exception), + "invalid HTTP status line: Hello!", + ) def test_parse_unsupported_version(self): self.reader.feed_data(b"HTTP/1.0 400 Bad Request\r\n\r\n") with self.assertRaises(ValueError) as raised: next(self.parse()) - self.assertEqual(str(raised.exception), "unsupported HTTP version: HTTP/1.0") + self.assertEqual( + str(raised.exception), + "unsupported HTTP version: HTTP/1.0", + ) def test_parse_invalid_status(self): self.reader.feed_data(b"HTTP/1.1 OMG WTF\r\n\r\n") with self.assertRaises(ValueError) as raised: next(self.parse()) - self.assertEqual(str(raised.exception), "invalid HTTP status code: OMG") + self.assertEqual( + str(raised.exception), + "invalid HTTP status code: OMG", + ) def test_parse_unsupported_status(self): self.reader.feed_data(b"HTTP/1.1 007 My name is Bond\r\n\r\n") with self.assertRaises(ValueError) as raised: next(self.parse()) - self.assertEqual(str(raised.exception), "unsupported HTTP status code: 007") + self.assertEqual( + str(raised.exception), + "unsupported HTTP status code: 007", + ) def test_parse_invalid_reason(self): self.reader.feed_data(b"HTTP/1.1 200 \x7f\r\n\r\n") with self.assertRaises(ValueError) as raised: next(self.parse()) - self.assertEqual(str(raised.exception), "invalid HTTP reason phrase: \x7f") + self.assertEqual( + str(raised.exception), + "invalid HTTP reason phrase: \x7f", + ) def test_parse_invalid_header(self): self.reader.feed_data(b"HTTP/1.1 500 Internal Server Error\r\nOops\r\n") with self.assertRaises(ValueError) as raised: next(self.parse()) - self.assertEqual(str(raised.exception), "invalid HTTP header line: Oops") + self.assertEqual( + str(raised.exception), + "invalid HTTP header line: Oops", + ) def test_parse_body_with_content_length(self): self.reader.feed_data( @@ -183,7 +214,10 @@ def test_parse_body_with_transfer_encoding(self): self.reader.feed_data(b"HTTP/1.1 200 
OK\r\nTransfer-Encoding: chunked\r\n\r\n") with self.assertRaises(NotImplementedError) as raised: next(self.parse()) - self.assertEqual(str(raised.exception), "transfer codings aren't supported") + self.assertEqual( + str(raised.exception), + "transfer codings aren't supported", + ) def test_parse_body_no_content(self): self.reader.feed_data(b"HTTP/1.1 204 No Content\r\n\r\n") From 5bce4c1c5e59c8c3f5ec45de1c94f9047126b885 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 21 Nov 2020 17:44:11 +0100 Subject: [PATCH 225/281] Support IRIs in addition to URIs. Fix #832. --- docs/changelog.rst | 2 ++ src/websockets/uri.py | 18 ++++++++++++++++++ tests/test_uri.py | 5 +++++ 3 files changed, 25 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 68ec6f80c..4c0eb7d2c 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -24,6 +24,8 @@ Changelog Aliases provide backwards compatibility for all previously public APIs. +* Added support for IRIs in addition to URIs. + 8.1 ... diff --git a/src/websockets/uri.py b/src/websockets/uri.py index 6669e5668..ce21b445b 100644 --- a/src/websockets/uri.py +++ b/src/websockets/uri.py @@ -49,6 +49,10 @@ class WebSocketURI(NamedTuple): WebSocketURI.user_info.__doc__ = "" +# All characters from the gen-delims and sub-delims sets in RFC 3987. +DELIMS = ":/?#[]@!$&'()*+,;=" + + def parse_uri(uri: str) -> WebSocketURI: """ Parse and validate a WebSocket URI. @@ -78,4 +82,18 @@ def parse_uri(uri: str) -> WebSocketURI: if parsed.password is None: raise InvalidURI(uri) user_info = (parsed.username, parsed.password) + + try: + uri.encode("ascii") + except UnicodeEncodeError: + # Input contains non-ASCII characters. + # It must be an IRI. Convert it to a URI. + host = host.encode("idna").decode() + resource_name = urllib.parse.quote(resource_name, safe=DELIMS) + if user_info is not None: + user_info = ( + urllib.parse.quote(user_info[0], safe=DELIMS), + urllib.parse.quote(user_info[1], safe=DELIMS), + ) + return WebSocketURI(secure, host, port, resource_name, user_info) diff --git a/tests/test_uri.py b/tests/test_uri.py index e41860b8e..9eeb8431d 100644 --- a/tests/test_uri.py +++ b/tests/test_uri.py @@ -10,6 +10,11 @@ ("ws://localhost/path?query", (False, "localhost", 80, "/path?query", None)), ("WS://LOCALHOST/PATH?QUERY", (False, "localhost", 80, "/PATH?QUERY", None)), ("ws://user:pass@localhost/", (False, "localhost", 80, "/", ("user", "pass"))), + ("ws://høst/", (False, "xn--hst-0na", 80, "/", None)), + ( + "ws://üser:påss@høst/πass", + (False, "xn--hst-0na", 80, "/%CF%80ass", ("%C3%BCser", "p%C3%A5ss")), + ), ] INVALID_URIS = [ From 72d32619650eace78a4d7e797de9369fbee10ada Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 21 Nov 2020 17:54:07 +0100 Subject: [PATCH 226/281] Improve detection of broken connections. Refs #810. 
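
Application code doesn't need to change to benefit from this: once the
transfer task records the exception, the connection is failed and the usual
ConnectionClosed machinery takes over. A minimal consumer sketch showing where
a timed-out connection becomes visible (the URI and the print calls are
illustrative only):

    import websockets

    async def consume(uri):
        async with websockets.connect(uri) as websocket:
            try:
                # Iteration ends cleanly on a normal closure and raises
                # ConnectionClosedError on an abnormal one, e.g. after a
                # TCP reset or timeout.
                async for message in websocket:
                    print(message)
            except websockets.ConnectionClosedError as exc:
                print(f"connection broke with code {exc.code}")
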
--- src/websockets/protocol.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 92ce8e305..39b578aba 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -877,10 +877,11 @@ async def transfer_data(self) -> None: self.transfer_data_exc = exc self.fail_connection(1002) - except (ConnectionError, EOFError) as exc: + except (ConnectionError, TimeoutError, EOFError) as exc: # Reading data with self.reader.readexactly may raise: # - most subclasses of ConnectionError if the TCP connection # breaks, is reset, or is aborted; + # - TimeoutError if the TCP connection times out; # - IncompleteReadError, a subclass of EOFError, if fewer # bytes are available than requested. self.transfer_data_exc = exc From 8061b03b803fb1ce2c7dfcf7bf3cd48f41d34b83 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 21 Nov 2020 18:18:54 +0100 Subject: [PATCH 227/281] Remove loop argument to asyncio.Queue. Prepare compatibility with Python 3.10. Fix #801. --- src/websockets/__main__.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/websockets/__main__.py b/src/websockets/__main__.py index 1a720498d..5013ca04f 100644 --- a/src/websockets/__main__.py +++ b/src/websockets/__main__.py @@ -176,8 +176,13 @@ def main() -> None: # Create an event loop that will run in a background thread. loop = asyncio.new_event_loop() + # Due to zealous removal of the loop parameter in the Queue constructor, + # we need a factory coroutine to run in the freshly created event loop. + async def queue_factory() -> asyncio.Queue[str]: + return asyncio.Queue() + # Create a queue of user inputs. There's no need to limit its size. - inputs: asyncio.Queue[str] = asyncio.Queue(loop=loop) + inputs: asyncio.Queue[str] = loop.run_until_complete(queue_factory()) # Create a stop condition when receiving SIGINT or SIGTERM. stop: asyncio.Future[None] = loop.create_future() From 867a00e5bafa1c8ad412eef06a5b09bac40694dc Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 21 Nov 2020 18:32:38 +0100 Subject: [PATCH 228/281] Eliminate ResourceWarning. --- src/websockets/__main__.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/websockets/__main__.py b/src/websockets/__main__.py index 5013ca04f..bce3e4bbb 100644 --- a/src/websockets/__main__.py +++ b/src/websockets/__main__.py @@ -206,6 +206,10 @@ async def queue_factory() -> asyncio.Queue[str]: # Wait for the event loop to terminate. thread.join() + # For reasons unclear, even though the loop is closed in the thread, + # it still thinks it's running here. + loop.close() + if __name__ == "__main__": main() From 32c9036ac5eee02e5167f93474b22e9cddbc78bd Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 21 Nov 2020 18:33:03 +0100 Subject: [PATCH 229/281] Mask expected deprecation warning. 
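
Wrapping only the import in warnings.catch_warnings() keeps the filter local:
the deprecation is expected at this particular import, but it should still be
reported anywhere else. The same scoped-filter pattern applies to any expected
warning (generic sketch; the module name is hypothetical):

    import warnings

    with warnings.catch_warnings():
        # Ignore only this message, only within this block.
        warnings.filterwarnings(
            "ignore", "somepkg.oldmodule is deprecated", DeprecationWarning
        )
        from somepkg import oldmodule  # noqa

    # Other DeprecationWarnings keep propagating normally from here on.
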
--- src/websockets/protocol.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 39b578aba..677d50f2c 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -54,7 +54,14 @@ prepare_data, serialize_close, ) -from .framing import Frame + + +with warnings.catch_warnings(): + warnings.filterwarnings( + "ignore", "websockets.framing is deprecated", DeprecationWarning + ) + from .framing import Frame + from .typing import Data, Subprotocol From 07775cfaa07b2fb2e31622af03a4fa62820482fb Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 21 Nov 2020 18:56:03 +0100 Subject: [PATCH 230/281] Mark code for removal. Refs #803. --- src/websockets/asyncio_client.py | 2 ++ src/websockets/asyncio_server.py | 2 ++ src/websockets/protocol.py | 2 ++ 3 files changed, 6 insertions(+) diff --git a/src/websockets/asyncio_client.py b/src/websockets/asyncio_client.py index efa29b69a..43e3c1cd2 100644 --- a/src/websockets/asyncio_client.py +++ b/src/websockets/asyncio_client.py @@ -101,6 +101,8 @@ async def read_http_response(self) -> Tuple[int, Headers]: """ try: status_code, reason, headers = await read_response(self.reader) + # Remove this branch when dropping support for Python < 3.8 + # because CancelledError no longer inherits Exception. except asyncio.CancelledError: # pragma: no cover raise except Exception as exc: diff --git a/src/websockets/asyncio_server.py b/src/websockets/asyncio_server.py index fe61c7ddc..b4f7fbc92 100644 --- a/src/websockets/asyncio_server.py +++ b/src/websockets/asyncio_server.py @@ -135,6 +135,8 @@ async def handler(self) -> None: available_subprotocols=self.available_subprotocols, extra_headers=self.extra_headers, ) + # Remove this branch when dropping support for Python < 3.8 + # because CancelledError no longer inherits Exception. except asyncio.CancelledError: # pragma: no cover raise except ConnectionError: diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 677d50f2c..ba4fc1d3c 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -1169,6 +1169,8 @@ async def keepalive_ping(self) -> None: self.fail_connection(1011) break + # Remove this branch when dropping support for Python < 3.8 + # because CancelledError no longer inherits Exception. except asyncio.CancelledError: raise From a58540d681fc858fc43fcfaf7a6be33f177446a7 Mon Sep 17 00:00:00 2001 From: konichuvak Date: Thu, 27 Aug 2020 16:26:46 -0400 Subject: [PATCH 231/281] Adds 1012-1014 close codes. Also replac. `list` with a `set` for faster close code lookups. 
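
With 1012-1014 registered, endpoints can close with the IANA codes for
transient server-side failures and peers can react accordingly. A short usage
sketch (assumes an established connection; schedule_reconnect is a
hypothetical application callback):

    import websockets

    async def shutdown(websocket):
        # Server side: announce a restart while closing.
        await websocket.close(code=1012, reason="service restart")

    async def consume_until_restart(websocket):
        # Client side: the close code is available on the exception.
        try:
            while True:
                await websocket.recv()
        except websockets.ConnectionClosedError as exc:
            if exc.code in (1012, 1013):
                # 1012: service restart, 1013: try again later.
                schedule_reconnect()  # hypothetical application callback
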
--- src/websockets/exceptions.py | 4 ++++ src/websockets/frames.py | 17 +++++++++++++++-- 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/src/websockets/exceptions.py b/src/websockets/exceptions.py index 84c27692c..bdadae05e 100644 --- a/src/websockets/exceptions.py +++ b/src/websockets/exceptions.py @@ -68,6 +68,7 @@ class WebSocketException(Exception): """ +# See https://www.iana.org/assignments/websocket/websocket.xhtml CLOSE_CODES = { 1000: "OK", 1001: "going away", @@ -81,6 +82,9 @@ class WebSocketException(Exception): 1009: "message too big", 1010: "extension required", 1011: "unexpected error", + 1012: "service restart", + 1013: "try again later", + 1014: "bad gateway", 1015: "TLS failure [internal]", } diff --git a/src/websockets/frames.py b/src/websockets/frames.py index 2ff9dbd91..74223c0e8 100644 --- a/src/websockets/frames.py +++ b/src/websockets/frames.py @@ -53,8 +53,21 @@ class Opcode(enum.IntEnum): CTRL_OPCODES = OP_CLOSE, OP_PING, OP_PONG # Close code that are allowed in a close frame. -# Using a list optimizes `code in EXTERNAL_CLOSE_CODES`. -EXTERNAL_CLOSE_CODES = [1000, 1001, 1002, 1003, 1007, 1008, 1009, 1010, 1011] +# Using a set optimizes `code in EXTERNAL_CLOSE_CODES`. +EXTERNAL_CLOSE_CODES = { + 1000, + 1001, + 1002, + 1003, + 1007, + 1008, + 1009, + 1010, + 1011, + 1012, + 1013, + 1014, +} # Consider converting to a dataclass when dropping support for Python < 3.7. From 189671d990a3ecf2d8bf5c7e0c4d97abc9167c20 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 21 Nov 2020 19:06:18 +0100 Subject: [PATCH 232/281] Add changelog for previous commit. --- docs/changelog.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 4c0eb7d2c..c131f0528 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -31,6 +31,8 @@ Changelog * Added compatibility with Python 3.8. +* Added close codes 1012, 1013, and 1014. + 8.0.2 ..... From b39f62a066bde151b7551a0d445705481e247e9b Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 21 Nov 2020 20:19:47 +0100 Subject: [PATCH 233/281] Log exceptions consistently. This was the only use of the exception method (vs. exc_info=True). --- src/websockets/connection.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/websockets/connection.py b/src/websockets/connection.py index a98d0b1e7..4a75bede9 100644 --- a/src/websockets/connection.py +++ b/src/websockets/connection.py @@ -282,7 +282,7 @@ def step_parser(self) -> None: self.parser_exc = exc raise except Exception as exc: - logger.exception("unexpected exception in parser") + logger.error("unexpected exception in parser", exc_info=True) # Don't include exception details, which may be security-sensitive. self.fail_connection(1011) self.parser_exc = exc From 984da0efa69c0fe3518f1bb81d43775f5ef66902 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 28 Nov 2020 13:21:31 +0100 Subject: [PATCH 234/281] Rename bytes_to_send to data_to_send. Since this function doesn't return bytes, but an iterable of bytes, the name was confusing. 
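
The Sans-I/O calling pattern is unchanged by the rename; the new name simply
reflects that the method yields an iterable of bytes-like chunks rather than a
single bytes object. A rough sketch of an I/O loop driving this API (the
transport and the incoming data are placeholders for whatever network layer is
used):

    from websockets.connection import Connection, Side

    connection = Connection(Side.SERVER)

    def handle_incoming(transport, network_data: bytes) -> None:
        connection.receive_data(network_data)
        # Write whatever the protocol produced in response
        # (this is the call that used to be named bytes_to_send()).
        for data in connection.data_to_send():
            transport.write(data)
        # Then hand parsed frames to the application.
        for event in connection.events_received():
            handle_event(event)  # hypothetical application hook
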
--- src/websockets/connection.py | 6 +-- tests/test_client.py | 2 +- tests/test_connection.py | 94 ++++++++++++++++++------------------ tests/test_server.py | 4 +- 4 files changed, 53 insertions(+), 53 deletions(-) diff --git a/src/websockets/connection.py b/src/websockets/connection.py index 4a75bede9..aeb774f00 100644 --- a/src/websockets/connection.py +++ b/src/websockets/connection.py @@ -126,7 +126,7 @@ def receive_data(self, data: bytes) -> None: After calling this method: - - You must call :meth:`bytes_to_send` and send this data. + - You must call :meth:`data_to_send` and send this data. - You should call :meth:`events_received` and process these events. """ @@ -139,7 +139,7 @@ def receive_eof(self) -> None: After calling this method: - - You must call :meth:`bytes_to_send` and send this data. + - You must call :meth:`data_to_send` and send this data. - You shouldn't call :meth:`events_received` as it won't return any new events. @@ -228,7 +228,7 @@ def events_received(self) -> List[Event]: # Public API for getting outgoing data after receiving data or sending events. - def bytes_to_send(self) -> List[bytes]: + def data_to_send(self) -> List[bytes]: """ Return data to write to the connection. diff --git a/tests/test_client.py b/tests/test_client.py index 7a78ee09b..747594bf3 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -27,7 +27,7 @@ def test_send_connect(self): self.assertIsInstance(request, Request) client.send_request(request) self.assertEqual( - client.bytes_to_send(), + client.data_to_send(), [ f"GET /test HTTP/1.1\r\n" f"Host: example.com\r\n" diff --git a/tests/test_connection.py b/tests/test_connection.py index d47147d64..3e39a3f9e 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -35,7 +35,7 @@ def assertFrameSent(self, connection, frame, eof=False): mask=connection.side is Side.CLIENT, extensions=connection.extensions, ) - for write in connection.bytes_to_send() + for write in connection.data_to_send() ] frames_expected = [] if frame is None else [frame] if eof: @@ -101,12 +101,12 @@ def test_client_sends_masked_frame(self): client = Connection(Side.CLIENT) with self.enforce_mask(b"\x00\xff\x00\xff"): client.send_text(b"Spam", True) - self.assertEqual(client.bytes_to_send(), [self.masked_text_frame_data]) + self.assertEqual(client.data_to_send(), [self.masked_text_frame_data]) def test_server_sends_unmasked_frame(self): server = Connection(Side.SERVER) server.send_text(b"Spam", True) - self.assertEqual(server.bytes_to_send(), [self.unmasked_text_frame_date]) + self.assertEqual(server.data_to_send(), [self.unmasked_text_frame_date]) def test_client_receives_unmasked_frame(self): client = Connection(Side.CLIENT) @@ -178,7 +178,7 @@ def test_client_sends_continuation_after_sending_close(self): # this is the same test as test_client_sends_unexpected_continuation. with self.enforce_mask(b"\x00\x00\x00\x00"): client.send_close(1001) - self.assertEqual(client.bytes_to_send(), [b"\x88\x82\x00\x00\x00\x00\x03\xe9"]) + self.assertEqual(client.data_to_send(), [b"\x88\x82\x00\x00\x00\x00\x03\xe9"]) with self.assertRaises(ProtocolError) as raised: client.send_continuation(b"", fin=False) self.assertEqual(str(raised.exception), "unexpected continuation frame") @@ -189,7 +189,7 @@ def test_server_sends_continuation_after_sending_close(self): # this is the same test as test_server_sends_unexpected_continuation. 
server = Connection(Side.SERVER) server.send_close(1000) - self.assertEqual(server.bytes_to_send(), [b"\x88\x02\x03\xe8", b""]) + self.assertEqual(server.data_to_send(), [b"\x88\x02\x03\xe8", b""]) with self.assertRaises(ProtocolError) as raised: server.send_continuation(b"", fin=False) self.assertEqual(str(raised.exception), "unexpected continuation frame") @@ -222,13 +222,13 @@ def test_client_sends_text(self): with self.enforce_mask(b"\x00\x00\x00\x00"): client.send_text("😀".encode()) self.assertEqual( - client.bytes_to_send(), [b"\x81\x84\x00\x00\x00\x00\xf0\x9f\x98\x80"] + client.data_to_send(), [b"\x81\x84\x00\x00\x00\x00\xf0\x9f\x98\x80"] ) def test_server_sends_text(self): server = Connection(Side.SERVER) server.send_text("😀".encode()) - self.assertEqual(server.bytes_to_send(), [b"\x81\x04\xf0\x9f\x98\x80"]) + self.assertEqual(server.data_to_send(), [b"\x81\x04\xf0\x9f\x98\x80"]) def test_client_receives_text(self): client = Connection(Side.CLIENT) @@ -280,24 +280,24 @@ def test_client_sends_fragmented_text(self): client = Connection(Side.CLIENT) with self.enforce_mask(b"\x00\x00\x00\x00"): client.send_text("😀".encode()[:2], fin=False) - self.assertEqual(client.bytes_to_send(), [b"\x01\x82\x00\x00\x00\x00\xf0\x9f"]) + self.assertEqual(client.data_to_send(), [b"\x01\x82\x00\x00\x00\x00\xf0\x9f"]) with self.enforce_mask(b"\x00\x00\x00\x00"): client.send_continuation("😀😀".encode()[2:6], fin=False) self.assertEqual( - client.bytes_to_send(), [b"\x00\x84\x00\x00\x00\x00\x98\x80\xf0\x9f"] + client.data_to_send(), [b"\x00\x84\x00\x00\x00\x00\x98\x80\xf0\x9f"] ) with self.enforce_mask(b"\x00\x00\x00\x00"): client.send_continuation("😀".encode()[2:], fin=True) - self.assertEqual(client.bytes_to_send(), [b"\x80\x82\x00\x00\x00\x00\x98\x80"]) + self.assertEqual(client.data_to_send(), [b"\x80\x82\x00\x00\x00\x00\x98\x80"]) def test_server_sends_fragmented_text(self): server = Connection(Side.SERVER) server.send_text("😀".encode()[:2], fin=False) - self.assertEqual(server.bytes_to_send(), [b"\x01\x02\xf0\x9f"]) + self.assertEqual(server.data_to_send(), [b"\x01\x02\xf0\x9f"]) server.send_continuation("😀😀".encode()[2:6], fin=False) - self.assertEqual(server.bytes_to_send(), [b"\x00\x04\x98\x80\xf0\x9f"]) + self.assertEqual(server.data_to_send(), [b"\x00\x04\x98\x80\xf0\x9f"]) server.send_continuation("😀".encode()[2:], fin=True) - self.assertEqual(server.bytes_to_send(), [b"\x80\x02\x98\x80"]) + self.assertEqual(server.data_to_send(), [b"\x80\x02\x98\x80"]) def test_client_receives_fragmented_text(self): client = Connection(Side.CLIENT) @@ -437,14 +437,14 @@ def test_client_sends_text_after_sending_close(self): client = Connection(Side.CLIENT) with self.enforce_mask(b"\x00\x00\x00\x00"): client.send_close(1001) - self.assertEqual(client.bytes_to_send(), [b"\x88\x82\x00\x00\x00\x00\x03\xe9"]) + self.assertEqual(client.data_to_send(), [b"\x88\x82\x00\x00\x00\x00\x03\xe9"]) with self.assertRaises(InvalidState): client.send_text(b"") def test_server_sends_text_after_sending_close(self): server = Connection(Side.SERVER) server.send_close(1000) - self.assertEqual(server.bytes_to_send(), [b"\x88\x02\x03\xe8", b""]) + self.assertEqual(server.data_to_send(), [b"\x88\x02\x03\xe8", b""]) with self.assertRaises(InvalidState): server.send_text(b"") @@ -476,13 +476,13 @@ def test_client_sends_binary(self): with self.enforce_mask(b"\x00\x00\x00\x00"): client.send_binary(b"\x01\x02\xfe\xff") self.assertEqual( - client.bytes_to_send(), [b"\x82\x84\x00\x00\x00\x00\x01\x02\xfe\xff"] + client.data_to_send(), 
[b"\x82\x84\x00\x00\x00\x00\x01\x02\xfe\xff"] ) def test_server_sends_binary(self): server = Connection(Side.SERVER) server.send_binary(b"\x01\x02\xfe\xff") - self.assertEqual(server.bytes_to_send(), [b"\x82\x04\x01\x02\xfe\xff"]) + self.assertEqual(server.data_to_send(), [b"\x82\x04\x01\x02\xfe\xff"]) def test_client_receives_binary(self): client = Connection(Side.CLIENT) @@ -518,24 +518,24 @@ def test_client_sends_fragmented_binary(self): client = Connection(Side.CLIENT) with self.enforce_mask(b"\x00\x00\x00\x00"): client.send_binary(b"\x01\x02", fin=False) - self.assertEqual(client.bytes_to_send(), [b"\x02\x82\x00\x00\x00\x00\x01\x02"]) + self.assertEqual(client.data_to_send(), [b"\x02\x82\x00\x00\x00\x00\x01\x02"]) with self.enforce_mask(b"\x00\x00\x00\x00"): client.send_continuation(b"\xee\xff\x01\x02", fin=False) self.assertEqual( - client.bytes_to_send(), [b"\x00\x84\x00\x00\x00\x00\xee\xff\x01\x02"] + client.data_to_send(), [b"\x00\x84\x00\x00\x00\x00\xee\xff\x01\x02"] ) with self.enforce_mask(b"\x00\x00\x00\x00"): client.send_continuation(b"\xee\xff", fin=True) - self.assertEqual(client.bytes_to_send(), [b"\x80\x82\x00\x00\x00\x00\xee\xff"]) + self.assertEqual(client.data_to_send(), [b"\x80\x82\x00\x00\x00\x00\xee\xff"]) def test_server_sends_fragmented_binary(self): server = Connection(Side.SERVER) server.send_binary(b"\x01\x02", fin=False) - self.assertEqual(server.bytes_to_send(), [b"\x02\x02\x01\x02"]) + self.assertEqual(server.data_to_send(), [b"\x02\x02\x01\x02"]) server.send_continuation(b"\xee\xff\x01\x02", fin=False) - self.assertEqual(server.bytes_to_send(), [b"\x00\x04\xee\xff\x01\x02"]) + self.assertEqual(server.data_to_send(), [b"\x00\x04\xee\xff\x01\x02"]) server.send_continuation(b"\xee\xff", fin=True) - self.assertEqual(server.bytes_to_send(), [b"\x80\x02\xee\xff"]) + self.assertEqual(server.data_to_send(), [b"\x80\x02\xee\xff"]) def test_client_receives_fragmented_binary(self): client = Connection(Side.CLIENT) @@ -639,14 +639,14 @@ def test_client_sends_binary_after_sending_close(self): client = Connection(Side.CLIENT) with self.enforce_mask(b"\x00\x00\x00\x00"): client.send_close(1001) - self.assertEqual(client.bytes_to_send(), [b"\x88\x82\x00\x00\x00\x00\x03\xe9"]) + self.assertEqual(client.data_to_send(), [b"\x88\x82\x00\x00\x00\x00\x03\xe9"]) with self.assertRaises(InvalidState): client.send_binary(b"") def test_server_sends_binary_after_sending_close(self): server = Connection(Side.SERVER) server.send_close(1000) - self.assertEqual(server.bytes_to_send(), [b"\x88\x02\x03\xe8", b""]) + self.assertEqual(server.data_to_send(), [b"\x88\x02\x03\xe8", b""]) with self.assertRaises(InvalidState): server.send_binary(b"") @@ -677,13 +677,13 @@ def test_client_sends_close(self): client = Connection(Side.CLIENT) with self.enforce_mask(b"\x3c\x3c\x3c\x3c"): client.send_close() - self.assertEqual(client.bytes_to_send(), [b"\x88\x80\x3c\x3c\x3c\x3c"]) + self.assertEqual(client.data_to_send(), [b"\x88\x80\x3c\x3c\x3c\x3c"]) self.assertIs(client.state, State.CLOSING) def test_server_sends_close(self): server = Connection(Side.SERVER) server.send_close() - self.assertEqual(server.bytes_to_send(), [b"\x88\x00", b""]) + self.assertEqual(server.data_to_send(), [b"\x88\x00", b""]) self.assertIs(server.state, State.CLOSING) def test_client_receives_close(self): @@ -691,14 +691,14 @@ def test_client_receives_close(self): with self.enforce_mask(b"\x3c\x3c\x3c\x3c"): client.receive_data(b"\x88\x00") self.assertEqual(client.events_received(), [Frame(True, OP_CLOSE, b"")]) - 
self.assertEqual(client.bytes_to_send(), [b"\x88\x80\x3c\x3c\x3c\x3c"]) + self.assertEqual(client.data_to_send(), [b"\x88\x80\x3c\x3c\x3c\x3c"]) self.assertIs(client.state, State.CLOSING) def test_server_receives_close(self): server = Connection(Side.SERVER) server.receive_data(b"\x88\x80\x3c\x3c\x3c\x3c") self.assertEqual(server.events_received(), [Frame(True, OP_CLOSE, b"")]) - self.assertEqual(server.bytes_to_send(), [b"\x88\x00", b""]) + self.assertEqual(server.data_to_send(), [b"\x88\x00", b""]) self.assertIs(server.state, State.CLOSING) def test_client_sends_close_then_receives_close(self): @@ -761,13 +761,13 @@ def test_client_sends_close_with_code(self): client = Connection(Side.CLIENT) with self.enforce_mask(b"\x00\x00\x00\x00"): client.send_close(1001) - self.assertEqual(client.bytes_to_send(), [b"\x88\x82\x00\x00\x00\x00\x03\xe9"]) + self.assertEqual(client.data_to_send(), [b"\x88\x82\x00\x00\x00\x00\x03\xe9"]) self.assertIs(client.state, State.CLOSING) def test_server_sends_close_with_code(self): server = Connection(Side.SERVER) server.send_close(1000) - self.assertEqual(server.bytes_to_send(), [b"\x88\x02\x03\xe8", b""]) + self.assertEqual(server.data_to_send(), [b"\x88\x02\x03\xe8", b""]) self.assertIs(server.state, State.CLOSING) def test_client_receives_close_with_code(self): @@ -787,14 +787,14 @@ def test_client_sends_close_with_code_and_reason(self): with self.enforce_mask(b"\x00\x00\x00\x00"): client.send_close(1001, "going away") self.assertEqual( - client.bytes_to_send(), [b"\x88\x8c\x00\x00\x00\x00\x03\xe9going away"] + client.data_to_send(), [b"\x88\x8c\x00\x00\x00\x00\x03\xe9going away"] ) self.assertIs(client.state, State.CLOSING) def test_server_sends_close_with_code_and_reason(self): server = Connection(Side.SERVER) server.send_close(1000, "OK") - self.assertEqual(server.bytes_to_send(), [b"\x88\x04\x03\xe8OK", b""]) + self.assertEqual(server.data_to_send(), [b"\x88\x04\x03\xe8OK", b""]) self.assertIs(server.state, State.CLOSING) def test_client_receives_close_with_code_and_reason(self): @@ -870,12 +870,12 @@ def test_client_sends_ping(self): client = Connection(Side.CLIENT) with self.enforce_mask(b"\x00\x44\x88\xcc"): client.send_ping(b"") - self.assertEqual(client.bytes_to_send(), [b"\x89\x80\x00\x44\x88\xcc"]) + self.assertEqual(client.data_to_send(), [b"\x89\x80\x00\x44\x88\xcc"]) def test_server_sends_ping(self): server = Connection(Side.SERVER) server.send_ping(b"") - self.assertEqual(server.bytes_to_send(), [b"\x89\x00"]) + self.assertEqual(server.data_to_send(), [b"\x89\x00"]) def test_client_receives_ping(self): client = Connection(Side.CLIENT) @@ -906,13 +906,13 @@ def test_client_sends_ping_with_data(self): with self.enforce_mask(b"\x00\x44\x88\xcc"): client.send_ping(b"\x22\x66\xaa\xee") self.assertEqual( - client.bytes_to_send(), [b"\x89\x84\x00\x44\x88\xcc\x22\x22\x22\x22"] + client.data_to_send(), [b"\x89\x84\x00\x44\x88\xcc\x22\x22\x22\x22"] ) def test_server_sends_ping_with_data(self): server = Connection(Side.SERVER) server.send_ping(b"\x22\x66\xaa\xee") - self.assertEqual(server.bytes_to_send(), [b"\x89\x04\x22\x66\xaa\xee"]) + self.assertEqual(server.data_to_send(), [b"\x89\x04\x22\x66\xaa\xee"]) def test_client_receives_ping_with_data(self): client = Connection(Side.CLIENT) @@ -970,7 +970,7 @@ def test_client_sends_ping_after_sending_close(self): client = Connection(Side.CLIENT) with self.enforce_mask(b"\x00\x00\x00\x00"): client.send_close(1001) - self.assertEqual(client.bytes_to_send(), [b"\x88\x82\x00\x00\x00\x00\x03\xe9"]) + 
self.assertEqual(client.data_to_send(), [b"\x88\x82\x00\x00\x00\x00\x03\xe9"]) # The spec says: "An endpoint MAY send a Ping frame any time (...) # before the connection is closed" but websockets doesn't support # sending a Ping frame after a Close frame. @@ -983,7 +983,7 @@ def test_client_sends_ping_after_sending_close(self): def test_server_sends_ping_after_sending_close(self): server = Connection(Side.SERVER) server.send_close(1000) - self.assertEqual(server.bytes_to_send(), [b"\x88\x02\x03\xe8", b""]) + self.assertEqual(server.data_to_send(), [b"\x88\x02\x03\xe8", b""]) # The spec says: "An endpoint MAY send a Ping frame any time (...) # before the connection is closed" but websockets doesn't support # sending a Ping frame after a Close frame. @@ -1026,12 +1026,12 @@ def test_client_sends_pong(self): client = Connection(Side.CLIENT) with self.enforce_mask(b"\x00\x44\x88\xcc"): client.send_pong(b"") - self.assertEqual(client.bytes_to_send(), [b"\x8a\x80\x00\x44\x88\xcc"]) + self.assertEqual(client.data_to_send(), [b"\x8a\x80\x00\x44\x88\xcc"]) def test_server_sends_pong(self): server = Connection(Side.SERVER) server.send_pong(b"") - self.assertEqual(server.bytes_to_send(), [b"\x8a\x00"]) + self.assertEqual(server.data_to_send(), [b"\x8a\x00"]) def test_client_receives_pong(self): client = Connection(Side.CLIENT) @@ -1054,13 +1054,13 @@ def test_client_sends_pong_with_data(self): with self.enforce_mask(b"\x00\x44\x88\xcc"): client.send_pong(b"\x22\x66\xaa\xee") self.assertEqual( - client.bytes_to_send(), [b"\x8a\x84\x00\x44\x88\xcc\x22\x22\x22\x22"] + client.data_to_send(), [b"\x8a\x84\x00\x44\x88\xcc\x22\x22\x22\x22"] ) def test_server_sends_pong_with_data(self): server = Connection(Side.SERVER) server.send_pong(b"\x22\x66\xaa\xee") - self.assertEqual(server.bytes_to_send(), [b"\x8a\x04\x22\x66\xaa\xee"]) + self.assertEqual(server.data_to_send(), [b"\x8a\x04\x22\x66\xaa\xee"]) def test_client_receives_pong_with_data(self): client = Connection(Side.CLIENT) @@ -1110,7 +1110,7 @@ def test_client_sends_pong_after_sending_close(self): client = Connection(Side.CLIENT) with self.enforce_mask(b"\x00\x00\x00\x00"): client.send_close(1001) - self.assertEqual(client.bytes_to_send(), [b"\x88\x82\x00\x00\x00\x00\x03\xe9"]) + self.assertEqual(client.data_to_send(), [b"\x88\x82\x00\x00\x00\x00\x03\xe9"]) # websockets doesn't support sending a Pong frame after a Close frame. with self.assertRaises(InvalidState): client.send_pong(b"") @@ -1118,7 +1118,7 @@ def test_client_sends_pong_after_sending_close(self): def test_server_sends_pong_after_sending_close(self): server = Connection(Side.SERVER) server.send_close(1000) - self.assertEqual(server.bytes_to_send(), [b"\x88\x02\x03\xe8", b""]) + self.assertEqual(server.data_to_send(), [b"\x88\x02\x03\xe8", b""]) # websockets doesn't support sending a Pong frame after a Close frame. 
with self.assertRaises(InvalidState): server.send_pong(b"") @@ -1465,13 +1465,13 @@ def test_client_extension_encodes_frame(self): client.extensions = [Rsv2Extension()] with self.enforce_mask(b"\x00\x44\x88\xcc"): client.send_ping(b"") - self.assertEqual(client.bytes_to_send(), [b"\xa9\x80\x00\x44\x88\xcc"]) + self.assertEqual(client.data_to_send(), [b"\xa9\x80\x00\x44\x88\xcc"]) def test_server_extension_encodes_frame(self): server = Connection(Side.SERVER) server.extensions = [Rsv2Extension()] server.send_ping(b"") - self.assertEqual(server.bytes_to_send(), [b"\xa9\x00"]) + self.assertEqual(server.data_to_send(), [b"\xa9\x00"]) def test_client_extension_decodes_frame(self): client = Connection(Side.CLIENT) diff --git a/tests/test_server.py b/tests/test_server.py index a180b08e2..ad56a37bc 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -91,7 +91,7 @@ def test_send_accept(self): self.assertIsInstance(response, Response) server.send_response(response) self.assertEqual( - server.bytes_to_send(), + server.data_to_send(), [ f"HTTP/1.1 101 Switching Protocols\r\n" f"Upgrade: websocket\r\n" @@ -111,7 +111,7 @@ def test_send_reject(self): self.assertIsInstance(response, Response) server.send_response(response) self.assertEqual( - server.bytes_to_send(), + server.data_to_send(), [ f"HTTP/1.1 404 Not Found\r\n" f"Date: {DATE}\r\n" From 44a5453612e7020d1305355c74c3d08ee4db4e91 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 28 Nov 2020 15:16:46 +0100 Subject: [PATCH 235/281] Extract logic for auto-configuring compression. --- src/websockets/asyncio_client.py | 12 +-- src/websockets/asyncio_server.py | 10 +- .../extensions/permessage_deflate.py | 45 +++++++++ tests/extensions/test_base.py | 36 +++++++ tests/extensions/test_permessage_deflate.py | 94 +++++++++++++++++++ tests/test_asyncio_client_server.py | 66 +------------ 6 files changed, 184 insertions(+), 79 deletions(-) diff --git a/src/websockets/asyncio_client.py b/src/websockets/asyncio_client.py index 43e3c1cd2..d22ba764a 100644 --- a/src/websockets/asyncio_client.py +++ b/src/websockets/asyncio_client.py @@ -22,7 +22,7 @@ SecurityError, ) from .extensions.base import ClientExtensionFactory, Extension -from .extensions.permessage_deflate import ClientPerMessageDeflateFactory +from .extensions.permessage_deflate import enable_client_permessage_deflate from .handshake_legacy import build_request, check_response from .headers import ( build_authorization_basic, @@ -425,15 +425,7 @@ def __init__( ) if compression == "deflate": - if extensions is None: - extensions = [] - if not any( - extension_factory.name == ClientPerMessageDeflateFactory.name - for extension_factory in extensions - ): - extensions = list(extensions) + [ - ClientPerMessageDeflateFactory(client_max_window_bits=True) - ] + extensions = enable_client_permessage_deflate(extensions) elif compression is not None: raise ValueError(f"unsupported compression: {compression}") diff --git a/src/websockets/asyncio_server.py b/src/websockets/asyncio_server.py index b4f7fbc92..79ceddf4b 100644 --- a/src/websockets/asyncio_server.py +++ b/src/websockets/asyncio_server.py @@ -39,7 +39,7 @@ NegotiationError, ) from .extensions.base import Extension, ServerExtensionFactory -from .extensions.permessage_deflate import ServerPerMessageDeflateFactory +from .extensions.permessage_deflate import enable_server_permessage_deflate from .handshake_legacy import build_response, check_request from .headers import build_extension, parse_extension, parse_subprotocol from .http 
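
Because the extracted helpers are plain functions, the defaults used by
connect() and serve() can now be reused when assembling an extension list by
hand; behavior matches the tests added below:

    from websockets.extensions.permessage_deflate import (
        ClientPerMessageDeflateFactory,
        enable_client_permessage_deflate,
    )

    # Nothing configured yet: the factory is appended with default settings.
    extensions = enable_client_permessage_deflate(None)

    # Already configured, e.g. with a custom compression level: the existing
    # factory is kept as-is and no duplicate is added.
    extensions = enable_client_permessage_deflate(
        [ClientPerMessageDeflateFactory(compress_settings={"level": 1})]
    )
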
import USER_AGENT @@ -903,13 +903,7 @@ def __init__( secure = kwargs.get("ssl") is not None if compression == "deflate": - if extensions is None: - extensions = [] - if not any( - ext_factory.name == ServerPerMessageDeflateFactory.name - for ext_factory in extensions - ): - extensions = list(extensions) + [ServerPerMessageDeflateFactory()] + extensions = enable_server_permessage_deflate(extensions) elif compression is not None: raise ValueError(f"unsupported compression: {compression}") diff --git a/src/websockets/extensions/permessage_deflate.py b/src/websockets/extensions/permessage_deflate.py index 184183061..9a3fc4ba5 100644 --- a/src/websockets/extensions/permessage_deflate.py +++ b/src/websockets/extensions/permessage_deflate.py @@ -22,7 +22,9 @@ __all__ = [ "PerMessageDeflate", "ClientPerMessageDeflateFactory", + "enable_client_permessage_deflate", "ServerPerMessageDeflateFactory", + "enable_server_permessage_deflate", ] _EMPTY_UNCOMPRESSED_BLOCK = b"\x00\x00\xff\xff" @@ -424,6 +426,29 @@ def process_response_params( ) +def enable_client_permessage_deflate( + extensions: Optional[Sequence[ClientExtensionFactory]], +) -> Sequence[ClientExtensionFactory]: + """ + Enable Per-Message Deflate with default settings in client extensions. + + If the extension is already present, perhaps with non-default settings, + the configuration isn't changed. + + + """ + if extensions is None: + extensions = [] + if not any( + extension_factory.name == ClientPerMessageDeflateFactory.name + for extension_factory in extensions + ): + extensions = list(extensions) + [ + ClientPerMessageDeflateFactory(client_max_window_bits=True) + ] + return extensions + + class ServerPerMessageDeflateFactory(ServerExtensionFactory): """ Server-side extension factory for the Per-Message Deflate extension. @@ -584,3 +609,23 @@ def process_request_params( self.compress_settings, ), ) + + +def enable_server_permessage_deflate( + extensions: Optional[Sequence[ServerExtensionFactory]], +) -> Sequence[ServerExtensionFactory]: + """ + Enable Per-Message Deflate with default settings in server extensions. + + If the extension is already present, perhaps with non-default settings, + the configuration isn't changed. + + """ + if extensions is None: + extensions = [] + if not any( + ext_factory.name == ServerPerMessageDeflateFactory.name + for ext_factory in extensions + ): + extensions = list(extensions) + [ServerPerMessageDeflateFactory()] + return extensions diff --git a/tests/extensions/test_base.py b/tests/extensions/test_base.py index ba8657b65..0daa34211 100644 --- a/tests/extensions/test_base.py +++ b/tests/extensions/test_base.py @@ -1,4 +1,40 @@ +from websockets.exceptions import NegotiationError from websockets.extensions.base import * # noqa # Abstract classes don't provide any behavior to test. 
+ + +class ClientNoOpExtensionFactory: + name = "x-no-op" + + def get_request_params(self): + return [] + + def process_response_params(self, params, accepted_extensions): + if params: + raise NegotiationError() + return NoOpExtension() + + +class ServerNoOpExtensionFactory: + name = "x-no-op" + + def __init__(self, params=None): + self.params = params or [] + + def process_request_params(self, params, accepted_extensions): + return self.params, NoOpExtension() + + +class NoOpExtension: + name = "x-no-op" + + def __repr__(self): + return "NoOpExtension()" + + def decode(self, frame, *, max_size=None): + return frame + + def encode(self, frame): + return frame diff --git a/tests/extensions/test_permessage_deflate.py b/tests/extensions/test_permessage_deflate.py index f9fca1999..328861e58 100644 --- a/tests/extensions/test_permessage_deflate.py +++ b/tests/extensions/test_permessage_deflate.py @@ -20,6 +20,8 @@ serialize_close, ) +from .test_base import ClientNoOpExtensionFactory, ServerNoOpExtensionFactory + class ExtensionTestsMixin: def assertExtensionEqual(self, extension1, extension2): @@ -500,6 +502,52 @@ def test_process_response_params_deduplication(self): [], [PerMessageDeflate(False, False, 15, 15)] ) + def test_enable_client_permessage_deflate(self): + for extensions, ( + expected_len, + expected_position, + expected_compress_settings, + ) in [ + ( + None, + (1, 0, None), + ), + ( + [], + (1, 0, None), + ), + ( + [ClientNoOpExtensionFactory()], + (2, 1, None), + ), + ( + [ClientPerMessageDeflateFactory(compress_settings={"level": 1})], + (1, 0, {"level": 1}), + ), + ( + [ + ClientPerMessageDeflateFactory(compress_settings={"level": 1}), + ClientNoOpExtensionFactory(), + ], + (2, 0, {"level": 1}), + ), + ( + [ + ClientNoOpExtensionFactory(), + ClientPerMessageDeflateFactory(compress_settings={"level": 1}), + ], + (2, 1, {"level": 1}), + ), + ]: + with self.subTest(extensions=extensions): + extensions = enable_client_permessage_deflate(extensions) + self.assertEqual(len(extensions), expected_len) + extension = extensions[expected_position] + self.assertIsInstance(extension, ClientPerMessageDeflateFactory) + self.assertEqual( + extension.compress_settings, expected_compress_settings + ) + class ServerPerMessageDeflateFactoryTests(unittest.TestCase, ExtensionTestsMixin): def test_name(self): @@ -790,3 +838,49 @@ def test_process_response_params_deduplication(self): factory.process_request_params( [], [PerMessageDeflate(False, False, 15, 15)] ) + + def test_enable_server_permessage_deflate(self): + for extensions, ( + expected_len, + expected_position, + expected_compress_settings, + ) in [ + ( + None, + (1, 0, None), + ), + ( + [], + (1, 0, None), + ), + ( + [ServerNoOpExtensionFactory()], + (2, 1, None), + ), + ( + [ServerPerMessageDeflateFactory(compress_settings={"level": 1})], + (1, 0, {"level": 1}), + ), + ( + [ + ServerPerMessageDeflateFactory(compress_settings={"level": 1}), + ServerNoOpExtensionFactory(), + ], + (2, 0, {"level": 1}), + ), + ( + [ + ServerNoOpExtensionFactory(), + ServerPerMessageDeflateFactory(compress_settings={"level": 1}), + ], + (2, 1, {"level": 1}), + ), + ]: + with self.subTest(extensions=extensions): + extensions = enable_server_permessage_deflate(extensions) + self.assertEqual(len(extensions), expected_len) + extension = extensions[expected_position] + self.assertIsInstance(extension, ServerPerMessageDeflateFactory) + self.assertEqual( + extension.compress_settings, expected_compress_settings + ) diff --git a/tests/test_asyncio_client_server.py 
b/tests/test_asyncio_client_server.py index cff76d1f2..76c29334e 100644 --- a/tests/test_asyncio_client_server.py +++ b/tests/test_asyncio_client_server.py @@ -34,6 +34,11 @@ from websockets.protocol import State from websockets.uri import parse_uri +from .extensions.test_base import ( + ClientNoOpExtensionFactory, + NoOpExtension, + ServerNoOpExtensionFactory, +) from .test_protocol import MS from .utils import AsyncioTestCase @@ -188,41 +193,6 @@ class BarClientProtocol(WebSocketClientProtocol): pass -class ClientNoOpExtensionFactory: - name = "x-no-op" - - def get_request_params(self): - return [] - - def process_response_params(self, params, accepted_extensions): - if params: - raise NegotiationError() - return NoOpExtension() - - -class ServerNoOpExtensionFactory: - name = "x-no-op" - - def __init__(self, params=None): - self.params = params or [] - - def process_request_params(self, params, accepted_extensions): - return self.params, NoOpExtension() - - -class NoOpExtension: - name = "x-no-op" - - def __repr__(self): - return "NoOpExtension()" - - def decode(self, frame, *, max_size=None): - return frame - - def encode(self, frame): - return frame - - class ClientServerTestsMixin: secure = False @@ -974,32 +944,6 @@ def test_compression_deflate(self): repr([PerMessageDeflate(False, False, 15, 15)]), ) - @with_server( - extensions=[ - ServerPerMessageDeflateFactory( - client_no_context_takeover=True, server_max_window_bits=10 - ) - ], - compression="deflate", # overridden by explicit config - ) - @with_client( - "/extensions", - extensions=[ - ClientPerMessageDeflateFactory( - server_no_context_takeover=True, client_max_window_bits=12 - ) - ], - compression="deflate", # overridden by explicit config - ) - def test_compression_deflate_and_explicit_config(self): - server_extensions = self.loop.run_until_complete(self.client.recv()) - self.assertEqual( - server_extensions, repr([PerMessageDeflate(True, True, 12, 10)]) - ) - self.assertEqual( - repr(self.client.extensions), repr([PerMessageDeflate(True, True, 10, 12)]) - ) - def test_compression_unsupported_server(self): with self.assertRaises(ValueError): self.start_server(compression="xz") From 3f36975b197f1250258055d403d2061f70013278 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 28 Nov 2020 15:18:44 +0100 Subject: [PATCH 236/281] Name asyncio protocol consistently. This isn't comparable to ws_server on the server side. --- src/websockets/asyncio_client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/websockets/asyncio_client.py b/src/websockets/asyncio_client.py index d22ba764a..3f406170a 100644 --- a/src/websockets/asyncio_client.py +++ b/src/websockets/asyncio_client.py @@ -517,7 +517,7 @@ async def __aexit__( exc_value: Optional[BaseException], traceback: Optional[TracebackType], ) -> None: - await self.ws_client.close() + await self.protocol.close() # await connect(...) @@ -546,7 +546,7 @@ async def __await_impl__(self) -> WebSocketClientProtocol: await protocol.wait_closed() raise else: - self.ws_client = protocol + self.protocol = protocol return protocol except RedirectHandshake as exc: self.handle_redirect(exc.uri) From 32b95fb0dd2cfc07d38df45dcf7f0ebf05008424 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 28 Nov 2020 15:19:11 +0100 Subject: [PATCH 237/281] Name pong waiter consistently. 
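
Both the docstring tweak and the rename stress that the awaitable returned by
ping() resolves when the matching pong comes back, which is what makes it
suitable for round-trip time measurements (illustrative sketch):

    import time

    async def measure_latency(websocket) -> float:
        t0 = time.monotonic()
        pong_waiter = await websocket.ping()
        await pong_waiter  # completes when the corresponding pong is received
        return time.monotonic() - t0
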
--- src/websockets/protocol.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index ba4fc1d3c..1552fb060 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -291,7 +291,7 @@ def __init__( # Protect sending fragmented messages. self._fragmented_message_waiter: Optional[asyncio.Future[None]] = None - # Mapping of ping IDs to waiters, in chronological order. + # Mapping of ping IDs to pong waiters, in chronological order. self.pings: Dict[bytes, asyncio.Future[None]] = {} # Task running the data transfer. @@ -736,15 +736,15 @@ async def ping(self, data: Optional[Data] = None) -> Awaitable[None]: """ Send a ping. - Return a :class:`~asyncio.Future` which will be completed when the - corresponding pong is received and which you may ignore if you don't - want to wait. + Return a :class:`~asyncio.Future` that will be completed when the + corresponding pong is received. You can ignore it if you don't intend + to wait. A ping may serve as a keepalive or as a check that the remote endpoint received all messages up to this point:: pong_waiter = await ws.ping() - await pong_waiter # only if you want to wait for the pong + await pong_waiter # only if you want to wait for the pong By default, the ping contains four random bytes. This payload may be overridden with the optional ``data`` argument which must be a string @@ -1155,12 +1155,12 @@ async def keepalive_ping(self) -> None: # ping() raises ConnectionClosed if the connection is lost, # when connection_lost() calls abort_pings(). - ping_waiter = await self.ping() + pong_waiter = await self.ping() if self.ping_timeout is not None: try: await asyncio.wait_for( - ping_waiter, + pong_waiter, self.ping_timeout, loop=self.loop if sys.version_info[:2] < (3, 8) else None, ) From 165d0c69548e4c9d02624bcbb6eb565bb4c0c136 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 28 Nov 2020 19:15:43 +0100 Subject: [PATCH 238/281] Move asyncio-based APIs to a legacy subpackage. Clean up deprecations in the process. 
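
The move is meant to be transparent for imports: as noted in the changelog
below, the previous module paths keep working as aliases for the new
locations. A quick sketch of equivalent imports after this change (the alias
mechanics themselves are not shown here):

    # Canonical locations after this change:
    from websockets.legacy.client import connect
    from websockets.legacy.server import serve

    # Still valid through backwards-compatibility aliases:
    from websockets.client import connect
    from websockets.server import serve
    from websockets import connect, serve
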
--- docs/api.rst | 8 +- docs/changelog.rst | 136 +- docs/cheatsheet.rst | 36 +- docs/deployment.rst | 6 +- docs/design.rst | 178 +- docs/extensions.rst | 5 +- docs/faq.rst | 8 +- docs/intro.rst | 2 +- setup.py | 2 +- src/websockets/__init__.py | 10 +- src/websockets/__main__.py | 2 +- src/websockets/auth.py | 163 +- src/websockets/client.py | 11 +- src/websockets/exceptions.py | 2 +- src/websockets/framing.py | 135 +- src/websockets/handshake.py | 8 +- src/websockets/http.py | 4 +- src/websockets/legacy/__init__.py | 0 src/websockets/legacy/auth.py | 165 ++ .../{asyncio_client.py => legacy/client.py} | 22 +- src/websockets/legacy/framing.py | 135 ++ .../handshake.py} | 12 +- .../{http_legacy.py => legacy/http.py} | 4 +- src/websockets/legacy/protocol.py | 1459 ++++++++++++++++ .../{asyncio_server.py => legacy/server.py} | 32 +- src/websockets/protocol.py | 1466 +--------------- src/websockets/server.py | 15 +- tests/__init__.py | 10 - tests/legacy/__init__.py | 0 tests/legacy/test_auth.py | 160 ++ .../test_client_server.py} | 39 +- tests/legacy/test_framing.py | 171 ++ .../test_handshake.py} | 2 +- .../test_http.py} | 4 +- tests/legacy/test_protocol.py | 1489 ++++++++++++++++ tests/legacy/utils.py | 93 + tests/test_auth.py | 162 +- tests/test_exports.py | 6 +- tests/test_framing.py | 174 +- tests/test_protocol.py | 1491 +---------------- tests/utils.py | 92 - 41 files changed, 3964 insertions(+), 3955 deletions(-) create mode 100644 src/websockets/legacy/__init__.py create mode 100644 src/websockets/legacy/auth.py rename src/websockets/{asyncio_client.py => legacy/client.py} (97%) create mode 100644 src/websockets/legacy/framing.py rename src/websockets/{handshake_legacy.py => legacy/handshake.py} (93%) rename src/websockets/{http_legacy.py => legacy/http.py} (98%) create mode 100644 src/websockets/legacy/protocol.py rename src/websockets/{asyncio_server.py => legacy/server.py} (97%) create mode 100644 tests/legacy/__init__.py create mode 100644 tests/legacy/test_auth.py rename tests/{test_asyncio_client_server.py => legacy/test_client_server.py} (97%) create mode 100644 tests/legacy/test_framing.py rename tests/{test_handshake_legacy.py => legacy/test_handshake.py} (99%) rename tests/{test_http_legacy.py => legacy/test_http.py} (98%) create mode 100644 tests/legacy/test_protocol.py create mode 100644 tests/legacy/utils.py diff --git a/docs/api.rst b/docs/api.rst index b4bddaf38..c73cf59d3 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -29,7 +29,7 @@ High-level Server ...... -.. automodule:: websockets.server +.. automodule:: websockets.legacy.server .. autofunction:: serve(ws_handler, host=None, port=None, *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origins=None, extensions=None, subprotocols=None, extra_headers=None, process_request=None, select_subprotocol=None, **kwds) :async: @@ -53,7 +53,7 @@ Server Client ...... -.. automodule:: websockets.client +.. automodule:: websockets.legacy.client .. autofunction:: connect(uri, *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origin=None, extensions=None, subprotocols=None, extra_headers=None, **kwds) :async: @@ -68,7 +68,7 @@ Client Shared ...... -.. automodule:: websockets.protocol +.. automodule:: websockets.legacy.protocol .. 
autoclass:: WebSocketCommonProtocol(*, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None) @@ -107,7 +107,7 @@ Per-Message Deflate Extension HTTP Basic Auth ............... -.. automodule:: websockets.auth +.. automodule:: websockets.legacy.auth .. autofunction:: basic_auth_protocol_factory diff --git a/docs/changelog.rst b/docs/changelog.rst index c131f0528..291ec6938 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -10,17 +10,23 @@ Changelog .. note:: - **Version 9.0 moves or deprecates several low-level APIs.** + **Version 9.0 moves or deprecates several APIs.** * Import :class:`~datastructures.Headers` and :exc:`~datastructures.MultipleValuesError` from :mod:`websockets.datastructures` instead of :mod:`websockets.http`. + * :mod:`websockets.client`, :mod:`websockets.server,` + :mod:`websockets.protocol`, and :mod:`websockets.auth` were moved to + :mod:`websockets.legacy.client`, :mod:`websockets.legacy.server`, + :mod:`websockets.legacy.protocol`, and :mod:`websockets.legacy.auth` + respectively. + * :mod:`websockets.handshake` is deprecated. * :mod:`websockets.http` is deprecated. - * :mod:`websocket.framing` is deprecated. + * :mod:`websockets.framing` is deprecated. Aliases provide backwards compatibility for all previously public APIs. @@ -37,7 +43,7 @@ Changelog ..... * Restored the ability to pass a socket with the ``sock`` parameter of - :func:`~server.serve`. + :func:`~legacy.server.serve`. * Removed an incorrect assertion when a connection drops. @@ -60,9 +66,9 @@ Changelog Previously, it could be a function or a coroutine. - If you're passing a ``process_request`` argument to :func:`~server.serve` - or :class:`~server.WebSocketServerProtocol`, or if you're overriding - :meth:`~protocol.WebSocketServerProtocol.process_request` in a subclass, + If you're passing a ``process_request`` argument to :func:`~legacy.server.serve` + or :class:`~legacy.server.WebSocketServerProtocol`, or if you're overriding + :meth:`~legacy.server.WebSocketServerProtocol.process_request` in a subclass, define it with ``async def`` instead of ``def``. For backwards compatibility, functions are still mostly supported, but @@ -78,10 +84,10 @@ Changelog .. note:: **Version 8.0 deprecates the** ``host`` **,** ``port`` **, and** ``secure`` - **attributes of** :class:`~protocol.WebSocketCommonProtocol`. + **attributes of** :class:`~legacy.protocol.WebSocketCommonProtocol`. - Use :attr:`~protocol.WebSocketCommonProtocol.local_address` in servers and - :attr:`~protocol.WebSocketCommonProtocol.remote_address` in clients + Use :attr:`~legacy.protocol.WebSocketCommonProtocol.local_address` in servers and + :attr:`~legacy.protocol.WebSocketCommonProtocol.remote_address` in clients instead of ``host`` and ``port``. .. note:: @@ -98,9 +104,9 @@ Changelog Also: -* :meth:`~protocol.WebSocketCommonProtocol.send`, - :meth:`~protocol.WebSocketCommonProtocol.ping`, and - :meth:`~protocol.WebSocketCommonProtocol.pong` support bytes-like types +* :meth:`~legacy.protocol.WebSocketCommonProtocol.send`, + :meth:`~legacy.protocol.WebSocketCommonProtocol.ping`, and + :meth:`~legacy.protocol.WebSocketCommonProtocol.pong` support bytes-like types :class:`bytearray` and :class:`memoryview` in addition to :class:`bytes`. * Added :exc:`~exceptions.ConnectionClosedOK` and @@ -108,18 +114,18 @@ Also: :exc:`~exceptions.ConnectionClosed` to tell apart normal connection termination from errors. 
-* Added :func:`~auth.basic_auth_protocol_factory` to enforce HTTP Basic Auth +* Added :func:`~legacy.auth.basic_auth_protocol_factory` to enforce HTTP Basic Auth on the server side. -* :func:`~client.connect` handles redirects from the server during the +* :func:`~legacy.client.connect` handles redirects from the server during the handshake. -* :func:`~client.connect` supports overriding ``host`` and ``port``. +* :func:`~legacy.client.connect` supports overriding ``host`` and ``port``. -* Added :func:`~client.unix_connect` for connecting to Unix sockets. +* Added :func:`~legacy.client.unix_connect` for connecting to Unix sockets. * Improved support for sending fragmented messages by accepting asynchronous - iterators in :meth:`~protocol.WebSocketCommonProtocol.send`. + iterators in :meth:`~legacy.protocol.WebSocketCommonProtocol.send`. * Prevented spurious log messages about :exc:`~exceptions.ConnectionClosed` exceptions in keepalive ping task. If you were using ``ping_timeout=None`` @@ -150,7 +156,7 @@ Also: .. warning:: **Version 7.0 renames the** ``timeout`` **argument of** - :func:`~server.serve()` **and** :func:`~client.connect` **to** + :func:`~legacy.server.serve()` **and** :func:`~legacy.client.connect` **to** ``close_timeout`` **.** This prevents confusion with ``ping_timeout``. @@ -160,7 +166,7 @@ Also: .. warning:: **Version 7.0 changes how a server terminates connections when it's - closed with** :meth:`~server.WebSocketServer.close` **.** + closed with** :meth:`~legacy.server.WebSocketServer.close` **.** Previously, connections handlers were canceled. Now, connections are closed with close code 1001 (going away). From the perspective of the @@ -177,7 +183,7 @@ Also: .. note:: - **Version 7.0 changes how a** :meth:`~protocol.WebSocketCommonProtocol.ping` + **Version 7.0 changes how a** :meth:`~legacy.protocol.WebSocketCommonProtocol.ping` **that hasn't received a pong yet behaves when the connection is closed.** The ping — as in ``ping = await websocket.ping()`` — used to be canceled @@ -188,7 +194,7 @@ Also: .. note:: **Version 7.0 raises a** :exc:`RuntimeError` **exception if two coroutines - call** :meth:`~protocol.WebSocketCommonProtocol.recv` **concurrently.** + call** :meth:`~legacy.protocol.WebSocketCommonProtocol.recv` **concurrently.** Concurrent calls lead to non-deterministic behavior because there are no guarantees about which coroutine will receive which message. @@ -197,17 +203,17 @@ Also: * ``websockets`` sends Ping frames at regular intervals and closes the connection if it doesn't receive a matching Pong frame. See - :class:`~protocol.WebSocketCommonProtocol` for details. + :class:`~legacy.protocol.WebSocketCommonProtocol` for details. * Added ``process_request`` and ``select_subprotocol`` arguments to - :func:`~server.serve` and :class:`~server.WebSocketServerProtocol` to - customize :meth:`~server.WebSocketServerProtocol.process_request` and - :meth:`~server.WebSocketServerProtocol.select_subprotocol` without - subclassing :class:`~server.WebSocketServerProtocol`. + :func:`~legacy.server.serve` and :class:`~legacy.server.WebSocketServerProtocol` to + customize :meth:`~legacy.server.WebSocketServerProtocol.process_request` and + :meth:`~legacy.server.WebSocketServerProtocol.select_subprotocol` without + subclassing :class:`~legacy.server.WebSocketServerProtocol`. * Added support for sending fragmented messages. 
-* Added the :meth:`~protocol.WebSocketCommonProtocol.wait_closed` method to +* Added the :meth:`~legacy.protocol.WebSocketCommonProtocol.wait_closed` method to protocols. * Added an interactive client: ``python -m websockets ``. @@ -215,7 +221,7 @@ Also: * Changed the ``origins`` argument to represent the lack of an origin with ``None`` rather than ``''``. -* Fixed a data loss bug in :meth:`~protocol.WebSocketCommonProtocol.recv`: +* Fixed a data loss bug in :meth:`~legacy.protocol.WebSocketCommonProtocol.recv`: canceling it at the wrong time could result in messages being dropped. * Improved handling of multiple HTTP headers with the same name. @@ -230,18 +236,18 @@ Also: **Version 6.0 introduces the** :class:`~http.Headers` **class for managing HTTP headers and changes several public APIs:** - * :meth:`~server.WebSocketServerProtocol.process_request` now receives a + * :meth:`~legacy.server.WebSocketServerProtocol.process_request` now receives a :class:`~http.Headers` instead of a :class:`~http.client.HTTPMessage` in the ``request_headers`` argument. - * The :attr:`~protocol.WebSocketCommonProtocol.request_headers` and - :attr:`~protocol.WebSocketCommonProtocol.response_headers` attributes of - :class:`~protocol.WebSocketCommonProtocol` are :class:`~http.Headers` + * The :attr:`~legacy.protocol.WebSocketCommonProtocol.request_headers` and + :attr:`~legacy.protocol.WebSocketCommonProtocol.response_headers` attributes of + :class:`~legacy.protocol.WebSocketCommonProtocol` are :class:`~http.Headers` instead of :class:`~http.client.HTTPMessage`. - * The :attr:`~protocol.WebSocketCommonProtocol.raw_request_headers` and - :attr:`~protocol.WebSocketCommonProtocol.raw_response_headers` - attributes of :class:`~protocol.WebSocketCommonProtocol` are removed. + * The :attr:`~legacy.protocol.WebSocketCommonProtocol.raw_request_headers` and + :attr:`~legacy.protocol.WebSocketCommonProtocol.raw_response_headers` + attributes of :class:`~legacy.protocol.WebSocketCommonProtocol` are removed. Use :meth:`~http.Headers.raw_items` instead. * Functions defined in the :mod:`~handshake` module now receive @@ -265,7 +271,7 @@ Also: ..... * Fixed a regression in the 5.0 release that broke some invocations of - :func:`~server.serve()` and :func:`~client.connect`. + :func:`~legacy.server.serve()` and :func:`~legacy.client.connect`. 5.0 ... @@ -290,7 +296,7 @@ Also: Also: -* :func:`~client.connect` performs HTTP Basic Auth when the URI contains +* :func:`~legacy.client.connect` performs HTTP Basic Auth when the URI contains credentials. * Iterating on incoming messages no longer raises an exception when the @@ -299,13 +305,13 @@ Also: * A plain HTTP request now receives a 426 Upgrade Required response and doesn't log a stack trace. -* :func:`~server.unix_serve` can be used as an asynchronous context manager on +* :func:`~legacy.server.unix_serve` can be used as an asynchronous context manager on Python ≥ 3.5.1. -* Added the :attr:`~protocol.WebSocketCommonProtocol.closed` property to +* Added the :attr:`~legacy.protocol.WebSocketCommonProtocol.closed` property to protocols. -* If a :meth:`~protocol.WebSocketCommonProtocol.ping` doesn't receive a pong, +* If a :meth:`~legacy.protocol.WebSocketCommonProtocol.ping` doesn't receive a pong, it's canceled when the connection is closed. * Reported the cause of :exc:`~exceptions.ConnectionClosed` exceptions. @@ -346,7 +352,7 @@ Also: Compression should improve performance but it increases RAM and CPU use. 
If you want to disable compression, add ``compression=None`` when calling - :func:`~server.serve()` or :func:`~client.connect`. + :func:`~legacy.server.serve()` or :func:`~legacy.client.connect`. .. warning:: @@ -360,13 +366,13 @@ Also: Also: -* :class:`~protocol.WebSocketCommonProtocol` instances can be used as +* :class:`~legacy.protocol.WebSocketCommonProtocol` instances can be used as asynchronous iterators on Python ≥ 3.6. They yield incoming messages. -* Added :func:`~server.unix_serve` for listening on Unix sockets. +* Added :func:`~legacy.server.unix_serve` for listening on Unix sockets. -* Added the :attr:`~server.WebSocketServer.sockets` attribute to the return - value of :func:`~server.serve`. +* Added the :attr:`~legacy.server.WebSocketServer.sockets` attribute to the return + value of :func:`~legacy.server.serve`. * Reorganized and extended documentation. @@ -384,15 +390,15 @@ Also: 3.4 ... -* Renamed :func:`~server.serve()` and :func:`~client.connect`'s ``klass`` +* Renamed :func:`~legacy.server.serve()` and :func:`~legacy.client.connect`'s ``klass`` argument to ``create_protocol`` to reflect that it can also be a callable. For backwards compatibility, ``klass`` is still supported. -* :func:`~server.serve` can be used as an asynchronous context manager on +* :func:`~legacy.server.serve` can be used as an asynchronous context manager on Python ≥ 3.5.1. * Added support for customizing handling of incoming connections with - :meth:`~server.WebSocketServerProtocol.process_request`. + :meth:`~legacy.server.WebSocketServerProtocol.process_request`. * Made read and write buffer sizes configurable. @@ -400,10 +406,10 @@ Also: * Added an optional C extension to speed up low-level operations. -* An invalid response status code during :func:`~client.connect` now raises +* An invalid response status code during :func:`~legacy.client.connect` now raises :class:`~exceptions.InvalidStatusCode` with a ``code`` attribute. -* Providing a ``sock`` argument to :func:`~client.connect` no longer +* Providing a ``sock`` argument to :func:`~legacy.client.connect` no longer crashes. 3.3 @@ -419,7 +425,7 @@ Also: ... * Added ``timeout``, ``max_size``, and ``max_queue`` arguments to - :func:`~client.connect()` and :func:`~server.serve`. + :func:`~legacy.client.connect()` and :func:`~legacy.server.serve`. * Made server shutdown more robust. @@ -436,11 +442,11 @@ Also: .. warning:: **Version 3.0 introduces a backwards-incompatible change in the** - :meth:`~protocol.WebSocketCommonProtocol.recv` **API.** + :meth:`~legacy.protocol.WebSocketCommonProtocol.recv` **API.** **If you're upgrading from 2.x or earlier, please read this carefully.** - :meth:`~protocol.WebSocketCommonProtocol.recv` used to return ``None`` + :meth:`~legacy.protocol.WebSocketCommonProtocol.recv` used to return ``None`` when the connection was closed. This required checking the return value of every call:: @@ -459,20 +465,20 @@ Also: In order to avoid stranding projects built upon an earlier version, the previous behavior can be restored by passing ``legacy_recv=True`` to - :func:`~server.serve`, :func:`~client.connect`, - :class:`~server.WebSocketServerProtocol`, or - :class:`~client.WebSocketClientProtocol`. ``legacy_recv`` isn't documented + :func:`~legacy.server.serve`, :func:`~legacy.client.connect`, + :class:`~legacy.server.WebSocketServerProtocol`, or + :class:`~legacy.client.WebSocketClientProtocol`. ``legacy_recv`` isn't documented in their signatures but isn't scheduled for deprecation either. 
Also: -* :func:`~client.connect` can be used as an asynchronous context manager on +* :func:`~legacy.client.connect` can be used as an asynchronous context manager on Python ≥ 3.5.1. * Updated documentation with ``await`` and ``async`` syntax from Python 3.5. -* :meth:`~protocol.WebSocketCommonProtocol.ping` and - :meth:`~protocol.WebSocketCommonProtocol.pong` support data passed as +* :meth:`~legacy.protocol.WebSocketCommonProtocol.ping` and + :meth:`~legacy.protocol.WebSocketCommonProtocol.pong` support data passed as :class:`str` in addition to :class:`bytes`. * Worked around an asyncio bug affecting connection termination under load. @@ -511,7 +517,7 @@ Also: * Returned a 403 status code instead of 400 when the request Origin isn't allowed. -* Canceling :meth:`~protocol.WebSocketCommonProtocol.recv` no longer drops +* Canceling :meth:`~legacy.protocol.WebSocketCommonProtocol.recv` no longer drops the next message. * Clarified that the closing handshake can be initiated by the client. @@ -529,8 +535,8 @@ Also: * Supported non-default event loop. -* Added ``loop`` argument to :func:`~client.connect` and - :func:`~server.serve`. +* Added ``loop`` argument to :func:`~legacy.client.connect` and + :func:`~legacy.server.serve`. 2.3 ... @@ -557,9 +563,9 @@ Also: .. warning:: **Version 2.0 introduces a backwards-incompatible change in the** - :meth:`~protocol.WebSocketCommonProtocol.send`, - :meth:`~protocol.WebSocketCommonProtocol.ping`, and - :meth:`~protocol.WebSocketCommonProtocol.pong` **APIs.** + :meth:`~legacy.protocol.WebSocketCommonProtocol.send`, + :meth:`~legacy.protocol.WebSocketCommonProtocol.ping`, and + :meth:`~legacy.protocol.WebSocketCommonProtocol.pong` **APIs.** **If you're upgrading from 1.x or earlier, please read this carefully.** diff --git a/docs/cheatsheet.rst b/docs/cheatsheet.rst index 4b95c9eea..a71f08d74 100644 --- a/docs/cheatsheet.rst +++ b/docs/cheatsheet.rst @@ -9,24 +9,24 @@ Server * Write a coroutine that handles a single connection. It receives a WebSocket protocol instance and the URI path in argument. - * Call :meth:`~protocol.WebSocketCommonProtocol.recv` and - :meth:`~protocol.WebSocketCommonProtocol.send` to receive and send + * Call :meth:`~legacy.protocol.WebSocketCommonProtocol.recv` and + :meth:`~legacy.protocol.WebSocketCommonProtocol.send` to receive and send messages at any time. - * When :meth:`~protocol.WebSocketCommonProtocol.recv` or - :meth:`~protocol.WebSocketCommonProtocol.send` raises + * When :meth:`~legacy.protocol.WebSocketCommonProtocol.recv` or + :meth:`~legacy.protocol.WebSocketCommonProtocol.send` raises :exc:`~exceptions.ConnectionClosed`, clean up and exit. If you started other :class:`asyncio.Task`, terminate them before exiting. - * If you aren't awaiting :meth:`~protocol.WebSocketCommonProtocol.recv`, - consider awaiting :meth:`~protocol.WebSocketCommonProtocol.wait_closed` + * If you aren't awaiting :meth:`~legacy.protocol.WebSocketCommonProtocol.recv`, + consider awaiting :meth:`~legacy.protocol.WebSocketCommonProtocol.wait_closed` to detect quickly when the connection is closed. - * You may :meth:`~protocol.WebSocketCommonProtocol.ping` or - :meth:`~protocol.WebSocketCommonProtocol.pong` if you wish but it isn't + * You may :meth:`~legacy.protocol.WebSocketCommonProtocol.ping` or + :meth:`~legacy.protocol.WebSocketCommonProtocol.pong` if you wish but it isn't needed in general. 
-* Create a server with :func:`~server.serve` which is similar to asyncio's +* Create a server with :func:`~legacy.server.serve` which is similar to asyncio's :meth:`~asyncio.AbstractEventLoop.create_server`. You can also use it as an asynchronous context manager. @@ -35,30 +35,30 @@ Server handler exits normally or with an exception. * For advanced customization, you may subclass - :class:`~server.WebSocketServerProtocol` and pass either this subclass or + :class:`~legacy.server.WebSocketServerProtocol` and pass either this subclass or a factory function as the ``create_protocol`` argument. Client ------ -* Create a client with :func:`~client.connect` which is similar to asyncio's +* Create a client with :func:`~legacy.client.connect` which is similar to asyncio's :meth:`~asyncio.BaseEventLoop.create_connection`. You can also use it as an asynchronous context manager. * For advanced customization, you may subclass - :class:`~server.WebSocketClientProtocol` and pass either this subclass or + :class:`~legacy.server.WebSocketClientProtocol` and pass either this subclass or a factory function as the ``create_protocol`` argument. -* Call :meth:`~protocol.WebSocketCommonProtocol.recv` and - :meth:`~protocol.WebSocketCommonProtocol.send` to receive and send messages +* Call :meth:`~legacy.protocol.WebSocketCommonProtocol.recv` and + :meth:`~legacy.protocol.WebSocketCommonProtocol.send` to receive and send messages at any time. -* You may :meth:`~protocol.WebSocketCommonProtocol.ping` or - :meth:`~protocol.WebSocketCommonProtocol.pong` if you wish but it isn't +* You may :meth:`~legacy.protocol.WebSocketCommonProtocol.ping` or + :meth:`~legacy.protocol.WebSocketCommonProtocol.pong` if you wish but it isn't needed in general. -* If you aren't using :func:`~client.connect` as a context manager, call - :meth:`~protocol.WebSocketCommonProtocol.close` to terminate the connection. +* If you aren't using :func:`~legacy.client.connect` as a context manager, call + :meth:`~legacy.protocol.WebSocketCommonProtocol.close` to terminate the connection. .. _debugging: diff --git a/docs/deployment.rst b/docs/deployment.rst index 5b05afff1..ed025094d 100644 --- a/docs/deployment.rst +++ b/docs/deployment.rst @@ -24,7 +24,7 @@ Graceful shutdown You may want to close connections gracefully when shutting down the server, perhaps after executing some cleanup logic. There are two ways to achieve this -with the object returned by :func:`~server.serve`: +with the object returned by :func:`~legacy.server.serve`: - using it as a asynchronous context manager, or - calling its ``close()`` method, then waiting for its ``wait_closed()`` @@ -132,7 +132,7 @@ Under high load, if a server receives more messages than it can process, bufferbloat can result in excessive memory use. By default ``websockets`` has generous limits. It is strongly recommended to -adapt them to your application. When you call :func:`~server.serve`: +adapt them to your application. When you call :func:`~legacy.server.serve`: - Set ``max_size`` (default: 1 MiB, UTF-8 encoded) to the maximum size of messages your application generates. @@ -155,7 +155,7 @@ The author of ``websockets`` doesn't think that's a good idea, due to the widely different operational characteristics of HTTP and WebSocket. ``websockets`` provide minimal support for responding to HTTP requests with -the :meth:`~server.WebSocketServerProtocol.process_request` hook. Typical +the :meth:`~legacy.server.WebSocketServerProtocol.process_request` hook. Typical use cases include health checks. 
Here's an example: .. literalinclude:: ../example/health_check_server.py diff --git a/docs/design.rst b/docs/design.rst index 74279b87f..f2718370d 100644 --- a/docs/design.rst +++ b/docs/design.rst @@ -32,20 +32,20 @@ WebSocket connections go through a trivial state machine: Transitions happen in the following places: - ``CONNECTING -> OPEN``: in - :meth:`~protocol.WebSocketCommonProtocol.connection_open` which runs when + :meth:`~legacy.protocol.WebSocketCommonProtocol.connection_open` which runs when the :ref:`opening handshake ` completes and the WebSocket connection is established — not to be confused with :meth:`~asyncio.Protocol.connection_made` which runs when the TCP connection is established; - ``OPEN -> CLOSING``: in - :meth:`~protocol.WebSocketCommonProtocol.write_frame` immediately before + :meth:`~legacy.protocol.WebSocketCommonProtocol.write_frame` immediately before sending a close frame; since receiving a close frame triggers sending a close frame, this does the right thing regardless of which side started the :ref:`closing handshake `; also in - :meth:`~protocol.WebSocketCommonProtocol.fail_connection` which duplicates + :meth:`~legacy.protocol.WebSocketCommonProtocol.fail_connection` which duplicates a few lines of code from ``write_close_frame()`` and ``write_frame()``; - ``* -> CLOSED``: in - :meth:`~protocol.WebSocketCommonProtocol.connection_lost` which is always + :meth:`~legacy.protocol.WebSocketCommonProtocol.connection_lost` which is always called exactly once when the TCP connection is closed. Coroutines @@ -58,36 +58,36 @@ connection lifecycle on the client side. :target: _images/lifecycle.svg The lifecycle is identical on the server side, except inversion of control -makes the equivalent of :meth:`~client.connect` implicit. +makes the equivalent of :meth:`~legacy.client.connect` implicit. Coroutines shown in green are called by the application. Multiple coroutines may interact with the WebSocket connection concurrently. Coroutines shown in gray manage the connection. When the opening handshake -succeeds, :meth:`~protocol.WebSocketCommonProtocol.connection_open` starts +succeeds, :meth:`~legacy.protocol.WebSocketCommonProtocol.connection_open` starts two tasks: -- :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` runs - :meth:`~protocol.WebSocketCommonProtocol.transfer_data` which handles - incoming data and lets :meth:`~protocol.WebSocketCommonProtocol.recv` +- :attr:`~legacy.protocol.WebSocketCommonProtocol.transfer_data_task` runs + :meth:`~legacy.protocol.WebSocketCommonProtocol.transfer_data` which handles + incoming data and lets :meth:`~legacy.protocol.WebSocketCommonProtocol.recv` consume it. It may be canceled to terminate the connection. It never exits with an exception other than :exc:`~asyncio.CancelledError`. See :ref:`data transfer ` below. -- :attr:`~protocol.WebSocketCommonProtocol.keepalive_ping_task` runs - :meth:`~protocol.WebSocketCommonProtocol.keepalive_ping` which sends Ping +- :attr:`~legacy.protocol.WebSocketCommonProtocol.keepalive_ping_task` runs + :meth:`~legacy.protocol.WebSocketCommonProtocol.keepalive_ping` which sends Ping frames at regular intervals and ensures that corresponding Pong frames are received. It is canceled when the connection terminates. It never exits with an exception other than :exc:`~asyncio.CancelledError`. 
-- :attr:`~protocol.WebSocketCommonProtocol.close_connection_task` runs - :meth:`~protocol.WebSocketCommonProtocol.close_connection` which waits for +- :attr:`~legacy.protocol.WebSocketCommonProtocol.close_connection_task` runs + :meth:`~legacy.protocol.WebSocketCommonProtocol.close_connection` which waits for the data transfer to terminate, then takes care of closing the TCP connection. It must not be canceled. It never exits with an exception. See :ref:`connection termination ` below. -Besides, :meth:`~protocol.WebSocketCommonProtocol.fail_connection` starts -the same :attr:`~protocol.WebSocketCommonProtocol.close_connection_task` when +Besides, :meth:`~legacy.protocol.WebSocketCommonProtocol.fail_connection` starts +the same :attr:`~legacy.protocol.WebSocketCommonProtocol.close_connection_task` when the opening handshake fails, in order to close the TCP connection. Splitting the responsibilities between two tasks makes it easier to guarantee @@ -99,11 +99,11 @@ that ``websockets`` can terminate connections: regardless of whether the connection terminates normally or abnormally. -:attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` completes when no +:attr:`~legacy.protocol.WebSocketCommonProtocol.transfer_data_task` completes when no more data will be received on the connection. Under normal circumstances, it exits after exchanging close frames. -:attr:`~protocol.WebSocketCommonProtocol.close_connection_task` completes when +:attr:`~legacy.protocol.WebSocketCommonProtocol.close_connection_task` completes when the TCP connection is closed. @@ -113,7 +113,7 @@ Opening handshake ----------------- ``websockets`` performs the opening handshake when establishing a WebSocket -connection. On the client side, :meth:`~client.connect` executes it before +connection. On the client side, :meth:`~legacy.client.connect` executes it before returning the protocol to the caller. On the server side, it's executed before passing the protocol to the ``ws_handler`` coroutine handling the connection. @@ -122,26 +122,26 @@ request and the server replies with an HTTP Switching Protocols response — ``websockets`` aims at keeping the implementation of both sides consistent with one another. -On the client side, :meth:`~client.WebSocketClientProtocol.handshake`: +On the client side, :meth:`~legacy.client.WebSocketClientProtocol.handshake`: - builds a HTTP request based on the ``uri`` and parameters passed to - :meth:`~client.connect`; + :meth:`~legacy.client.connect`; - writes the HTTP request to the network; - reads a HTTP response from the network; - checks the HTTP response, validates ``extensions`` and ``subprotocol``, and configures the protocol accordingly; - moves to the ``OPEN`` state. -On the server side, :meth:`~server.WebSocketServerProtocol.handshake`: +On the server side, :meth:`~legacy.server.WebSocketServerProtocol.handshake`: - reads a HTTP request from the network; -- calls :meth:`~server.WebSocketServerProtocol.process_request` which may +- calls :meth:`~legacy.server.WebSocketServerProtocol.process_request` which may abort the WebSocket handshake and return a HTTP response instead; this hook only makes sense on the server side; - checks the HTTP request, negotiates ``extensions`` and ``subprotocol``, and configures the protocol accordingly; - builds a HTTP response based on the above and parameters passed to - :meth:`~server.serve`; + :meth:`~legacy.server.serve`; - writes the HTTP response to the network; - moves to the ``OPEN`` state; - returns the ``path`` part of the ``uri``. 
@@ -177,16 +177,16 @@ differences between a server and a client: These differences are so minor that all the logic for `data framing`_, for `sending and receiving data`_ and for `closing the connection`_ is implemented -in the same class, :class:`~protocol.WebSocketCommonProtocol`. +in the same class, :class:`~legacy.protocol.WebSocketCommonProtocol`. .. _data framing: https://tools.ietf.org/html/rfc6455#section-5 .. _sending and receiving data: https://tools.ietf.org/html/rfc6455#section-6 .. _closing the connection: https://tools.ietf.org/html/rfc6455#section-7 -The :attr:`~protocol.WebSocketCommonProtocol.is_client` attribute tells which +The :attr:`~legacy.protocol.WebSocketCommonProtocol.is_client` attribute tells which side a protocol instance is managing. This attribute is defined on the -:attr:`~server.WebSocketServerProtocol` and -:attr:`~client.WebSocketClientProtocol` classes. +:attr:`~legacy.server.WebSocketServerProtocol` and +:attr:`~legacy.client.WebSocketClientProtocol` classes. Data flow ......... @@ -210,11 +210,11 @@ The left side of the diagram shows how ``websockets`` receives data. Incoming data is written to a :class:`~asyncio.StreamReader` in order to implement flow control and provide backpressure on the TCP connection. -:attr:`~protocol.WebSocketCommonProtocol.transfer_data_task`, which is started +:attr:`~legacy.protocol.WebSocketCommonProtocol.transfer_data_task`, which is started when the WebSocket connection is established, processes this data. When it receives data frames, it reassembles fragments and puts the resulting -messages in the :attr:`~protocol.WebSocketCommonProtocol.messages` queue. +messages in the :attr:`~legacy.protocol.WebSocketCommonProtocol.messages` queue. When it encounters a control frame: @@ -226,11 +226,11 @@ When it encounters a control frame: Running this process in a task guarantees that control frames are processed promptly. Without such a task, ``websockets`` would depend on the application to drive the connection by having exactly one coroutine awaiting -:meth:`~protocol.WebSocketCommonProtocol.recv` at any time. While this +:meth:`~legacy.protocol.WebSocketCommonProtocol.recv` at any time. While this happens naturally in many use cases, it cannot be relied upon. -Then :meth:`~protocol.WebSocketCommonProtocol.recv` fetches the next message -from the :attr:`~protocol.WebSocketCommonProtocol.messages` queue, with some +Then :meth:`~legacy.protocol.WebSocketCommonProtocol.recv` fetches the next message +from the :attr:`~legacy.protocol.WebSocketCommonProtocol.messages` queue, with some complexity added for handling backpressure and termination correctly. Sending data @@ -238,18 +238,18 @@ Sending data The right side of the diagram shows how ``websockets`` sends data. -:meth:`~protocol.WebSocketCommonProtocol.send` writes one or several data +:meth:`~legacy.protocol.WebSocketCommonProtocol.send` writes one or several data frames containing the message. While sending a fragmented message, concurrent -calls to :meth:`~protocol.WebSocketCommonProtocol.send` are put on hold until +calls to :meth:`~legacy.protocol.WebSocketCommonProtocol.send` are put on hold until all fragments are sent. This makes concurrent calls safe. -:meth:`~protocol.WebSocketCommonProtocol.ping` writes a ping frame and +:meth:`~legacy.protocol.WebSocketCommonProtocol.ping` writes a ping frame and yields a :class:`~asyncio.Future` which will be completed when a matching pong frame is received. -:meth:`~protocol.WebSocketCommonProtocol.pong` writes a pong frame. 
+:meth:`~legacy.protocol.WebSocketCommonProtocol.pong` writes a pong frame. -:meth:`~protocol.WebSocketCommonProtocol.close` writes a close frame and +:meth:`~legacy.protocol.WebSocketCommonProtocol.close` writes a close frame and waits for the TCP connection to terminate. Outgoing data is written to a :class:`~asyncio.StreamWriter` in order to @@ -261,17 +261,17 @@ Closing handshake ................. When the other side of the connection initiates the closing handshake, -:meth:`~protocol.WebSocketCommonProtocol.read_message` receives a close +:meth:`~legacy.protocol.WebSocketCommonProtocol.read_message` receives a close frame while in the ``OPEN`` state. It moves to the ``CLOSING`` state, sends a close frame, and returns ``None``, causing -:attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` to terminate. +:attr:`~legacy.protocol.WebSocketCommonProtocol.transfer_data_task` to terminate. When this side of the connection initiates the closing handshake with -:meth:`~protocol.WebSocketCommonProtocol.close`, it moves to the ``CLOSING`` +:meth:`~legacy.protocol.WebSocketCommonProtocol.close`, it moves to the ``CLOSING`` state and sends a close frame. When the other side sends a close frame, -:meth:`~protocol.WebSocketCommonProtocol.read_message` receives it in the +:meth:`~legacy.protocol.WebSocketCommonProtocol.read_message` receives it in the ``CLOSING`` state and returns ``None``, also causing -:attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` to terminate. +:attr:`~legacy.protocol.WebSocketCommonProtocol.transfer_data_task` to terminate. If the other side doesn't send a close frame within the connection's close timeout, ``websockets`` :ref:`fails the connection `. @@ -288,31 +288,31 @@ Then ``websockets`` terminates the TCP connection. Connection termination ---------------------- -:attr:`~protocol.WebSocketCommonProtocol.close_connection_task`, which is +:attr:`~legacy.protocol.WebSocketCommonProtocol.close_connection_task`, which is started when the WebSocket connection is established, is responsible for eventually closing the TCP connection. -First :attr:`~protocol.WebSocketCommonProtocol.close_connection_task` waits -for :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` to terminate, +First :attr:`~legacy.protocol.WebSocketCommonProtocol.close_connection_task` waits +for :attr:`~legacy.protocol.WebSocketCommonProtocol.transfer_data_task` to terminate, which may happen as a result of: - a successful closing handshake: as explained above, this exits the infinite - loop in :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task`; + loop in :attr:`~legacy.protocol.WebSocketCommonProtocol.transfer_data_task`; - a timeout while waiting for the closing handshake to complete: this cancels - :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task`; + :attr:`~legacy.protocol.WebSocketCommonProtocol.transfer_data_task`; - a protocol error, including connection errors: depending on the exception, - :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` :ref:`fails the + :attr:`~legacy.protocol.WebSocketCommonProtocol.transfer_data_task` :ref:`fails the connection ` with a suitable code and exits. 
-:attr:`~protocol.WebSocketCommonProtocol.close_connection_task` is separate -from :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` to make it +:attr:`~legacy.protocol.WebSocketCommonProtocol.close_connection_task` is separate +from :attr:`~legacy.protocol.WebSocketCommonProtocol.transfer_data_task` to make it easier to implement the timeout on the closing handshake. Canceling -:attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` creates no risk -of canceling :attr:`~protocol.WebSocketCommonProtocol.close_connection_task` +:attr:`~legacy.protocol.WebSocketCommonProtocol.transfer_data_task` creates no risk +of canceling :attr:`~legacy.protocol.WebSocketCommonProtocol.close_connection_task` and failing to close the TCP connection, thus leaking resources. -Then :attr:`~protocol.WebSocketCommonProtocol.close_connection_task` cancels -:attr:`~protocol.WebSocketCommonProtocol.keepalive_ping`. This task has no +Then :attr:`~legacy.protocol.WebSocketCommonProtocol.close_connection_task` cancels +:attr:`~legacy.protocol.WebSocketCommonProtocol.keepalive_ping`. This task has no protocol compliance responsibilities. Terminating it to avoid leaking it is the only concern. @@ -334,11 +334,11 @@ If the opening handshake doesn't complete successfully, ``websockets`` fails the connection by closing the TCP connection. Once the opening handshake has completed, ``websockets`` fails the connection -by canceling :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` and +by canceling :attr:`~legacy.protocol.WebSocketCommonProtocol.transfer_data_task` and sending a close frame if appropriate. -:attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` exits, unblocking -:attr:`~protocol.WebSocketCommonProtocol.close_connection_task`, which closes +:attr:`~legacy.protocol.WebSocketCommonProtocol.transfer_data_task` exits, unblocking +:attr:`~legacy.protocol.WebSocketCommonProtocol.close_connection_task`, which closes the TCP connection. @@ -414,45 +414,45 @@ happen on the client side. On the server side, the opening handshake is managed by ``websockets`` and nothing results in a cancellation. Once the WebSocket connection is established, internal tasks -:attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` and -:attr:`~protocol.WebSocketCommonProtocol.close_connection_task` mustn't get +:attr:`~legacy.protocol.WebSocketCommonProtocol.transfer_data_task` and +:attr:`~legacy.protocol.WebSocketCommonProtocol.close_connection_task` mustn't get accidentally canceled if a coroutine that awaits them is canceled. In other words, they must be shielded from cancellation. -:meth:`~protocol.WebSocketCommonProtocol.recv` waits for the next message in -the queue or for :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` +:meth:`~legacy.protocol.WebSocketCommonProtocol.recv` waits for the next message in +the queue or for :attr:`~legacy.protocol.WebSocketCommonProtocol.transfer_data_task` to terminate, whichever comes first. It relies on :func:`~asyncio.wait` for waiting on two futures in parallel. As a consequence, even though it's waiting on a :class:`~asyncio.Future` signaling the next message and on -:attr:`~protocol.WebSocketCommonProtocol.transfer_data_task`, it doesn't +:attr:`~legacy.protocol.WebSocketCommonProtocol.transfer_data_task`, it doesn't propagate cancellation to them. 
-:meth:`~protocol.WebSocketCommonProtocol.ensure_open` is called by -:meth:`~protocol.WebSocketCommonProtocol.send`, -:meth:`~protocol.WebSocketCommonProtocol.ping`, and -:meth:`~protocol.WebSocketCommonProtocol.pong`. When the connection state is +:meth:`~legacy.protocol.WebSocketCommonProtocol.ensure_open` is called by +:meth:`~legacy.protocol.WebSocketCommonProtocol.send`, +:meth:`~legacy.protocol.WebSocketCommonProtocol.ping`, and +:meth:`~legacy.protocol.WebSocketCommonProtocol.pong`. When the connection state is ``CLOSING``, it waits for -:attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` but shields it to +:attr:`~legacy.protocol.WebSocketCommonProtocol.transfer_data_task` but shields it to prevent cancellation. -:meth:`~protocol.WebSocketCommonProtocol.close` waits for the data transfer +:meth:`~legacy.protocol.WebSocketCommonProtocol.close` waits for the data transfer task to terminate with :func:`~asyncio.wait_for`. If it's canceled or if the -timeout elapses, :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` +timeout elapses, :attr:`~legacy.protocol.WebSocketCommonProtocol.transfer_data_task` is canceled, which is correct at this point. -:meth:`~protocol.WebSocketCommonProtocol.close` then waits for -:attr:`~protocol.WebSocketCommonProtocol.close_connection_task` but shields it +:meth:`~legacy.protocol.WebSocketCommonProtocol.close` then waits for +:attr:`~legacy.protocol.WebSocketCommonProtocol.close_connection_task` but shields it to prevent cancellation. -:meth:`~protocol.WebSocketCommonProtocol.close` and -:func:`~protocol.WebSocketCommonProtocol.fail_connection` are the only -places where :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` may +:meth:`~legacy.protocol.WebSocketCommonProtocol.close` and +:func:`~legacy.protocol.WebSocketCommonProtocol.fail_connection` are the only +places where :attr:`~legacy.protocol.WebSocketCommonProtocol.transfer_data_task` may be canceled. -:attr:`~protocol.WebSocketCommonProtocol.close_connnection_task` starts by -waiting for :attr:`~protocol.WebSocketCommonProtocol.transfer_data_task`. It +:attr:`~legacy.protocol.WebSocketCommonProtocol.close_connnection_task` starts by +waiting for :attr:`~legacy.protocol.WebSocketCommonProtocol.transfer_data_task`. It catches :exc:`~asyncio.CancelledError` to prevent a cancellation of -:attr:`~protocol.WebSocketCommonProtocol.transfer_data_task` from propagating -to :attr:`~protocol.WebSocketCommonProtocol.close_connnection_task`. +:attr:`~legacy.protocol.WebSocketCommonProtocol.transfer_data_task` from propagating +to :attr:`~legacy.protocol.WebSocketCommonProtocol.close_connnection_task`. .. _backpressure: @@ -519,47 +519,47 @@ For each connection, the receiving side contains these buffers: - OS buffers: tuning them is an advanced optimization. - :class:`~asyncio.StreamReader` bytes buffer: the default limit is 64 KiB. You can set another limit by passing a ``read_limit`` keyword argument to - :func:`~client.connect()` or :func:`~server.serve`. + :func:`~legacy.client.connect()` or :func:`~legacy.server.serve`. - Incoming messages :class:`~collections.deque`: its size depends both on the size and the number of messages it contains. By default the maximum UTF-8 encoded size is 1 MiB and the maximum number is 32. In the worst case, after UTF-8 decoding, a single message could take up to 4 MiB of memory and the overall memory consumption could reach 128 MiB. 
You should adjust these limits by setting the ``max_size`` and ``max_queue`` keyword arguments of - :func:`~client.connect()` or :func:`~server.serve` according to your + :func:`~legacy.client.connect()` or :func:`~legacy.server.serve` according to your application's requirements. For each connection, the sending side contains these buffers: - :class:`~asyncio.StreamWriter` bytes buffer: the default size is 64 KiB. You can set another limit by passing a ``write_limit`` keyword argument to - :func:`~client.connect()` or :func:`~server.serve`. + :func:`~legacy.client.connect()` or :func:`~legacy.server.serve`. - OS buffers: tuning them is an advanced optimization. Concurrency ----------- -Awaiting any combination of :meth:`~protocol.WebSocketCommonProtocol.recv`, -:meth:`~protocol.WebSocketCommonProtocol.send`, -:meth:`~protocol.WebSocketCommonProtocol.close` -:meth:`~protocol.WebSocketCommonProtocol.ping`, or -:meth:`~protocol.WebSocketCommonProtocol.pong` concurrently is safe, including +Awaiting any combination of :meth:`~legacy.protocol.WebSocketCommonProtocol.recv`, +:meth:`~legacy.protocol.WebSocketCommonProtocol.send`, +:meth:`~legacy.protocol.WebSocketCommonProtocol.close` +:meth:`~legacy.protocol.WebSocketCommonProtocol.ping`, or +:meth:`~legacy.protocol.WebSocketCommonProtocol.pong` concurrently is safe, including multiple calls to the same method, with one exception and one limitation. * **Only one coroutine can receive messages at a time.** This constraint avoids non-deterministic behavior (and simplifies the implementation). If a - coroutine is awaiting :meth:`~protocol.WebSocketCommonProtocol.recv`, + coroutine is awaiting :meth:`~legacy.protocol.WebSocketCommonProtocol.recv`, awaiting it again in another coroutine raises :exc:`RuntimeError`. * **Sending a fragmented message forces serialization.** Indeed, the WebSocket protocol doesn't support multiplexing messages. If a coroutine is awaiting - :meth:`~protocol.WebSocketCommonProtocol.send` to send a fragmented message, + :meth:`~legacy.protocol.WebSocketCommonProtocol.send` to send a fragmented message, awaiting it again in another coroutine waits until the first call completes. This will be transparent in many cases. It may be a concern if the fragmented message is generated slowly by an asynchronous iterator. Receiving frames is independent from sending frames. This isolates -:meth:`~protocol.WebSocketCommonProtocol.recv`, which receives frames, from +:meth:`~legacy.protocol.WebSocketCommonProtocol.recv`, which receives frames, from the other methods, which send frames. While the connection is open, each frame is sent with a single write. Combined diff --git a/docs/extensions.rst b/docs/extensions.rst index 400034090..dea91219e 100644 --- a/docs/extensions.rst +++ b/docs/extensions.rst @@ -14,8 +14,9 @@ Per-Message Deflate, specified in :rfc:`7692`. Per-Message Deflate ------------------- -:func:`~server.serve()` and :func:`~client.connect` enable the Per-Message -Deflate extension by default. You can disable this with ``compression=None``. +:func:`~legacy.server.serve()` and :func:`~legacy.client.connect` enable the +Per-Message Deflate extension by default. You can disable this with +``compression=None``. You can also configure the Per-Message Deflate extension explicitly if you want to customize its parameters. diff --git a/docs/faq.rst b/docs/faq.rst index 4a083e2d0..eee14dda8 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -80,13 +80,13 @@ How do I get access HTTP headers, for example cookies? 
...................................................... To access HTTP headers during the WebSocket handshake, you can override -:attr:`~server.WebSocketServerProtocol.process_request`:: +:attr:`~legacy.server.WebSocketServerProtocol.process_request`:: async def process_request(self, path, request_headers): cookies = request_header["Cookie"] Once the connection is established, they're available in -:attr:`~protocol.WebSocketServerProtocol.request_headers`:: +:attr:`~legacy.protocol.WebSocketServerProtocol.request_headers`:: async def handler(websocket, path): cookies = websocket.request_headers["Cookie"] @@ -94,7 +94,7 @@ Once the connection is established, they're available in How do I get the IP address of the client connecting to my server? .................................................................. -It's available in :attr:`~protocol.WebSocketCommonProtocol.remote_address`:: +It's available in :attr:`~legacy.protocol.WebSocketCommonProtocol.remote_address`:: async def handler(websocket, path): remote_ip = websocket.remote_address[0] @@ -121,7 +121,7 @@ Providing a HTTP server is out of scope for websockets. It only aims at providing a WebSocket server. There's limited support for returning HTTP responses with the -:attr:`~server.WebSocketServerProtocol.process_request` hook. +:attr:`~legacy.server.WebSocketServerProtocol.process_request` hook. If you need more, pick a HTTP server and run it separately. Client side diff --git a/docs/intro.rst b/docs/intro.rst index 8aaaeddca..c77139cab 100644 --- a/docs/intro.rst +++ b/docs/intro.rst @@ -143,7 +143,7 @@ For getting messages from a ``producer`` coroutine and sending them:: In this example, ``producer`` represents your business logic for generating messages to send on the WebSocket connection. -:meth:`~protocol.WebSocketCommonProtocol.send` raises a +:meth:`~legacy.protocol.WebSocketCommonProtocol.send` raises a :exc:`~exceptions.ConnectionClosed` exception when the client disconnects, which breaks out of the ``while True`` loop. diff --git a/setup.py b/setup.py index f35819247..85d899cb4 100644 --- a/setup.py +++ b/setup.py @@ -24,7 +24,7 @@ if sys.version_info[:3] < (3, 6, 1): raise Exception("websockets requires Python >= 3.6.1.") -packages = ['websockets', 'websockets/extensions'] +packages = ['websockets', 'websockets/legacy', 'websockets/extensions'] ext_modules = [ setuptools.Extension( diff --git a/src/websockets/__init__.py b/src/websockets/__init__.py index c4accaca1..0242e7942 100644 --- a/src/websockets/__init__.py +++ b/src/websockets/__init__.py @@ -1,11 +1,13 @@ # This relies on each of the submodules having an __all__ variable. 
-from .auth import * # noqa -from .client import * # noqa +from .client import * from .datastructures import * # noqa from .exceptions import * # noqa -from .protocol import * # noqa -from .server import * # noqa +from .legacy.auth import * # noqa +from .legacy.client import * # noqa +from .legacy.protocol import * # noqa +from .legacy.server import * # noqa +from .server import * from .typing import * # noqa from .uri import * # noqa from .version import version as __version__ # noqa diff --git a/src/websockets/__main__.py b/src/websockets/__main__.py index bce3e4bbb..d44e34e74 100644 --- a/src/websockets/__main__.py +++ b/src/websockets/__main__.py @@ -6,8 +6,8 @@ import threading from typing import Any, Set -from .client import connect from .exceptions import ConnectionClosed, format_close +from .legacy.client import connect if sys.platform == "win32": diff --git a/src/websockets/auth.py b/src/websockets/auth.py index c1b7a0b1a..c8839c401 100644 --- a/src/websockets/auth.py +++ b/src/websockets/auth.py @@ -1,165 +1,4 @@ -""" -:mod:`websockets.auth` provides HTTP Basic Authentication according to -:rfc:`7235` and :rfc:`7617`. - -""" - - -import functools -import http -from typing import Any, Awaitable, Callable, Iterable, Optional, Tuple, Union, cast - -from .asyncio_server import HTTPResponse, WebSocketServerProtocol -from .datastructures import Headers -from .exceptions import InvalidHeader -from .headers import build_www_authenticate_basic, parse_authorization_basic +from .legacy.auth import BasicAuthWebSocketServerProtocol, basic_auth_protocol_factory __all__ = ["BasicAuthWebSocketServerProtocol", "basic_auth_protocol_factory"] - -Credentials = Tuple[str, str] - - -def is_credentials(value: Any) -> bool: - try: - username, password = value - except (TypeError, ValueError): - return False - else: - return isinstance(username, str) and isinstance(password, str) - - -class BasicAuthWebSocketServerProtocol(WebSocketServerProtocol): - """ - WebSocket server protocol that enforces HTTP Basic Auth. - - """ - - def __init__( - self, - *args: Any, - realm: str, - check_credentials: Callable[[str, str], Awaitable[bool]], - **kwargs: Any, - ) -> None: - self.realm = realm - self.check_credentials = check_credentials - super().__init__(*args, **kwargs) - - async def process_request( - self, path: str, request_headers: Headers - ) -> Optional[HTTPResponse]: - """ - Check HTTP Basic Auth and return a HTTP 401 or 403 response if needed. - - If authentication succeeds, the username of the authenticated user is - stored in the ``username`` attribute. 
- - """ - try: - authorization = request_headers["Authorization"] - except KeyError: - return ( - http.HTTPStatus.UNAUTHORIZED, - [("WWW-Authenticate", build_www_authenticate_basic(self.realm))], - b"Missing credentials\n", - ) - - try: - username, password = parse_authorization_basic(authorization) - except InvalidHeader: - return ( - http.HTTPStatus.UNAUTHORIZED, - [("WWW-Authenticate", build_www_authenticate_basic(self.realm))], - b"Unsupported credentials\n", - ) - - if not await self.check_credentials(username, password): - return ( - http.HTTPStatus.UNAUTHORIZED, - [("WWW-Authenticate", build_www_authenticate_basic(self.realm))], - b"Invalid credentials\n", - ) - - self.username = username - - return await super().process_request(path, request_headers) - - -def basic_auth_protocol_factory( - realm: str, - credentials: Optional[Union[Credentials, Iterable[Credentials]]] = None, - check_credentials: Optional[Callable[[str, str], Awaitable[bool]]] = None, - create_protocol: Optional[Callable[[Any], BasicAuthWebSocketServerProtocol]] = None, -) -> Callable[[Any], BasicAuthWebSocketServerProtocol]: - """ - Protocol factory that enforces HTTP Basic Auth. - - ``basic_auth_protocol_factory`` is designed to integrate with - :func:`~websockets.server.serve` like this:: - - websockets.serve( - ..., - create_protocol=websockets.basic_auth_protocol_factory( - realm="my dev server", - credentials=("hello", "iloveyou"), - ) - ) - - ``realm`` indicates the scope of protection. It should contain only ASCII - characters because the encoding of non-ASCII characters is undefined. - Refer to section 2.2 of :rfc:`7235` for details. - - ``credentials`` defines hard coded authorized credentials. It can be a - ``(username, password)`` pair or a list of such pairs. - - ``check_credentials`` defines a coroutine that checks whether credentials - are authorized. This coroutine receives ``username`` and ``password`` - arguments and returns a :class:`bool`. - - One of ``credentials`` or ``check_credentials`` must be provided but not - both. - - By default, ``basic_auth_protocol_factory`` creates a factory for building - :class:`BasicAuthWebSocketServerProtocol` instances. You can override this - with the ``create_protocol`` parameter. - - :param realm: scope of protection - :param credentials: hard coded credentials - :param check_credentials: coroutine that verifies credentials - :raises TypeError: if the credentials argument has the wrong type - - """ - if (credentials is None) == (check_credentials is None): - raise TypeError("provide either credentials or check_credentials") - - if credentials is not None: - if is_credentials(credentials): - - async def check_credentials(username: str, password: str) -> bool: - return (username, password) == credentials - - elif isinstance(credentials, Iterable): - credentials_list = list(credentials) - if all(is_credentials(item) for item in credentials_list): - credentials_dict = dict(credentials_list) - - async def check_credentials(username: str, password: str) -> bool: - return credentials_dict.get(username) == password - - else: - raise TypeError(f"invalid credentials argument: {credentials}") - - else: - raise TypeError(f"invalid credentials argument: {credentials}") - - if create_protocol is None: - # Not sure why mypy cannot figure this out. 
- create_protocol = cast( - Callable[[Any], BasicAuthWebSocketServerProtocol], - BasicAuthWebSocketServerProtocol, - ) - - return functools.partial( - create_protocol, realm=realm, check_credentials=check_credentials - ) diff --git a/src/websockets/client.py b/src/websockets/client.py index b7e407a45..8cababed5 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -2,7 +2,6 @@ import logging from typing import Generator, List, Optional, Sequence -from .asyncio_client import WebSocketClientProtocol, connect, unix_connect from .connection import CLIENT, CONNECTING, OPEN, Connection from .datastructures import Headers, HeadersLike, MultipleValuesError from .exceptions import ( @@ -25,6 +24,7 @@ ) from .http import USER_AGENT, build_host from .http11 import Request, Response +from .legacy.client import WebSocketClientProtocol, connect, unix_connect # noqa from .typing import ( ConnectionOption, ExtensionHeader, @@ -36,12 +36,7 @@ from .utils import accept_key, generate_key -__all__ = [ - "connect", - "unix_connect", - "ClientConnection", - "WebSocketClientProtocol", -] +__all__ = ["ClientConnection"] logger = logging.getLogger(__name__) @@ -64,7 +59,7 @@ def __init__( self.extra_headers = extra_headers self.key = generate_key() - def connect(self) -> Request: + def connect(self) -> Request: # noqa: F811 """ Create a WebSocket handshake request event to send to the server. diff --git a/src/websockets/exceptions.py b/src/websockets/exceptions.py index bdadae05e..e0860c743 100644 --- a/src/websockets/exceptions.py +++ b/src/websockets/exceptions.py @@ -301,7 +301,7 @@ class AbortHandshake(InvalidHandshake): This exception is an implementation detail. - The public API is :meth:`~server.WebSocketServerProtocol.process_request`. + The public API is :meth:`~legacy.server.WebSocketServerProtocol.process_request`. """ diff --git a/src/websockets/framing.py b/src/websockets/framing.py index b2996d788..2dadb5610 100644 --- a/src/websockets/framing.py +++ b/src/websockets/framing.py @@ -1,139 +1,6 @@ -""" -:mod:`websockets.framing` reads and writes WebSocket frames. - -It deals with a single frame at a time. Anything that depends on the sequence -of frames is implemented in :mod:`websockets.protocol`. - -See `section 5 of RFC 6455`_. - -.. _section 5 of RFC 6455: http://tools.ietf.org/html/rfc6455#section-5 - -""" - -import struct import warnings -from typing import Any, Awaitable, Callable, Optional, Sequence - -from .exceptions import PayloadTooBig, ProtocolError -from .frames import Frame as NewFrame, Opcode - -try: - from .speedups import apply_mask -except ImportError: # pragma: no cover - from .utils import apply_mask +from .legacy.framing import * # noqa warnings.warn("websockets.framing is deprecated", DeprecationWarning) - - -class Frame(NewFrame): - @classmethod - async def read( - cls, - reader: Callable[[int], Awaitable[bytes]], - *, - mask: bool, - max_size: Optional[int] = None, - extensions: Optional[Sequence["websockets.extensions.base.Extension"]] = None, - ) -> "Frame": - """ - Read a WebSocket frame. - - :param reader: coroutine that reads exactly the requested number of - bytes, unless the end of file is reached - :param mask: whether the frame should be masked i.e. 
whether the read - happens on the server side - :param max_size: maximum payload size in bytes - :param extensions: list of classes with a ``decode()`` method that - transforms the frame and return a new frame; extensions are applied - in reverse order - :raises ~websockets.exceptions.PayloadTooBig: if the frame exceeds - ``max_size`` - :raises ~websockets.exceptions.ProtocolError: if the frame - contains incorrect values - - """ - - # Read the header. - data = await reader(2) - head1, head2 = struct.unpack("!BB", data) - - # While not Pythonic, this is marginally faster than calling bool(). - fin = True if head1 & 0b10000000 else False - rsv1 = True if head1 & 0b01000000 else False - rsv2 = True if head1 & 0b00100000 else False - rsv3 = True if head1 & 0b00010000 else False - - try: - opcode = Opcode(head1 & 0b00001111) - except ValueError as exc: - raise ProtocolError("invalid opcode") from exc - - if (True if head2 & 0b10000000 else False) != mask: - raise ProtocolError("incorrect masking") - - length = head2 & 0b01111111 - if length == 126: - data = await reader(2) - (length,) = struct.unpack("!H", data) - elif length == 127: - data = await reader(8) - (length,) = struct.unpack("!Q", data) - if max_size is not None and length > max_size: - raise PayloadTooBig(f"over size limit ({length} > {max_size} bytes)") - if mask: - mask_bits = await reader(4) - - # Read the data. - data = await reader(length) - if mask: - data = apply_mask(data, mask_bits) - - frame = cls(fin, opcode, data, rsv1, rsv2, rsv3) - - if extensions is None: - extensions = [] - for extension in reversed(extensions): - frame = cls(*extension.decode(frame, max_size=max_size)) - - frame.check() - - return frame - - def write( - self, - write: Callable[[bytes], Any], - *, - mask: bool, - extensions: Optional[Sequence["websockets.extensions.base.Extension"]] = None, - ) -> None: - """ - Write a WebSocket frame. - - :param frame: frame to write - :param write: function that writes bytes - :param mask: whether the frame should be masked i.e. whether the write - happens on the client side - :param extensions: list of classes with an ``encode()`` method that - transform the frame and return a new frame; extensions are applied - in order - :raises ~websockets.exceptions.ProtocolError: if the frame - contains incorrect values - - """ - # The frame is written in a single call to write in order to prevent - # TCP fragmentation. See #68 for details. This also makes it safe to - # send frames concurrently from multiple coroutines. 
- write(self.serialize(mask=mask, extensions=extensions)) - - -# Backwards compatibility with previously documented public APIs -from .frames import parse_close # isort:skip # noqa -from .frames import prepare_ctrl as encode_data # isort:skip # noqa -from .frames import prepare_data # isort:skip # noqa -from .frames import serialize_close # isort:skip # noqa - - -# at the bottom to allow circular import, because Extension depends on Frame -import websockets.extensions.base # isort:skip # noqa diff --git a/src/websockets/handshake.py b/src/websockets/handshake.py index 3ff6c005d..cc4010d41 100644 --- a/src/websockets/handshake.py +++ b/src/websockets/handshake.py @@ -13,7 +13,7 @@ def build_request(headers: Headers) -> str: # pragma: no cover warnings.warn( "websockets.handshake.build_request is deprecated", DeprecationWarning ) - from .handshake_legacy import build_request + from .legacy.handshake import build_request return build_request(headers) @@ -22,7 +22,7 @@ def check_request(headers: Headers) -> str: # pragma: no cover warnings.warn( "websockets.handshake.check_request is deprecated", DeprecationWarning ) - from .handshake_legacy import check_request + from .legacy.handshake import check_request return check_request(headers) @@ -31,7 +31,7 @@ def build_response(headers: Headers, key: str) -> None: # pragma: no cover warnings.warn( "websockets.handshake.build_response is deprecated", DeprecationWarning ) - from .handshake_legacy import build_response + from .legacy.handshake import build_response return build_response(headers, key) @@ -40,6 +40,6 @@ def check_response(headers: Headers, key: str) -> None: # pragma: no cover warnings.warn( "websockets.handshake.check_response is deprecated", DeprecationWarning ) - from .handshake_legacy import check_response + from .legacy.handshake import check_response return check_response(headers, key) diff --git a/src/websockets/http.py b/src/websockets/http.py index ed3fe48d0..b05b78455 100644 --- a/src/websockets/http.py +++ b/src/websockets/http.py @@ -47,7 +47,7 @@ async def read_request( stream: asyncio.StreamReader, ) -> Tuple[str, Headers]: # pragma: no cover warnings.warn("websockets.http.read_request is deprecated", DeprecationWarning) - from .http_legacy import read_request + from .legacy.http import read_request return await read_request(stream) @@ -56,6 +56,6 @@ async def read_response( stream: asyncio.StreamReader, ) -> Tuple[int, str, Headers]: # pragma: no cover warnings.warn("websockets.http.read_response is deprecated", DeprecationWarning) - from .http_legacy import read_response + from .legacy.http import read_response return await read_response(stream) diff --git a/src/websockets/legacy/__init__.py b/src/websockets/legacy/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/websockets/legacy/auth.py b/src/websockets/legacy/auth.py new file mode 100644 index 000000000..8cb60429a --- /dev/null +++ b/src/websockets/legacy/auth.py @@ -0,0 +1,165 @@ +""" +:mod:`websockets.legacy.auth` provides HTTP Basic Authentication according to +:rfc:`7235` and :rfc:`7617`. 
+ +""" + + +import functools +import http +from typing import Any, Awaitable, Callable, Iterable, Optional, Tuple, Union, cast + +from ..datastructures import Headers +from ..exceptions import InvalidHeader +from ..headers import build_www_authenticate_basic, parse_authorization_basic +from .server import HTTPResponse, WebSocketServerProtocol + + +__all__ = ["BasicAuthWebSocketServerProtocol", "basic_auth_protocol_factory"] + +Credentials = Tuple[str, str] + + +def is_credentials(value: Any) -> bool: + try: + username, password = value + except (TypeError, ValueError): + return False + else: + return isinstance(username, str) and isinstance(password, str) + + +class BasicAuthWebSocketServerProtocol(WebSocketServerProtocol): + """ + WebSocket server protocol that enforces HTTP Basic Auth. + + """ + + def __init__( + self, + *args: Any, + realm: str, + check_credentials: Callable[[str, str], Awaitable[bool]], + **kwargs: Any, + ) -> None: + self.realm = realm + self.check_credentials = check_credentials + super().__init__(*args, **kwargs) + + async def process_request( + self, path: str, request_headers: Headers + ) -> Optional[HTTPResponse]: + """ + Check HTTP Basic Auth and return a HTTP 401 or 403 response if needed. + + If authentication succeeds, the username of the authenticated user is + stored in the ``username`` attribute. + + """ + try: + authorization = request_headers["Authorization"] + except KeyError: + return ( + http.HTTPStatus.UNAUTHORIZED, + [("WWW-Authenticate", build_www_authenticate_basic(self.realm))], + b"Missing credentials\n", + ) + + try: + username, password = parse_authorization_basic(authorization) + except InvalidHeader: + return ( + http.HTTPStatus.UNAUTHORIZED, + [("WWW-Authenticate", build_www_authenticate_basic(self.realm))], + b"Unsupported credentials\n", + ) + + if not await self.check_credentials(username, password): + return ( + http.HTTPStatus.UNAUTHORIZED, + [("WWW-Authenticate", build_www_authenticate_basic(self.realm))], + b"Invalid credentials\n", + ) + + self.username = username + + return await super().process_request(path, request_headers) + + +def basic_auth_protocol_factory( + realm: str, + credentials: Optional[Union[Credentials, Iterable[Credentials]]] = None, + check_credentials: Optional[Callable[[str, str], Awaitable[bool]]] = None, + create_protocol: Optional[Callable[[Any], BasicAuthWebSocketServerProtocol]] = None, +) -> Callable[[Any], BasicAuthWebSocketServerProtocol]: + """ + Protocol factory that enforces HTTP Basic Auth. + + ``basic_auth_protocol_factory`` is designed to integrate with + :func:`~websockets.legacy.server.serve` like this:: + + websockets.serve( + ..., + create_protocol=websockets.basic_auth_protocol_factory( + realm="my dev server", + credentials=("hello", "iloveyou"), + ) + ) + + ``realm`` indicates the scope of protection. It should contain only ASCII + characters because the encoding of non-ASCII characters is undefined. + Refer to section 2.2 of :rfc:`7235` for details. + + ``credentials`` defines hard coded authorized credentials. It can be a + ``(username, password)`` pair or a list of such pairs. + + ``check_credentials`` defines a coroutine that checks whether credentials + are authorized. This coroutine receives ``username`` and ``password`` + arguments and returns a :class:`bool`. + + One of ``credentials`` or ``check_credentials`` must be provided but not + both. + + By default, ``basic_auth_protocol_factory`` creates a factory for building + :class:`BasicAuthWebSocketServerProtocol` instances. 
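
For reference, a minimal sketch of plugging a ``check_credentials`` coroutine into this factory; the ``USERS`` dict, the ``handler`` coroutine, and the address are illustrative placeholders, and a real deployment would verify passwords against a proper credential store::

    import websockets

    USERS = {"hello": "iloveyou"}  # hypothetical in-memory credential store

    async def check_credentials(username: str, password: str) -> bool:
        # Accept only known username/password pairs.
        return USERS.get(username) == password

    async def handler(websocket, path):
        # BasicAuthWebSocketServerProtocol stores the authenticated user.
        await websocket.send(f"Hello {websocket.username}!")

    start_server = websockets.serve(
        handler,
        "localhost",
        8765,
        create_protocol=websockets.basic_auth_protocol_factory(
            realm="my dev server",
            check_credentials=check_credentials,
        ),
    )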
You can override this + with the ``create_protocol`` parameter. + + :param realm: scope of protection + :param credentials: hard coded credentials + :param check_credentials: coroutine that verifies credentials + :raises TypeError: if the credentials argument has the wrong type + + """ + if (credentials is None) == (check_credentials is None): + raise TypeError("provide either credentials or check_credentials") + + if credentials is not None: + if is_credentials(credentials): + + async def check_credentials(username: str, password: str) -> bool: + return (username, password) == credentials + + elif isinstance(credentials, Iterable): + credentials_list = list(credentials) + if all(is_credentials(item) for item in credentials_list): + credentials_dict = dict(credentials_list) + + async def check_credentials(username: str, password: str) -> bool: + return credentials_dict.get(username) == password + + else: + raise TypeError(f"invalid credentials argument: {credentials}") + + else: + raise TypeError(f"invalid credentials argument: {credentials}") + + if create_protocol is None: + # Not sure why mypy cannot figure this out. + create_protocol = cast( + Callable[[Any], BasicAuthWebSocketServerProtocol], + BasicAuthWebSocketServerProtocol, + ) + + return functools.partial( + create_protocol, realm=realm, check_credentials=check_credentials + ) diff --git a/src/websockets/asyncio_client.py b/src/websockets/legacy/client.py similarity index 97% rename from src/websockets/asyncio_client.py rename to src/websockets/legacy/client.py index 3f406170a..27f6e8209 100644 --- a/src/websockets/asyncio_client.py +++ b/src/websockets/legacy/client.py @@ -1,5 +1,5 @@ """ -:mod:`websockets.client` defines the WebSocket client APIs. +:mod:`websockets.legacy.client` defines the WebSocket client APIs. """ @@ -11,8 +11,8 @@ from types import TracebackType from typing import Any, Callable, Generator, List, Optional, Sequence, Tuple, Type, cast -from .datastructures import Headers, HeadersLike -from .exceptions import ( +from ..datastructures import Headers, HeadersLike +from ..exceptions import ( InvalidHandshake, InvalidHeader, InvalidMessage, @@ -21,21 +21,21 @@ RedirectHandshake, SecurityError, ) -from .extensions.base import ClientExtensionFactory, Extension -from .extensions.permessage_deflate import enable_client_permessage_deflate -from .handshake_legacy import build_request, check_response -from .headers import ( +from ..extensions.base import ClientExtensionFactory, Extension +from ..extensions.permessage_deflate import enable_client_permessage_deflate +from ..headers import ( build_authorization_basic, build_extension, build_subprotocol, parse_extension, parse_subprotocol, ) -from .http import USER_AGENT, build_host -from .http_legacy import read_response +from ..http import USER_AGENT, build_host +from ..typing import ExtensionHeader, Origin, Subprotocol +from ..uri import WebSocketURI, parse_uri +from .handshake import build_request, check_response +from .http import read_response from .protocol import WebSocketCommonProtocol -from .typing import ExtensionHeader, Origin, Subprotocol -from .uri import WebSocketURI, parse_uri __all__ = ["connect", "unix_connect", "WebSocketClientProtocol"] diff --git a/src/websockets/legacy/framing.py b/src/websockets/legacy/framing.py new file mode 100644 index 000000000..e41c295dd --- /dev/null +++ b/src/websockets/legacy/framing.py @@ -0,0 +1,135 @@ +""" +:mod:`websockets.legacy.framing` reads and writes WebSocket frames. + +It deals with a single frame at a time. 
Anything that depends on the sequence +of frames is implemented in :mod:`websockets.legacy.protocol`. + +See `section 5 of RFC 6455`_. + +.. _section 5 of RFC 6455: http://tools.ietf.org/html/rfc6455#section-5 + +""" + +import struct +from typing import Any, Awaitable, Callable, Optional, Sequence + +from ..exceptions import PayloadTooBig, ProtocolError +from ..frames import Frame as NewFrame, Opcode + + +try: + from ..speedups import apply_mask +except ImportError: # pragma: no cover + from ..utils import apply_mask + + +class Frame(NewFrame): + @classmethod + async def read( + cls, + reader: Callable[[int], Awaitable[bytes]], + *, + mask: bool, + max_size: Optional[int] = None, + extensions: Optional[Sequence["websockets.extensions.base.Extension"]] = None, + ) -> "Frame": + """ + Read a WebSocket frame. + + :param reader: coroutine that reads exactly the requested number of + bytes, unless the end of file is reached + :param mask: whether the frame should be masked i.e. whether the read + happens on the server side + :param max_size: maximum payload size in bytes + :param extensions: list of classes with a ``decode()`` method that + transforms the frame and return a new frame; extensions are applied + in reverse order + :raises ~websockets.exceptions.PayloadTooBig: if the frame exceeds + ``max_size`` + :raises ~websockets.exceptions.ProtocolError: if the frame + contains incorrect values + + """ + + # Read the header. + data = await reader(2) + head1, head2 = struct.unpack("!BB", data) + + # While not Pythonic, this is marginally faster than calling bool(). + fin = True if head1 & 0b10000000 else False + rsv1 = True if head1 & 0b01000000 else False + rsv2 = True if head1 & 0b00100000 else False + rsv3 = True if head1 & 0b00010000 else False + + try: + opcode = Opcode(head1 & 0b00001111) + except ValueError as exc: + raise ProtocolError("invalid opcode") from exc + + if (True if head2 & 0b10000000 else False) != mask: + raise ProtocolError("incorrect masking") + + length = head2 & 0b01111111 + if length == 126: + data = await reader(2) + (length,) = struct.unpack("!H", data) + elif length == 127: + data = await reader(8) + (length,) = struct.unpack("!Q", data) + if max_size is not None and length > max_size: + raise PayloadTooBig(f"over size limit ({length} > {max_size} bytes)") + if mask: + mask_bits = await reader(4) + + # Read the data. + data = await reader(length) + if mask: + data = apply_mask(data, mask_bits) + + frame = cls(fin, opcode, data, rsv1, rsv2, rsv3) + + if extensions is None: + extensions = [] + for extension in reversed(extensions): + frame = cls(*extension.decode(frame, max_size=max_size)) + + frame.check() + + return frame + + def write( + self, + write: Callable[[bytes], Any], + *, + mask: bool, + extensions: Optional[Sequence["websockets.extensions.base.Extension"]] = None, + ) -> None: + """ + Write a WebSocket frame. + + :param frame: frame to write + :param write: function that writes bytes + :param mask: whether the frame should be masked i.e. whether the write + happens on the client side + :param extensions: list of classes with an ``encode()`` method that + transform the frame and return a new frame; extensions are applied + in order + :raises ~websockets.exceptions.ProtocolError: if the frame + contains incorrect values + + """ + # The frame is written in a single call to write in order to prevent + # TCP fragmentation. See #68 for details. This also makes it safe to + # send frames concurrently from multiple coroutines. 
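
As a worked example of the header layout decoded above, the bytes of a short masked text frame can be computed by hand. This stands alone and only illustrates the bit masks; the 4-byte mask reuses the sample masking key from RFC 6455::

    import struct

    # FIN=1, RSV1-3=0, opcode=0x1 (text); mask bit set, payload length 2 < 126.
    head1 = 0b10000000 | 0x01
    head2 = 0b10000000 | 2
    mask_bits = b"\x37\xfa\x21\x3d"  # normally chosen at random by the client
    payload = bytes(b ^ mask_bits[i % 4] for i, b in enumerate(b"Hi"))

    frame = struct.pack("!BB", head1, head2) + mask_bits + payload
    assert frame.hex() == "818237fa213d7f93"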
+ write(self.serialize(mask=mask, extensions=extensions)) + + +# Backwards compatibility with previously documented public APIs +from ..frames import parse_close # isort:skip # noqa +from ..frames import prepare_ctrl as encode_data # isort:skip # noqa +from ..frames import prepare_data # isort:skip # noqa +from ..frames import serialize_close # isort:skip # noqa + + +# at the bottom to allow circular import, because Extension depends on Frame +import websockets.extensions.base # isort:skip # noqa diff --git a/src/websockets/handshake_legacy.py b/src/websockets/legacy/handshake.py similarity index 93% rename from src/websockets/handshake_legacy.py rename to src/websockets/legacy/handshake.py index d34ca5f7f..44da72d21 100644 --- a/src/websockets/handshake_legacy.py +++ b/src/websockets/legacy/handshake.py @@ -1,5 +1,5 @@ """ -:mod:`websockets.handshake` provides helpers for the WebSocket handshake. +:mod:`websockets.legacy.handshake` provides helpers for the WebSocket handshake. See `section 4 of RFC 6455`_. @@ -29,11 +29,11 @@ import binascii from typing import List -from .datastructures import Headers, MultipleValuesError -from .exceptions import InvalidHeader, InvalidHeaderValue, InvalidUpgrade -from .headers import parse_connection, parse_upgrade -from .typing import ConnectionOption, UpgradeProtocol -from .utils import accept_key as accept, generate_key +from ..datastructures import Headers, MultipleValuesError +from ..exceptions import InvalidHeader, InvalidHeaderValue, InvalidUpgrade +from ..headers import parse_connection, parse_upgrade +from ..typing import ConnectionOption, UpgradeProtocol +from ..utils import accept_key as accept, generate_key __all__ = ["build_request", "check_request", "build_response", "check_response"] diff --git a/src/websockets/http_legacy.py b/src/websockets/legacy/http.py similarity index 98% rename from src/websockets/http_legacy.py rename to src/websockets/legacy/http.py index 5afe5f898..c18e08e8d 100644 --- a/src/websockets/http_legacy.py +++ b/src/websockets/legacy/http.py @@ -2,8 +2,8 @@ import re from typing import Tuple -from .datastructures import Headers -from .exceptions import SecurityError +from ..datastructures import Headers +from ..exceptions import SecurityError __all__ = ["read_request", "read_response"] diff --git a/src/websockets/legacy/protocol.py b/src/websockets/legacy/protocol.py new file mode 100644 index 000000000..e4592b8a0 --- /dev/null +++ b/src/websockets/legacy/protocol.py @@ -0,0 +1,1459 @@ +""" +:mod:`websockets.legacy.protocol` handles WebSocket control and data frames. + +See `sections 4 to 8 of RFC 6455`_. + +.. 
_sections 4 to 8 of RFC 6455: http://tools.ietf.org/html/rfc6455#section-4 + +""" + +import asyncio +import codecs +import collections +import enum +import logging +import random +import struct +import sys +import warnings +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Awaitable, + Deque, + Dict, + Iterable, + List, + Mapping, + Optional, + Union, + cast, +) + +from ..datastructures import Headers +from ..exceptions import ( + ConnectionClosed, + ConnectionClosedError, + ConnectionClosedOK, + InvalidState, + PayloadTooBig, + ProtocolError, +) +from ..extensions.base import Extension +from ..frames import ( + OP_BINARY, + OP_CLOSE, + OP_CONT, + OP_PING, + OP_PONG, + OP_TEXT, + Opcode, + parse_close, + prepare_ctrl, + prepare_data, + serialize_close, +) +from ..typing import Data, Subprotocol +from .framing import Frame + + +__all__ = ["WebSocketCommonProtocol"] + +logger = logging.getLogger(__name__) + + +# A WebSocket connection goes through the following four states, in order: + + +class State(enum.IntEnum): + CONNECTING, OPEN, CLOSING, CLOSED = range(4) + + +# In order to ensure consistency, the code always checks the current value of +# WebSocketCommonProtocol.state before assigning a new value and never yields +# between the check and the assignment. + + +class WebSocketCommonProtocol(asyncio.Protocol): + """ + :class:`~asyncio.Protocol` subclass implementing the data transfer phase. + + Once the WebSocket connection is established, during the data transfer + phase, the protocol is almost symmetrical between the server side and the + client side. :class:`WebSocketCommonProtocol` implements logic that's + shared between servers and clients.. + + Subclasses such as + :class:`~websockets.legacy.server.WebSocketServerProtocol` and + :class:`~websockets.legacy.client.WebSocketClientProtocol` implement the + opening handshake, which is different between servers and clients. + + :class:`WebSocketCommonProtocol` performs four functions: + + * It runs a task that stores incoming data frames in a queue and makes + them available with the :meth:`recv` coroutine. + * It sends outgoing data frames with the :meth:`send` coroutine. + * It deals with control frames automatically. + * It performs the closing handshake. + + :class:`WebSocketCommonProtocol` supports asynchronous iteration:: + + async for message in websocket: + await process(message) + + The iterator yields incoming messages. It exits normally when the + connection is closed with the close code 1000 (OK) or 1001 (going away). + It raises a :exc:`~websockets.exceptions.ConnectionClosedError` exception + when the connection is closed with any other code. + + Once the connection is open, a `Ping frame`_ is sent every + ``ping_interval`` seconds. This serves as a keepalive. It helps keeping + the connection open, especially in the presence of proxies with short + timeouts on inactive connections. Set ``ping_interval`` to ``None`` to + disable this behavior. + + .. _Ping frame: https://tools.ietf.org/html/rfc6455#section-5.5.2 + + If the corresponding `Pong frame`_ isn't received within ``ping_timeout`` + seconds, the connection is considered unusable and is closed with + code 1011. This ensures that the remote endpoint remains responsive. Set + ``ping_timeout`` to ``None`` to disable this behavior. + + .. _Pong frame: https://tools.ietf.org/html/rfc6455#section-5.5.3 + + The ``close_timeout`` parameter defines a maximum wait time in seconds for + completing the closing handshake and terminating the TCP connection. 
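
The keepalive and timeout settings described here are ordinary constructor arguments, so they can be tuned per connection; a small sketch with illustrative values and URI::

    import websockets

    async def consumer():
        # Ping every 10 s, drop the connection if the pong takes over 5 s,
        # and allow at most 2 s for the closing handshake.
        async with websockets.connect(
            "ws://localhost:8765",
            ping_interval=10,
            ping_timeout=5,
            close_timeout=2,
        ) as websocket:
            async for message in websocket:
                print(message)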
+ :meth:`close` completes in at most ``4 * close_timeout`` on the server + side and ``5 * close_timeout`` on the client side. + + ``close_timeout`` needs to be a parameter of the protocol because + ``websockets`` usually calls :meth:`close` implicitly: + + - on the server side, when the connection handler terminates, + - on the client side, when exiting the context manager for the connection. + + To apply a timeout to any other API, wrap it in :func:`~asyncio.wait_for`. + + The ``max_size`` parameter enforces the maximum size for incoming messages + in bytes. The default value is 1 MiB. ``None`` disables the limit. If a + message larger than the maximum size is received, :meth:`recv` will + raise :exc:`~websockets.exceptions.ConnectionClosedError` and the + connection will be closed with code 1009. + + The ``max_queue`` parameter sets the maximum length of the queue that + holds incoming messages. The default value is ``32``. ``None`` disables + the limit. Messages are added to an in-memory queue when they're received; + then :meth:`recv` pops from that queue. In order to prevent excessive + memory consumption when messages are received faster than they can be + processed, the queue must be bounded. If the queue fills up, the protocol + stops processing incoming data until :meth:`recv` is called. In this + situation, various receive buffers (at least in ``asyncio`` and in the OS) + will fill up, then the TCP receive window will shrink, slowing down + transmission to avoid packet loss. + + Since Python can use up to 4 bytes of memory to represent a single + character, each connection may use up to ``4 * max_size * max_queue`` + bytes of memory to store incoming messages. By default, this is 128 MiB. + You may want to lower the limits, depending on your application's + requirements. + + The ``read_limit`` argument sets the high-water limit of the buffer for + incoming bytes. The low-water limit is half the high-water limit. The + default value is 64 KiB, half of asyncio's default (based on the current + implementation of :class:`~asyncio.StreamReader`). + + The ``write_limit`` argument sets the high-water limit of the buffer for + outgoing bytes. The low-water limit is a quarter of the high-water limit. + The default value is 64 KiB, equal to asyncio's default (based on the + current implementation of ``FlowControlMixin``). + + As soon as the HTTP request and response in the opening handshake are + processed: + + * the request path is available in the :attr:`path` attribute; + * the request and response HTTP headers are available in the + :attr:`request_headers` and :attr:`response_headers` attributes, + which are :class:`~websockets.http.Headers` instances. + + If a subprotocol was negotiated, it's available in the :attr:`subprotocol` + attribute. + + Once the connection is closed, the code is available in the + :attr:`close_code` attribute and the reason in :attr:`close_reason`. + + All these attributes must be treated as read-only. + + """ + + # There are only two differences between the client-side and server-side + # behavior: masking the payload and closing the underlying TCP connection. + # Set is_client = True/False and side = "client"/"server" to pick a side. 
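
To make the ``4 * max_size * max_queue`` bound above concrete, the defaults give ``4 * 2 ** 20 * 32`` bytes, i.e. 128 MiB per connection. A sketch of trading it down on a memory-constrained server, where ``handler`` is a placeholder for the application handler::

    import websockets

    # 4 bytes/char * 64 KiB messages * 8 queued messages = 2 MiB per connection.
    start_server = websockets.serve(
        handler,
        "localhost",
        8765,
        max_size=2 ** 16,  # reject messages larger than 64 KiB
        max_queue=8,       # apply backpressure after 8 unread messages
    )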
+ is_client: bool + side: str = "undefined" + + def __init__( + self, + *, + ping_interval: Optional[float] = 20, + ping_timeout: Optional[float] = 20, + close_timeout: Optional[float] = None, + max_size: Optional[int] = 2 ** 20, + max_queue: Optional[int] = 2 ** 5, + read_limit: int = 2 ** 16, + write_limit: int = 2 ** 16, + loop: Optional[asyncio.AbstractEventLoop] = None, + # The following arguments are kept only for backwards compatibility. + host: Optional[str] = None, + port: Optional[int] = None, + secure: Optional[bool] = None, + legacy_recv: bool = False, + timeout: Optional[float] = None, + ) -> None: + # Backwards compatibility: close_timeout used to be called timeout. + if timeout is None: + timeout = 10 + else: + warnings.warn("rename timeout to close_timeout", DeprecationWarning) + # If both are specified, timeout is ignored. + if close_timeout is None: + close_timeout = timeout + + self.ping_interval = ping_interval + self.ping_timeout = ping_timeout + self.close_timeout = close_timeout + self.max_size = max_size + self.max_queue = max_queue + self.read_limit = read_limit + self.write_limit = write_limit + + if loop is None: + loop = asyncio.get_event_loop() + self.loop = loop + + self._host = host + self._port = port + self._secure = secure + self.legacy_recv = legacy_recv + + # Configure read buffer limits. The high-water limit is defined by + # ``self.read_limit``. The ``limit`` argument controls the line length + # limit and half the buffer limit of :class:`~asyncio.StreamReader`. + # That's why it must be set to half of ``self.read_limit``. + self.reader = asyncio.StreamReader(limit=read_limit // 2, loop=loop) + + # Copied from asyncio.FlowControlMixin + self._paused = False + self._drain_waiter: Optional[asyncio.Future[None]] = None + + self._drain_lock = asyncio.Lock( + loop=loop if sys.version_info[:2] < (3, 8) else None + ) + + # This class implements the data transfer and closing handshake, which + # are shared between the client-side and the server-side. + # Subclasses implement the opening handshake and, on success, execute + # :meth:`connection_open` to change the state to OPEN. + self.state = State.CONNECTING + logger.debug("%s - state = CONNECTING", self.side) + + # HTTP protocol parameters. + self.path: str + self.request_headers: Headers + self.response_headers: Headers + + # WebSocket protocol parameters. + self.extensions: List[Extension] = [] + self.subprotocol: Optional[Subprotocol] = None + + # The close code and reason are set when receiving a close frame or + # losing the TCP connection. + self.close_code: int + self.close_reason: str + + # Completed when the connection state becomes CLOSED. Translates the + # :meth:`connection_lost` callback to a :class:`~asyncio.Future` + # that can be awaited. (Other :class:`~asyncio.Protocol` callbacks are + # translated by ``self.stream_reader``). + self.connection_lost_waiter: asyncio.Future[None] = loop.create_future() + + # Queue of received messages. + self.messages: Deque[Data] = collections.deque() + self._pop_message_waiter: Optional[asyncio.Future[None]] = None + self._put_message_waiter: Optional[asyncio.Future[None]] = None + + # Protect sending fragmented messages. + self._fragmented_message_waiter: Optional[asyncio.Future[None]] = None + + # Mapping of ping IDs to pong waiters, in chronological order. + self.pings: Dict[bytes, asyncio.Future[None]] = {} + + # Task running the data transfer. + self.transfer_data_task: asyncio.Task[None] + + # Exception that occurred during data transfer, if any. 
+ self.transfer_data_exc: Optional[BaseException] = None + + # Task sending keepalive pings. + self.keepalive_ping_task: asyncio.Task[None] + + # Task closing the TCP connection. + self.close_connection_task: asyncio.Task[None] + + # Copied from asyncio.FlowControlMixin + async def _drain_helper(self) -> None: # pragma: no cover + if self.connection_lost_waiter.done(): + raise ConnectionResetError("Connection lost") + if not self._paused: + return + waiter = self._drain_waiter + assert waiter is None or waiter.cancelled() + waiter = self.loop.create_future() + self._drain_waiter = waiter + await waiter + + # Copied from asyncio.StreamWriter + async def _drain(self) -> None: # pragma: no cover + if self.reader is not None: + exc = self.reader.exception() + if exc is not None: + raise exc + if self.transport is not None: + if self.transport.is_closing(): + # Yield to the event loop so connection_lost() may be + # called. Without this, _drain_helper() would return + # immediately, and code that calls + # write(...); yield from drain() + # in a loop would never call connection_lost(), so it + # would not see an error when the socket is closed. + await asyncio.sleep( + 0, loop=self.loop if sys.version_info[:2] < (3, 8) else None + ) + await self._drain_helper() + + def connection_open(self) -> None: + """ + Callback when the WebSocket opening handshake completes. + + Enter the OPEN state and start the data transfer phase. + + """ + # 4.1. The WebSocket Connection is Established. + assert self.state is State.CONNECTING + self.state = State.OPEN + logger.debug("%s - state = OPEN", self.side) + # Start the task that receives incoming WebSocket messages. + self.transfer_data_task = self.loop.create_task(self.transfer_data()) + # Start the task that sends pings at regular intervals. + self.keepalive_ping_task = self.loop.create_task(self.keepalive_ping()) + # Start the task that eventually closes the TCP connection. + self.close_connection_task = self.loop.create_task(self.close_connection()) + + @property + def host(self) -> Optional[str]: + alternative = "remote_address" if self.is_client else "local_address" + warnings.warn(f"use {alternative}[0] instead of host", DeprecationWarning) + return self._host + + @property + def port(self) -> Optional[int]: + alternative = "remote_address" if self.is_client else "local_address" + warnings.warn(f"use {alternative}[1] instead of port", DeprecationWarning) + return self._port + + @property + def secure(self) -> Optional[bool]: + warnings.warn("don't use secure", DeprecationWarning) + return self._secure + + # Public API + + @property + def local_address(self) -> Any: + """ + Local address of the connection as a ``(host, port)`` tuple. + + When the connection isn't open, ``local_address`` is ``None``. + + """ + try: + transport = self.transport + except AttributeError: + return None + else: + return transport.get_extra_info("sockname") + + @property + def remote_address(self) -> Any: + """ + Remote address of the connection as a ``(host, port)`` tuple. + + When the connection isn't open, ``remote_address`` is ``None``. + + """ + try: + transport = self.transport + except AttributeError: + return None + else: + return transport.get_extra_info("peername") + + @property + def open(self) -> bool: + """ + ``True`` when the connection is usable. + + It may be used to detect disconnections. However, this approach is + discouraged per the EAFP_ principle. 
+ + When ``open`` is ``False``, using the connection raises a + :exc:`~websockets.exceptions.ConnectionClosed` exception. + + .. _EAFP: https://docs.python.org/3/glossary.html#term-eafp + + """ + return self.state is State.OPEN and not self.transfer_data_task.done() + + @property + def closed(self) -> bool: + """ + ``True`` once the connection is closed. + + Be aware that both :attr:`open` and :attr:`closed` are ``False`` during + the opening and closing sequences. + + """ + return self.state is State.CLOSED + + async def wait_closed(self) -> None: + """ + Wait until the connection is closed. + + This is identical to :attr:`closed`, except it can be awaited. + + This can make it easier to handle connection termination, regardless + of its cause, in tasks that interact with the WebSocket connection. + + """ + await asyncio.shield(self.connection_lost_waiter) + + async def __aiter__(self) -> AsyncIterator[Data]: + """ + Iterate on received messages. + + Exit normally when the connection is closed with code 1000 or 1001. + + Raise an exception in other cases. + + """ + try: + while True: + yield await self.recv() + except ConnectionClosedOK: + return + + async def recv(self) -> Data: + """ + Receive the next message. + + Return a :class:`str` for a text frame and :class:`bytes` for a binary + frame. + + When the end of the message stream is reached, :meth:`recv` raises + :exc:`~websockets.exceptions.ConnectionClosed`. Specifically, it + raises :exc:`~websockets.exceptions.ConnectionClosedOK` after a normal + connection closure and + :exc:`~websockets.exceptions.ConnectionClosedError` after a protocol + error or a network failure. + + .. versionchanged:: 3.0 + + :meth:`recv` used to return ``None`` instead. Refer to the + changelog for details. + + Canceling :meth:`recv` is safe. There's no risk of losing the next + message. The next invocation of :meth:`recv` will return it. This + makes it possible to enforce a timeout by wrapping :meth:`recv` in + :func:`~asyncio.wait_for`. + + :raises ~websockets.exceptions.ConnectionClosed: when the + connection is closed + :raises RuntimeError: if two coroutines call :meth:`recv` concurrently + + """ + if self._pop_message_waiter is not None: + raise RuntimeError( + "cannot call recv while another coroutine " + "is already waiting for the next message" + ) + + # Don't await self.ensure_open() here: + # - messages could be available in the queue even if the connection + # is closed; + # - messages could be received before the closing frame even if the + # connection is closing. + + # Wait until there's a message in the queue (if necessary) or the + # connection is closed. + while len(self.messages) <= 0: + pop_message_waiter: asyncio.Future[None] = self.loop.create_future() + self._pop_message_waiter = pop_message_waiter + try: + # If asyncio.wait() is canceled, it doesn't cancel + # pop_message_waiter and self.transfer_data_task. + await asyncio.wait( + [pop_message_waiter, self.transfer_data_task], + loop=self.loop if sys.version_info[:2] < (3, 8) else None, + return_when=asyncio.FIRST_COMPLETED, + ) + finally: + self._pop_message_waiter = None + + # If asyncio.wait(...) exited because self.transfer_data_task + # completed before receiving a new message, raise a suitable + # exception (or return None if legacy_recv is enabled). + if not pop_message_waiter.done(): + if self.legacy_recv: + return None # type: ignore + else: + # Wait until the connection is closed to raise + # ConnectionClosed with the correct code and reason. 
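
Because canceling :meth:`recv` is safe, a per-message timeout is just a matter of wrapping it in :func:`~asyncio.wait_for`; a sketch with an arbitrary 5-second timeout::

    import asyncio
    import websockets

    async def poll(websocket):
        while True:
            try:
                message = await asyncio.wait_for(websocket.recv(), timeout=5)
            except asyncio.TimeoutError:
                continue  # nothing arrived yet; the next recv() loses nothing
            except websockets.ConnectionClosedOK:
                return    # the peer closed the connection normally
            print(message)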
+                    await self.ensure_open()
+
+        # Pop a message from the queue.
+        message = self.messages.popleft()
+
+        # Notify transfer_data().
+        if self._put_message_waiter is not None:
+            self._put_message_waiter.set_result(None)
+            self._put_message_waiter = None
+
+        return message
+
+    async def send(
+        self, message: Union[Data, Iterable[Data], AsyncIterable[Data]]
+    ) -> None:
+        """
+        Send a message.
+
+        A string (:class:`str`) is sent as a `Text frame`_. A bytestring or
+        bytes-like object (:class:`bytes`, :class:`bytearray`, or
+        :class:`memoryview`) is sent as a `Binary frame`_.
+
+        .. _Text frame: https://tools.ietf.org/html/rfc6455#section-5.6
+        .. _Binary frame: https://tools.ietf.org/html/rfc6455#section-5.6
+
+        :meth:`send` also accepts an iterable or an asynchronous iterable of
+        strings, bytestrings, or bytes-like objects. In that case the message
+        is fragmented. Each item is treated as a message fragment and sent in
+        its own frame. All items must be of the same type, or else
+        :meth:`send` will raise a :exc:`TypeError` and the connection will be
+        closed.
+
+        :meth:`send` rejects dict-like objects because this is often an error.
+        If you wish to send the keys of a dict-like object as fragments, call
+        its :meth:`~dict.keys` method and pass the result to :meth:`send`.
+
+        Canceling :meth:`send` is discouraged. Instead, you should close the
+        connection with :meth:`close`. Indeed, there are only two situations
+        where :meth:`send` yields control to the event loop:
+
+        1. The write buffer is full. If you don't want to wait until enough
+           data is sent, your only alternative is to close the connection.
+           :meth:`close` will likely time out then abort the TCP connection.
+        2. ``message`` is an asynchronous iterator. Stopping in the middle of
+           a fragmented message will cause a protocol error. Closing the
+           connection has the same effect.
+
+        :raises TypeError: for unsupported inputs
+
+        """
+        await self.ensure_open()
+
+        # While sending a fragmented message, prevent sending other messages
+        # until all fragments are sent.
+        while self._fragmented_message_waiter is not None:
+            await asyncio.shield(self._fragmented_message_waiter)
+
+        # Unfragmented message -- this case must be handled first because
+        # strings and bytes-like objects are iterable.
+
+        if isinstance(message, (str, bytes, bytearray, memoryview)):
+            opcode, data = prepare_data(message)
+            await self.write_frame(True, opcode, data)
+
+        # Catch a common mistake -- passing a dict to send().
+
+        elif isinstance(message, Mapping):
+            raise TypeError("data is a dict-like object")
+
+        # Fragmented message -- regular iterator.
+
+        elif isinstance(message, Iterable):
+
+            # Work around https://github.com/python/mypy/issues/6227
+            message = cast(Iterable[Data], message)
+
+            iter_message = iter(message)
+            try:
+                message_chunk = next(iter_message)
+            except StopIteration:
+                return
+            opcode, data = prepare_data(message_chunk)
+
+            self._fragmented_message_waiter = asyncio.Future()
+            try:
+                # First fragment.
+                await self.write_frame(False, opcode, data)
+
+                # Other fragments.
+                for message_chunk in iter_message:
+                    confirm_opcode, data = prepare_data(message_chunk)
+                    if confirm_opcode != opcode:
+                        raise TypeError("data contains inconsistent types")
+                    await self.write_frame(False, OP_CONT, data)
+
+                # Final fragment.
+                await self.write_frame(True, OP_CONT, b"")
+
+            except Exception:
+                # We're half-way through a fragmented message and we can't
+                # complete it. This makes the connection unusable.
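
Fragmentation therefore only requires passing an iterable (or asynchronous iterable) of same-typed items; a sketch that streams a file as one fragmented binary message, with the chunk size chosen arbitrarily::

    async def send_file(websocket, path):
        def chunks():
            # Every 4 KiB chunk becomes one fragment of a single message.
            with open(path, "rb") as file:
                while True:
                    chunk = file.read(4096)
                    if not chunk:
                        return
                    yield chunk

        await websocket.send(chunks())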
+ self.fail_connection(1011) + raise + + finally: + self._fragmented_message_waiter.set_result(None) + self._fragmented_message_waiter = None + + # Fragmented message -- asynchronous iterator + + elif isinstance(message, AsyncIterable): + # aiter_message = aiter(message) without aiter + # https://github.com/python/mypy/issues/5738 + aiter_message = type(message).__aiter__(message) # type: ignore + try: + # message_chunk = anext(aiter_message) without anext + # https://github.com/python/mypy/issues/5738 + message_chunk = await type(aiter_message).__anext__( # type: ignore + aiter_message + ) + except StopAsyncIteration: + return + opcode, data = prepare_data(message_chunk) + + self._fragmented_message_waiter = asyncio.Future() + try: + # First fragment. + await self.write_frame(False, opcode, data) + + # Other fragments. + # https://github.com/python/mypy/issues/5738 + async for message_chunk in aiter_message: # type: ignore + confirm_opcode, data = prepare_data(message_chunk) + if confirm_opcode != opcode: + raise TypeError("data contains inconsistent types") + await self.write_frame(False, OP_CONT, data) + + # Final fragment. + await self.write_frame(True, OP_CONT, b"") + + except Exception: + # We're half-way through a fragmented message and we can't + # complete it. This makes the connection unusable. + self.fail_connection(1011) + raise + + finally: + self._fragmented_message_waiter.set_result(None) + self._fragmented_message_waiter = None + + else: + raise TypeError("data must be bytes, str, or iterable") + + async def close(self, code: int = 1000, reason: str = "") -> None: + """ + Perform the closing handshake. + + :meth:`close` waits for the other end to complete the handshake and + for the TCP connection to terminate. As a consequence, there's no need + to await :meth:`wait_closed`; :meth:`close` already does it. + + :meth:`close` is idempotent: it doesn't do anything once the + connection is closed. + + Wrapping :func:`close` in :func:`~asyncio.create_task` is safe, given + that errors during connection termination aren't particularly useful. + + Canceling :meth:`close` is discouraged. If it takes too long, you can + set a shorter ``close_timeout``. If you don't want to wait, let the + Python process exit, then the OS will close the TCP connection. + + :param code: WebSocket close code + :param reason: WebSocket close reason + + """ + try: + await asyncio.wait_for( + self.write_close_frame(serialize_close(code, reason)), + self.close_timeout, + loop=self.loop if sys.version_info[:2] < (3, 8) else None, + ) + except asyncio.TimeoutError: + # If the close frame cannot be sent because the send buffers + # are full, the closing handshake won't complete anyway. + # Fail the connection to shut down faster. + self.fail_connection() + + # If no close frame is received within the timeout, wait_for() cancels + # the data transfer task and raises TimeoutError. + + # If close() is called multiple times concurrently and one of these + # calls hits the timeout, the data transfer task will be cancelled. + # Other calls will receive a CancelledError here. + + try: + # If close() is canceled during the wait, self.transfer_data_task + # is canceled before the timeout elapses. + await asyncio.wait_for( + self.transfer_data_task, + self.close_timeout, + loop=self.loop if sys.version_info[:2] < (3, 8) else None, + ) + except (asyncio.TimeoutError, asyncio.CancelledError): + pass + + # Wait for the close connection task to close the TCP connection. 
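
In practice a graceful shutdown needs nothing beyond awaiting :meth:`close`, optionally with an explicit code and reason; the fire-and-forget variant wraps the same call in :func:`~asyncio.create_task`, as noted above::

    import asyncio

    async def shutdown(websocket):
        # Waits for the closing handshake, bounded by close_timeout;
        # there is no need to await wait_closed() afterwards.
        await websocket.close(code=1001, reason="server is going away")

    def shutdown_soon(websocket):
        # Errors during termination aren't useful, so don't await the task.
        asyncio.create_task(websocket.close())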
+ await asyncio.shield(self.close_connection_task) + + async def ping(self, data: Optional[Data] = None) -> Awaitable[None]: + """ + Send a ping. + + Return a :class:`~asyncio.Future` that will be completed when the + corresponding pong is received. You can ignore it if you don't intend + to wait. + + A ping may serve as a keepalive or as a check that the remote endpoint + received all messages up to this point:: + + pong_waiter = await ws.ping() + await pong_waiter # only if you want to wait for the pong + + By default, the ping contains four random bytes. This payload may be + overridden with the optional ``data`` argument which must be a string + (which will be encoded to UTF-8) or a bytes-like object. + + Canceling :meth:`ping` is discouraged. If :meth:`ping` doesn't return + immediately, it means the write buffer is full. If you don't want to + wait, you should close the connection. + + Canceling the :class:`~asyncio.Future` returned by :meth:`ping` has no + effect. + + """ + await self.ensure_open() + + if data is not None: + data = prepare_ctrl(data) + + # Protect against duplicates if a payload is explicitly set. + if data in self.pings: + raise ValueError("already waiting for a pong with the same data") + + # Generate a unique random payload otherwise. + while data is None or data in self.pings: + data = struct.pack("!I", random.getrandbits(32)) + + self.pings[data] = self.loop.create_future() + + await self.write_frame(True, OP_PING, data) + + return asyncio.shield(self.pings[data]) + + async def pong(self, data: Data = b"") -> None: + """ + Send a pong. + + An unsolicited pong may serve as a unidirectional heartbeat. + + The payload may be set with the optional ``data`` argument which must + be a string (which will be encoded to UTF-8) or a bytes-like object. + + Canceling :meth:`pong` is discouraged for the same reason as + :meth:`ping`. + + """ + await self.ensure_open() + + data = prepare_ctrl(data) + + await self.write_frame(True, OP_PONG, data) + + # Private methods - no guarantees. + + def connection_closed_exc(self) -> ConnectionClosed: + exception: ConnectionClosed + if self.close_code == 1000 or self.close_code == 1001: + exception = ConnectionClosedOK(self.close_code, self.close_reason) + else: + exception = ConnectionClosedError(self.close_code, self.close_reason) + # Chain to the exception that terminated data transfer, if any. + exception.__cause__ = self.transfer_data_exc + return exception + + async def ensure_open(self) -> None: + """ + Check that the WebSocket connection is open. + + Raise :exc:`~websockets.exceptions.ConnectionClosed` if it isn't. + + """ + # Handle cases from most common to least common for performance. + if self.state is State.OPEN: + # If self.transfer_data_task exited without a closing handshake, + # self.close_connection_task may be closing the connection, going + # straight from OPEN to CLOSED. + if self.transfer_data_task.done(): + await asyncio.shield(self.close_connection_task) + raise self.connection_closed_exc() + else: + return + + if self.state is State.CLOSED: + raise self.connection_closed_exc() + + if self.state is State.CLOSING: + # If we started the closing handshake, wait for its completion to + # get the proper close code and reason. self.close_connection_task + # will complete within 4 or 5 * close_timeout after close(). The + # CLOSING state also occurs when failing the connection. In that + # case self.close_connection_task will complete even faster. 
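
One concrete use of the returned pong waiter is measuring round-trip time, relying only on the behaviour documented above; a rough sketch::

    import time

    async def measure_latency(websocket):
        start = time.monotonic()
        pong_waiter = await websocket.ping()
        await pong_waiter  # resolves when the matching pong arrives
        return time.monotonic() - start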
+ await asyncio.shield(self.close_connection_task) + raise self.connection_closed_exc() + + # Control may only reach this point in buggy third-party subclasses. + assert self.state is State.CONNECTING + raise InvalidState("WebSocket connection isn't established yet") + + async def transfer_data(self) -> None: + """ + Read incoming messages and put them in a queue. + + This coroutine runs in a task until the closing handshake is started. + + """ + try: + while True: + message = await self.read_message() + + # Exit the loop when receiving a close frame. + if message is None: + break + + # Wait until there's room in the queue (if necessary). + if self.max_queue is not None: + while len(self.messages) >= self.max_queue: + self._put_message_waiter = self.loop.create_future() + try: + await asyncio.shield(self._put_message_waiter) + finally: + self._put_message_waiter = None + + # Put the message in the queue. + self.messages.append(message) + + # Notify recv(). + if self._pop_message_waiter is not None: + self._pop_message_waiter.set_result(None) + self._pop_message_waiter = None + + except asyncio.CancelledError as exc: + self.transfer_data_exc = exc + # If fail_connection() cancels this task, avoid logging the error + # twice and failing the connection again. + raise + + except ProtocolError as exc: + self.transfer_data_exc = exc + self.fail_connection(1002) + + except (ConnectionError, TimeoutError, EOFError) as exc: + # Reading data with self.reader.readexactly may raise: + # - most subclasses of ConnectionError if the TCP connection + # breaks, is reset, or is aborted; + # - TimeoutError if the TCP connection times out; + # - IncompleteReadError, a subclass of EOFError, if fewer + # bytes are available than requested. + self.transfer_data_exc = exc + self.fail_connection(1006) + + except UnicodeDecodeError as exc: + self.transfer_data_exc = exc + self.fail_connection(1007) + + except PayloadTooBig as exc: + self.transfer_data_exc = exc + self.fail_connection(1009) + + except Exception as exc: + # This shouldn't happen often because exceptions expected under + # regular circumstances are handled above. If it does, consider + # catching and handling more exceptions. + logger.error("Error in data transfer", exc_info=True) + + self.transfer_data_exc = exc + self.fail_connection(1011) + + async def read_message(self) -> Optional[Data]: + """ + Read a single message from the connection. + + Re-assemble data frames if the message is fragmented. + + Return ``None`` when the closing handshake is started. + + """ + frame = await self.read_data_frame(max_size=self.max_size) + + # A close frame was received. + if frame is None: + return None + + if frame.opcode == OP_TEXT: + text = True + elif frame.opcode == OP_BINARY: + text = False + else: # frame.opcode == OP_CONT + raise ProtocolError("unexpected opcode") + + # Shortcut for the common case - no fragmentation + if frame.fin: + return frame.data.decode("utf-8") if text else frame.data + + # 5.4. 
Fragmentation + chunks: List[Data] = [] + max_size = self.max_size + if text: + decoder_factory = codecs.getincrementaldecoder("utf-8") + decoder = decoder_factory(errors="strict") + if max_size is None: + + def append(frame: Frame) -> None: + nonlocal chunks + chunks.append(decoder.decode(frame.data, frame.fin)) + + else: + + def append(frame: Frame) -> None: + nonlocal chunks, max_size + chunks.append(decoder.decode(frame.data, frame.fin)) + assert isinstance(max_size, int) + max_size -= len(frame.data) + + else: + if max_size is None: + + def append(frame: Frame) -> None: + nonlocal chunks + chunks.append(frame.data) + + else: + + def append(frame: Frame) -> None: + nonlocal chunks, max_size + chunks.append(frame.data) + assert isinstance(max_size, int) + max_size -= len(frame.data) + + append(frame) + + while not frame.fin: + frame = await self.read_data_frame(max_size=max_size) + if frame is None: + raise ProtocolError("incomplete fragmented message") + if frame.opcode != OP_CONT: + raise ProtocolError("unexpected opcode") + append(frame) + + # mypy cannot figure out that chunks have the proper type. + return ("" if text else b"").join(chunks) # type: ignore + + async def read_data_frame(self, max_size: Optional[int]) -> Optional[Frame]: + """ + Read a single data frame from the connection. + + Process control frames received before the next data frame. + + Return ``None`` if a close frame is encountered before any data frame. + + """ + # 6.2. Receiving Data + while True: + frame = await self.read_frame(max_size) + + # 5.5. Control Frames + if frame.opcode == OP_CLOSE: + # 7.1.5. The WebSocket Connection Close Code + # 7.1.6. The WebSocket Connection Close Reason + self.close_code, self.close_reason = parse_close(frame.data) + try: + # Echo the original data instead of re-serializing it with + # serialize_close() because that fails when the close frame + # is empty and parse_close() synthetizes a 1005 close code. + await self.write_close_frame(frame.data) + except ConnectionClosed: + # It doesn't really matter if the connection was closed + # before we could send back a close frame. + pass + return None + + elif frame.opcode == OP_PING: + # Answer pings. + ping_hex = frame.data.hex() or "[empty]" + logger.debug( + "%s - received ping, sending pong: %s", self.side, ping_hex + ) + await self.pong(frame.data) + + elif frame.opcode == OP_PONG: + # Acknowledge pings on solicited pongs. + if frame.data in self.pings: + logger.debug( + "%s - received solicited pong: %s", + self.side, + frame.data.hex() or "[empty]", + ) + # Acknowledge all pings up to the one matching this pong. + ping_id = None + ping_ids = [] + for ping_id, ping in self.pings.items(): + ping_ids.append(ping_id) + if not ping.done(): + ping.set_result(None) + if ping_id == frame.data: + break + else: # pragma: no cover + assert False, "ping_id is in self.pings" + # Remove acknowledged pings from self.pings. + for ping_id in ping_ids: + del self.pings[ping_id] + ping_ids = ping_ids[:-1] + if ping_ids: + pings_hex = ", ".join( + ping_id.hex() or "[empty]" for ping_id in ping_ids + ) + plural = "s" if len(ping_ids) > 1 else "" + logger.debug( + "%s - acknowledged previous ping%s: %s", + self.side, + plural, + pings_hex, + ) + else: + logger.debug( + "%s - received unsolicited pong: %s", + self.side, + frame.data.hex() or "[empty]", + ) + + # 5.6. Data Frames + else: + return frame + + async def read_frame(self, max_size: Optional[int]) -> Frame: + """ + Read a single frame from the connection. 
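
The incremental decoder is what allows a multi-byte character to be split across fragments without ever yielding invalid text; the same behaviour can be observed in isolation, independently of websockets::

    import codecs

    decoder = codecs.getincrementaldecoder("utf-8")(errors="strict")

    data = "café".encode("utf-8")  # b"caf\xc3\xa9", 5 bytes
    assert decoder.decode(data[:4], final=False) == "caf"  # \xc3 is buffered
    assert decoder.decode(data[4:], final=True) == "é"     # completed here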
+ + """ + frame = await Frame.read( + self.reader.readexactly, + mask=not self.is_client, + max_size=max_size, + extensions=self.extensions, + ) + logger.debug("%s < %r", self.side, frame) + return frame + + async def write_frame( + self, fin: bool, opcode: int, data: bytes, *, _expected_state: int = State.OPEN + ) -> None: + # Defensive assertion for protocol compliance. + if self.state is not _expected_state: # pragma: no cover + raise InvalidState( + f"Cannot write to a WebSocket in the {self.state.name} state" + ) + + frame = Frame(fin, Opcode(opcode), data) + logger.debug("%s > %r", self.side, frame) + frame.write( + self.transport.write, mask=self.is_client, extensions=self.extensions + ) + + try: + # drain() cannot be called concurrently by multiple coroutines: + # http://bugs.python.org/issue29930. Remove this lock when no + # version of Python where this bugs exists is supported anymore. + async with self._drain_lock: + # Handle flow control automatically. + await self._drain() + except ConnectionError: + # Terminate the connection if the socket died. + self.fail_connection() + # Wait until the connection is closed to raise ConnectionClosed + # with the correct code and reason. + await self.ensure_open() + + async def write_close_frame(self, data: bytes = b"") -> None: + """ + Write a close frame if and only if the connection state is OPEN. + + This dedicated coroutine must be used for writing close frames to + ensure that at most one close frame is sent on a given connection. + + """ + # Test and set the connection state before sending the close frame to + # avoid sending two frames in case of concurrent calls. + if self.state is State.OPEN: + # 7.1.3. The WebSocket Closing Handshake is Started + self.state = State.CLOSING + logger.debug("%s - state = CLOSING", self.side) + + # 7.1.2. Start the WebSocket Closing Handshake + await self.write_frame(True, OP_CLOSE, data, _expected_state=State.CLOSING) + + async def keepalive_ping(self) -> None: + """ + Send a Ping frame and wait for a Pong frame at regular intervals. + + This coroutine exits when the connection terminates and one of the + following happens: + + - :meth:`ping` raises :exc:`ConnectionClosed`, or + - :meth:`close_connection` cancels :attr:`keepalive_ping_task`. + + """ + if self.ping_interval is None: + return + + try: + while True: + await asyncio.sleep( + self.ping_interval, + loop=self.loop if sys.version_info[:2] < (3, 8) else None, + ) + + # ping() raises CancelledError if the connection is closed, + # when close_connection() cancels self.keepalive_ping_task. + + # ping() raises ConnectionClosed if the connection is lost, + # when connection_lost() calls abort_pings(). + + pong_waiter = await self.ping() + + if self.ping_timeout is not None: + try: + await asyncio.wait_for( + pong_waiter, + self.ping_timeout, + loop=self.loop if sys.version_info[:2] < (3, 8) else None, + ) + except asyncio.TimeoutError: + logger.debug("%s ! timed out waiting for pong", self.side) + self.fail_connection(1011) + break + + # Remove this branch when dropping support for Python < 3.8 + # because CancelledError no longer inherits Exception. + except asyncio.CancelledError: + raise + + except ConnectionClosed: + pass + + except Exception: + logger.warning("Unexpected exception in keepalive ping task", exc_info=True) + + async def close_connection(self) -> None: + """ + 7.1.1. Close the WebSocket Connection + + When the opening handshake succeeds, :meth:`connection_open` starts + this coroutine in a task. 
It waits for the data transfer phase to
+        complete then it closes the TCP connection cleanly.
+
+        When the opening handshake fails, :meth:`fail_connection` does the
+        same. There's no data transfer phase in that case.
+
+        """
+        try:
+            # Wait for the data transfer phase to complete.
+            if hasattr(self, "transfer_data_task"):
+                try:
+                    await self.transfer_data_task
+                except asyncio.CancelledError:
+                    pass
+
+            # Cancel the keepalive ping task.
+            if hasattr(self, "keepalive_ping_task"):
+                self.keepalive_ping_task.cancel()
+
+            # A client should wait for a TCP close from the server.
+            if self.is_client and hasattr(self, "transfer_data_task"):
+                if await self.wait_for_connection_lost():
+                    # Coverage marks this line as a partially executed branch.
+                    # I suspect a bug in coverage. Ignore it for now.
+                    return  # pragma: no cover
+                logger.debug("%s ! timed out waiting for TCP close", self.side)
+
+            # Half-close the TCP connection if possible (when there's no TLS).
+            if self.transport.can_write_eof():
+                logger.debug("%s x half-closing TCP connection", self.side)
+                self.transport.write_eof()
+
+                if await self.wait_for_connection_lost():
+                    # Coverage marks this line as a partially executed branch.
+                    # I suspect a bug in coverage. Ignore it for now.
+                    return  # pragma: no cover
+                logger.debug("%s ! timed out waiting for TCP close", self.side)
+
+        finally:
+            # The try/finally ensures that the transport never remains open,
+            # even if this coroutine is canceled (for example).
+
+            # If connection_lost() was called, the TCP connection is closed.
+            # However, if TLS is enabled, the transport still needs closing.
+            # Else asyncio complains: ResourceWarning: unclosed transport.
+            if self.connection_lost_waiter.done() and self.transport.is_closing():
+                return
+
+            # Close the TCP connection. Buffers are flushed asynchronously.
+            logger.debug("%s x closing TCP connection", self.side)
+            self.transport.close()
+
+            if await self.wait_for_connection_lost():
+                return
+            logger.debug("%s ! timed out waiting for TCP close", self.side)
+
+            # Abort the TCP connection. Buffers are discarded.
+            logger.debug("%s x aborting TCP connection", self.side)
+            self.transport.abort()
+
+            # connection_lost() is called quickly after aborting.
+            # Coverage marks this line as a partially executed branch.
+            # I suspect a bug in coverage. Ignore it for now.
+            await self.wait_for_connection_lost()  # pragma: no cover
+
+    async def wait_for_connection_lost(self) -> bool:
+        """
+        Wait until the TCP connection is closed or ``self.close_timeout`` elapses.
+
+        Return ``True`` if the connection is closed and ``False`` otherwise.
+
+        """
+        if not self.connection_lost_waiter.done():
+            try:
+                await asyncio.wait_for(
+                    asyncio.shield(self.connection_lost_waiter),
+                    self.close_timeout,
+                    loop=self.loop if sys.version_info[:2] < (3, 8) else None,
+                )
+            except asyncio.TimeoutError:
+                pass
+        # Re-check self.connection_lost_waiter.done() synchronously because
+        # connection_lost() could run between the moment the timeout occurs
+        # and the moment this coroutine resumes running.
+        return self.connection_lost_waiter.done()
+
+    def fail_connection(self, code: int = 1006, reason: str = "") -> None:
+        """
+        7.1.7. Fail the WebSocket Connection
+
+        This requires:
+
+        1. Stopping all processing of incoming data, which means cancelling
+           :attr:`transfer_data_task`. The close code will be 1006 unless a
+           close frame was received earlier.
+
+        2. Sending a close frame with an appropriate code if the opening
+           handshake succeeded and the other side is likely to process it.
+    def fail_connection(self, code: int = 1006, reason: str = "") -> None:
+        """
+        7.1.7. Fail the WebSocket Connection
+
+        This requires:
+
+        1. Stopping all processing of incoming data, which means cancelling
+           :attr:`transfer_data_task`. The close code will be 1006 unless a
+           close frame was received earlier.
+
+        2. Sending a close frame with an appropriate code if the opening
+           handshake succeeded and the other side is likely to process it.
+
+        3. Closing the connection. :meth:`close_connection` takes care of
+           this once :attr:`transfer_data_task` exits after being canceled.
+
+        (The specification describes these steps in the opposite order.)
+
+        """
+        logger.debug(
+            "%s ! failing %s WebSocket connection with code %d",
+            self.side,
+            self.state.name,
+            code,
+        )
+
+        # Cancel transfer_data_task if the opening handshake succeeded.
+        # cancel() is idempotent and ignored if the task is done already.
+        if hasattr(self, "transfer_data_task"):
+            self.transfer_data_task.cancel()
+
+        # Send a close frame when the state is OPEN (a close frame was already
+        # sent if it's CLOSING), except when failing the connection because of
+        # an error reading from or writing to the network.
+        # Don't send a close frame if the connection is broken.
+        if code != 1006 and self.state is State.OPEN:
+
+            frame_data = serialize_close(code, reason)
+
+            # Write the close frame without draining the write buffer.
+
+            # Keeping fail_connection() synchronous guarantees it can't
+            # get stuck and simplifies the implementation of the callers.
+            # Not draining the write buffer is acceptable in this context.
+
+            # This duplicates a few lines of code from write_close_frame()
+            # and write_frame().
+
+            self.state = State.CLOSING
+            logger.debug("%s - state = CLOSING", self.side)
+
+            frame = Frame(True, OP_CLOSE, frame_data)
+            logger.debug("%s > %r", self.side, frame)
+            frame.write(
+                self.transport.write, mask=self.is_client, extensions=self.extensions
+            )
+
+        # Start close_connection_task if the opening handshake didn't succeed.
+        if not hasattr(self, "close_connection_task"):
+            self.close_connection_task = self.loop.create_task(self.close_connection())
+
+    def abort_pings(self) -> None:
+        """
+        Raise ConnectionClosed in pending keepalive pings.
+
+        They'll never receive a pong once the connection is closed.
+
+        """
+        assert self.state is State.CLOSED
+        exc = self.connection_closed_exc()
+
+        for ping in self.pings.values():
+            ping.set_exception(exc)
+            # If the exception is never retrieved, it will be logged when ping
+            # is garbage-collected. This is confusing for users.
+            # Given that ping is done (with an exception), canceling it does
+            # nothing, but it prevents logging the exception.
+            ping.cancel()
+
+        if self.pings:
+            pings_hex = ", ".join(ping_id.hex() or "[empty]" for ping_id in self.pings)
+            plural = "s" if len(self.pings) > 1 else ""
+            logger.debug(
+                "%s - aborted pending ping%s: %s", self.side, plural, pings_hex
+            )
+
+    # asyncio.Protocol methods
+
+    def connection_made(self, transport: asyncio.BaseTransport) -> None:
+        """
+        Configure write buffer limits.
+
+        The high-water limit is defined by ``self.write_limit``.
+
+        The low-water limit currently defaults to ``self.write_limit // 4`` in
+        :meth:`~asyncio.WriteTransport.set_write_buffer_limits`, which should
+        be all right for reasonable use cases of this library.
+
+        This is the earliest point where we can get hold of the transport,
+        which means it's the best point for configuring it.
+
+        """
+        logger.debug("%s - event = connection_made(%s)", self.side, transport)
+
+        transport = cast(asyncio.Transport, transport)
+        transport.set_write_buffer_limits(self.write_limit)
+        self.transport = transport
+
+        # Copied from asyncio.StreamReaderProtocol
+        self.reader.set_transport(transport)
+
+    def connection_lost(self, exc: Optional[Exception]) -> None:
+        """
+        7.1.4. The WebSocket Connection is Closed.
+ + """ + logger.debug("%s - event = connection_lost(%s)", self.side, exc) + self.state = State.CLOSED + logger.debug("%s - state = CLOSED", self.side) + if not hasattr(self, "close_code"): + self.close_code = 1006 + if not hasattr(self, "close_reason"): + self.close_reason = "" + logger.debug( + "%s x code = %d, reason = %s", + self.side, + self.close_code, + self.close_reason or "[no reason]", + ) + self.abort_pings() + # If self.connection_lost_waiter isn't pending, that's a bug, because: + # - it's set only here in connection_lost() which is called only once; + # - it must never be canceled. + self.connection_lost_waiter.set_result(None) + + if True: # pragma: no cover + + # Copied from asyncio.StreamReaderProtocol + if self.reader is not None: + if exc is None: + self.reader.feed_eof() + else: + self.reader.set_exception(exc) + + # Copied from asyncio.FlowControlMixin + # Wake up the writer if currently paused. + if not self._paused: + return + waiter = self._drain_waiter + if waiter is None: + return + self._drain_waiter = None + if waiter.done(): + return + if exc is None: + waiter.set_result(None) + else: + waiter.set_exception(exc) + + def pause_writing(self) -> None: # pragma: no cover + assert not self._paused + self._paused = True + + def resume_writing(self) -> None: # pragma: no cover + assert self._paused + self._paused = False + + waiter = self._drain_waiter + if waiter is not None: + self._drain_waiter = None + if not waiter.done(): + waiter.set_result(None) + + def data_received(self, data: bytes) -> None: + logger.debug("%s - event = data_received(<%d bytes>)", self.side, len(data)) + self.reader.feed_data(data) + + def eof_received(self) -> None: + """ + Close the transport after receiving EOF. + + The WebSocket protocol has its own closing handshake: endpoints close + the TCP or TLS connection after sending and receiving a close frame. + + As a consequence, they never need to write after receiving EOF, so + there's no reason to keep the transport open by returning ``True``. + + Besides, that doesn't work on TLS connections. + + """ + logger.debug("%s - event = eof_received()", self.side) + self.reader.feed_eof() diff --git a/src/websockets/asyncio_server.py b/src/websockets/legacy/server.py similarity index 97% rename from src/websockets/asyncio_server.py rename to src/websockets/legacy/server.py index 79ceddf4b..4dea9459d 100644 --- a/src/websockets/asyncio_server.py +++ b/src/websockets/legacy/server.py @@ -1,5 +1,5 @@ """ -:mod:`websockets.server` defines the WebSocket server APIs. +:mod:`websockets.legacy.server` defines the WebSocket server APIs. 
""" @@ -28,8 +28,8 @@ cast, ) -from .datastructures import Headers, HeadersLike, MultipleValuesError -from .exceptions import ( +from ..datastructures import Headers, HeadersLike, MultipleValuesError +from ..exceptions import ( AbortHandshake, InvalidHandshake, InvalidHeader, @@ -38,14 +38,14 @@ InvalidUpgrade, NegotiationError, ) -from .extensions.base import Extension, ServerExtensionFactory -from .extensions.permessage_deflate import enable_server_permessage_deflate -from .handshake_legacy import build_response, check_request -from .headers import build_extension, parse_extension, parse_subprotocol -from .http import USER_AGENT -from .http_legacy import read_request +from ..extensions.base import Extension, ServerExtensionFactory +from ..extensions.permessage_deflate import enable_server_permessage_deflate +from ..headers import build_extension, parse_extension, parse_subprotocol +from ..http import USER_AGENT +from ..typing import ExtensionHeader, Origin, Subprotocol +from .handshake import build_response, check_request +from .http import read_request from .protocol import WebSocketCommonProtocol -from .typing import ExtensionHeader, Origin, Subprotocol __all__ = ["serve", "unix_serve", "WebSocketServerProtocol", "WebSocketServer"] @@ -598,7 +598,7 @@ async def handshake( class WebSocketServer: """ - WebSocket server returned by :func:`~websockets.server.serve`. + WebSocket server returned by :func:`serve`. This class provides the same interface as :class:`~asyncio.AbstractServer`, namely the @@ -770,9 +770,9 @@ class Serve: performs the closing handshake and closes the connection. Awaiting :func:`serve` yields a :class:`WebSocketServer`. This instance - provides :meth:`~websockets.server.WebSocketServer.close` and - :meth:`~websockets.server.WebSocketServer.wait_closed` methods for - terminating the server and cleaning up its resources. + provides :meth:`~WebSocketServer.close` and + :meth:`~WebSocketServer.wait_closed` methods for terminating the server + and cleaning up its resources. When a server is closed with :meth:`~WebSocketServer.close`, it closes all connections with close code 1001 (going away). Connections handlers, which @@ -835,11 +835,11 @@ class Serve: :meth:`~WebSocketServerProtocol.select_subprotocol` for details Since there's no useful way to propagate exceptions triggered in handlers, - they're sent to the ``'websockets.asyncio_server'`` logger instead. + they're sent to the ``'websockets.legacy.server'`` logger instead. Debugging is much easier if you configure logging to print them:: import logging - logger = logging.getLogger("websockets.asyncio_server") + logger = logging.getLogger("websockets.legacy.server") logger.setLevel(logging.ERROR) logger.addHandler(logging.StreamHandler()) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py index 1552fb060..287f92a57 100644 --- a/src/websockets/protocol.py +++ b/src/websockets/protocol.py @@ -1,1465 +1 @@ -""" -:mod:`websockets.protocol` handles WebSocket control and data frames. - -See `sections 4 to 8 of RFC 6455`_. - -.. 
_sections 4 to 8 of RFC 6455: http://tools.ietf.org/html/rfc6455#section-4 - -""" - -import asyncio -import codecs -import collections -import enum -import logging -import random -import struct -import sys -import warnings -from typing import ( - Any, - AsyncIterable, - AsyncIterator, - Awaitable, - Deque, - Dict, - Iterable, - List, - Mapping, - Optional, - Union, - cast, -) - -from .datastructures import Headers -from .exceptions import ( - ConnectionClosed, - ConnectionClosedError, - ConnectionClosedOK, - InvalidState, - PayloadTooBig, - ProtocolError, -) -from .extensions.base import Extension -from .frames import ( - OP_BINARY, - OP_CLOSE, - OP_CONT, - OP_PING, - OP_PONG, - OP_TEXT, - Opcode, - parse_close, - prepare_ctrl, - prepare_data, - serialize_close, -) - - -with warnings.catch_warnings(): - warnings.filterwarnings( - "ignore", "websockets.framing is deprecated", DeprecationWarning - ) - from .framing import Frame - -from .typing import Data, Subprotocol - - -__all__ = ["WebSocketCommonProtocol"] - -logger = logging.getLogger(__name__) - - -# A WebSocket connection goes through the following four states, in order: - - -class State(enum.IntEnum): - CONNECTING, OPEN, CLOSING, CLOSED = range(4) - - -# In order to ensure consistency, the code always checks the current value of -# WebSocketCommonProtocol.state before assigning a new value and never yields -# between the check and the assignment. - - -class WebSocketCommonProtocol(asyncio.Protocol): - """ - :class:`~asyncio.Protocol` subclass implementing the data transfer phase. - - Once the WebSocket connection is established, during the data transfer - phase, the protocol is almost symmetrical between the server side and the - client side. :class:`WebSocketCommonProtocol` implements logic that's - shared between servers and clients.. - - Subclasses such as :class:`~websockets.server.WebSocketServerProtocol` and - :class:`~websockets.client.WebSocketClientProtocol` implement the opening - handshake, which is different between servers and clients. - - :class:`WebSocketCommonProtocol` performs four functions: - - * It runs a task that stores incoming data frames in a queue and makes - them available with the :meth:`recv` coroutine. - * It sends outgoing data frames with the :meth:`send` coroutine. - * It deals with control frames automatically. - * It performs the closing handshake. - - :class:`WebSocketCommonProtocol` supports asynchronous iteration:: - - async for message in websocket: - await process(message) - - The iterator yields incoming messages. It exits normally when the - connection is closed with the close code 1000 (OK) or 1001 (going away). - It raises a :exc:`~websockets.exceptions.ConnectionClosedError` exception - when the connection is closed with any other code. - - Once the connection is open, a `Ping frame`_ is sent every - ``ping_interval`` seconds. This serves as a keepalive. It helps keeping - the connection open, especially in the presence of proxies with short - timeouts on inactive connections. Set ``ping_interval`` to ``None`` to - disable this behavior. - - .. _Ping frame: https://tools.ietf.org/html/rfc6455#section-5.5.2 - - If the corresponding `Pong frame`_ isn't received within ``ping_timeout`` - seconds, the connection is considered unusable and is closed with - code 1011. This ensures that the remote endpoint remains responsive. Set - ``ping_timeout`` to ``None`` to disable this behavior. - - .. 
_Pong frame: https://tools.ietf.org/html/rfc6455#section-5.5.3 - - The ``close_timeout`` parameter defines a maximum wait time in seconds for - completing the closing handshake and terminating the TCP connection. - :meth:`close` completes in at most ``4 * close_timeout`` on the server - side and ``5 * close_timeout`` on the client side. - - ``close_timeout`` needs to be a parameter of the protocol because - ``websockets`` usually calls :meth:`close` implicitly: - - - on the server side, when the connection handler terminates, - - on the client side, when exiting the context manager for the connection. - - To apply a timeout to any other API, wrap it in :func:`~asyncio.wait_for`. - - The ``max_size`` parameter enforces the maximum size for incoming messages - in bytes. The default value is 1 MiB. ``None`` disables the limit. If a - message larger than the maximum size is received, :meth:`recv` will - raise :exc:`~websockets.exceptions.ConnectionClosedError` and the - connection will be closed with code 1009. - - The ``max_queue`` parameter sets the maximum length of the queue that - holds incoming messages. The default value is ``32``. ``None`` disables - the limit. Messages are added to an in-memory queue when they're received; - then :meth:`recv` pops from that queue. In order to prevent excessive - memory consumption when messages are received faster than they can be - processed, the queue must be bounded. If the queue fills up, the protocol - stops processing incoming data until :meth:`recv` is called. In this - situation, various receive buffers (at least in ``asyncio`` and in the OS) - will fill up, then the TCP receive window will shrink, slowing down - transmission to avoid packet loss. - - Since Python can use up to 4 bytes of memory to represent a single - character, each connection may use up to ``4 * max_size * max_queue`` - bytes of memory to store incoming messages. By default, this is 128 MiB. - You may want to lower the limits, depending on your application's - requirements. - - The ``read_limit`` argument sets the high-water limit of the buffer for - incoming bytes. The low-water limit is half the high-water limit. The - default value is 64 KiB, half of asyncio's default (based on the current - implementation of :class:`~asyncio.StreamReader`). - - The ``write_limit`` argument sets the high-water limit of the buffer for - outgoing bytes. The low-water limit is a quarter of the high-water limit. - The default value is 64 KiB, equal to asyncio's default (based on the - current implementation of ``FlowControlMixin``). - - As soon as the HTTP request and response in the opening handshake are - processed: - - * the request path is available in the :attr:`path` attribute; - * the request and response HTTP headers are available in the - :attr:`request_headers` and :attr:`response_headers` attributes, - which are :class:`~websockets.http.Headers` instances. - - If a subprotocol was negotiated, it's available in the :attr:`subprotocol` - attribute. - - Once the connection is closed, the code is available in the - :attr:`close_code` attribute and the reason in :attr:`close_reason`. - - All these attributes must be treated as read-only. - - """ - - # There are only two differences between the client-side and server-side - # behavior: masking the payload and closing the underlying TCP connection. - # Set is_client = True/False and side = "client"/"server" to pick a side. 
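The sizing parameters described in the docstring above (``ping_interval``, ``ping_timeout``, ``close_timeout``, ``max_size``, ``max_queue``, ``read_limit``, ``write_limit``) are exposed as keyword arguments of ``connect()`` and ``serve()``. A hypothetical client tuned for small messages might look like this; the URI and the values are illustrative only.

import asyncio
import websockets

async def consumer(uri):
    # With these illustrative values, buffered incoming messages use at most
    # about 4 * max_size * max_queue = 2 MiB of memory per connection.
    async with websockets.connect(
        uri,
        max_size=2 ** 16,     # 64 KiB per incoming message
        max_queue=8,          # at most 8 messages waiting for recv()
        read_limit=2 ** 16,   # high-water mark of the read buffer
        write_limit=2 ** 16,  # high-water mark of the write buffer
        ping_interval=20,     # keepalive ping every 20 seconds
        ping_timeout=20,      # fail if the pong takes longer than that
    ) as websocket:
        async for message in websocket:
            print(message)

asyncio.get_event_loop().run_until_complete(consumer("ws://localhost:8765"))
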
- is_client: bool - side: str = "undefined" - - def __init__( - self, - *, - ping_interval: Optional[float] = 20, - ping_timeout: Optional[float] = 20, - close_timeout: Optional[float] = None, - max_size: Optional[int] = 2 ** 20, - max_queue: Optional[int] = 2 ** 5, - read_limit: int = 2 ** 16, - write_limit: int = 2 ** 16, - loop: Optional[asyncio.AbstractEventLoop] = None, - # The following arguments are kept only for backwards compatibility. - host: Optional[str] = None, - port: Optional[int] = None, - secure: Optional[bool] = None, - legacy_recv: bool = False, - timeout: Optional[float] = None, - ) -> None: - # Backwards compatibility: close_timeout used to be called timeout. - if timeout is None: - timeout = 10 - else: - warnings.warn("rename timeout to close_timeout", DeprecationWarning) - # If both are specified, timeout is ignored. - if close_timeout is None: - close_timeout = timeout - - self.ping_interval = ping_interval - self.ping_timeout = ping_timeout - self.close_timeout = close_timeout - self.max_size = max_size - self.max_queue = max_queue - self.read_limit = read_limit - self.write_limit = write_limit - - if loop is None: - loop = asyncio.get_event_loop() - self.loop = loop - - self._host = host - self._port = port - self._secure = secure - self.legacy_recv = legacy_recv - - # Configure read buffer limits. The high-water limit is defined by - # ``self.read_limit``. The ``limit`` argument controls the line length - # limit and half the buffer limit of :class:`~asyncio.StreamReader`. - # That's why it must be set to half of ``self.read_limit``. - self.reader = asyncio.StreamReader(limit=read_limit // 2, loop=loop) - - # Copied from asyncio.FlowControlMixin - self._paused = False - self._drain_waiter: Optional[asyncio.Future[None]] = None - - self._drain_lock = asyncio.Lock( - loop=loop if sys.version_info[:2] < (3, 8) else None - ) - - # This class implements the data transfer and closing handshake, which - # are shared between the client-side and the server-side. - # Subclasses implement the opening handshake and, on success, execute - # :meth:`connection_open` to change the state to OPEN. - self.state = State.CONNECTING - logger.debug("%s - state = CONNECTING", self.side) - - # HTTP protocol parameters. - self.path: str - self.request_headers: Headers - self.response_headers: Headers - - # WebSocket protocol parameters. - self.extensions: List[Extension] = [] - self.subprotocol: Optional[Subprotocol] = None - - # The close code and reason are set when receiving a close frame or - # losing the TCP connection. - self.close_code: int - self.close_reason: str - - # Completed when the connection state becomes CLOSED. Translates the - # :meth:`connection_lost` callback to a :class:`~asyncio.Future` - # that can be awaited. (Other :class:`~asyncio.Protocol` callbacks are - # translated by ``self.stream_reader``). - self.connection_lost_waiter: asyncio.Future[None] = loop.create_future() - - # Queue of received messages. - self.messages: Deque[Data] = collections.deque() - self._pop_message_waiter: Optional[asyncio.Future[None]] = None - self._put_message_waiter: Optional[asyncio.Future[None]] = None - - # Protect sending fragmented messages. - self._fragmented_message_waiter: Optional[asyncio.Future[None]] = None - - # Mapping of ping IDs to pong waiters, in chronological order. - self.pings: Dict[bytes, asyncio.Future[None]] = {} - - # Task running the data transfer. - self.transfer_data_task: asyncio.Task[None] - - # Exception that occurred during data transfer, if any. 
- self.transfer_data_exc: Optional[BaseException] = None - - # Task sending keepalive pings. - self.keepalive_ping_task: asyncio.Task[None] - - # Task closing the TCP connection. - self.close_connection_task: asyncio.Task[None] - - # Copied from asyncio.FlowControlMixin - async def _drain_helper(self) -> None: # pragma: no cover - if self.connection_lost_waiter.done(): - raise ConnectionResetError("Connection lost") - if not self._paused: - return - waiter = self._drain_waiter - assert waiter is None or waiter.cancelled() - waiter = self.loop.create_future() - self._drain_waiter = waiter - await waiter - - # Copied from asyncio.StreamWriter - async def _drain(self) -> None: # pragma: no cover - if self.reader is not None: - exc = self.reader.exception() - if exc is not None: - raise exc - if self.transport is not None: - if self.transport.is_closing(): - # Yield to the event loop so connection_lost() may be - # called. Without this, _drain_helper() would return - # immediately, and code that calls - # write(...); yield from drain() - # in a loop would never call connection_lost(), so it - # would not see an error when the socket is closed. - await asyncio.sleep( - 0, loop=self.loop if sys.version_info[:2] < (3, 8) else None - ) - await self._drain_helper() - - def connection_open(self) -> None: - """ - Callback when the WebSocket opening handshake completes. - - Enter the OPEN state and start the data transfer phase. - - """ - # 4.1. The WebSocket Connection is Established. - assert self.state is State.CONNECTING - self.state = State.OPEN - logger.debug("%s - state = OPEN", self.side) - # Start the task that receives incoming WebSocket messages. - self.transfer_data_task = self.loop.create_task(self.transfer_data()) - # Start the task that sends pings at regular intervals. - self.keepalive_ping_task = self.loop.create_task(self.keepalive_ping()) - # Start the task that eventually closes the TCP connection. - self.close_connection_task = self.loop.create_task(self.close_connection()) - - @property - def host(self) -> Optional[str]: - alternative = "remote_address" if self.is_client else "local_address" - warnings.warn(f"use {alternative}[0] instead of host", DeprecationWarning) - return self._host - - @property - def port(self) -> Optional[int]: - alternative = "remote_address" if self.is_client else "local_address" - warnings.warn(f"use {alternative}[1] instead of port", DeprecationWarning) - return self._port - - @property - def secure(self) -> Optional[bool]: - warnings.warn("don't use secure", DeprecationWarning) - return self._secure - - # Public API - - @property - def local_address(self) -> Any: - """ - Local address of the connection as a ``(host, port)`` tuple. - - When the connection isn't open, ``local_address`` is ``None``. - - """ - try: - transport = self.transport - except AttributeError: - return None - else: - return transport.get_extra_info("sockname") - - @property - def remote_address(self) -> Any: - """ - Remote address of the connection as a ``(host, port)`` tuple. - - When the connection isn't open, ``remote_address`` is ``None``. - - """ - try: - transport = self.transport - except AttributeError: - return None - else: - return transport.get_extra_info("peername") - - @property - def open(self) -> bool: - """ - ``True`` when the connection is usable. - - It may be used to detect disconnections. However, this approach is - discouraged per the EAFP_ principle. 
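Following the EAFP advice above, application code should attempt the operation and catch :exc:`ConnectionClosed` rather than test ``open`` first. A short sketch, assuming ``websocket`` is a connection obtained from ``connect()`` or passed to a server handler:

from websockets.exceptions import ConnectionClosed

async def try_recv(websocket):
    # EAFP: the connection may close between a check of websocket.open and
    # the actual call, so handle the failure instead of testing first.
    try:
        return await websocket.recv()
    except ConnectionClosed:
        print("closed:", websocket.close_code, websocket.close_reason)
        return None
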
- - When ``open`` is ``False``, using the connection raises a - :exc:`~websockets.exceptions.ConnectionClosed` exception. - - .. _EAFP: https://docs.python.org/3/glossary.html#term-eafp - - """ - return self.state is State.OPEN and not self.transfer_data_task.done() - - @property - def closed(self) -> bool: - """ - ``True`` once the connection is closed. - - Be aware that both :attr:`open` and :attr:`closed` are ``False`` during - the opening and closing sequences. - - """ - return self.state is State.CLOSED - - async def wait_closed(self) -> None: - """ - Wait until the connection is closed. - - This is identical to :attr:`closed`, except it can be awaited. - - This can make it easier to handle connection termination, regardless - of its cause, in tasks that interact with the WebSocket connection. - - """ - await asyncio.shield(self.connection_lost_waiter) - - async def __aiter__(self) -> AsyncIterator[Data]: - """ - Iterate on received messages. - - Exit normally when the connection is closed with code 1000 or 1001. - - Raise an exception in other cases. - - """ - try: - while True: - yield await self.recv() - except ConnectionClosedOK: - return - - async def recv(self) -> Data: - """ - Receive the next message. - - Return a :class:`str` for a text frame and :class:`bytes` for a binary - frame. - - When the end of the message stream is reached, :meth:`recv` raises - :exc:`~websockets.exceptions.ConnectionClosed`. Specifically, it - raises :exc:`~websockets.exceptions.ConnectionClosedOK` after a normal - connection closure and - :exc:`~websockets.exceptions.ConnectionClosedError` after a protocol - error or a network failure. - - .. versionchanged:: 3.0 - - :meth:`recv` used to return ``None`` instead. Refer to the - changelog for details. - - Canceling :meth:`recv` is safe. There's no risk of losing the next - message. The next invocation of :meth:`recv` will return it. This - makes it possible to enforce a timeout by wrapping :meth:`recv` in - :func:`~asyncio.wait_for`. - - :raises ~websockets.exceptions.ConnectionClosed: when the - connection is closed - :raises RuntimeError: if two coroutines call :meth:`recv` concurrently - - """ - if self._pop_message_waiter is not None: - raise RuntimeError( - "cannot call recv while another coroutine " - "is already waiting for the next message" - ) - - # Don't await self.ensure_open() here: - # - messages could be available in the queue even if the connection - # is closed; - # - messages could be received before the closing frame even if the - # connection is closing. - - # Wait until there's a message in the queue (if necessary) or the - # connection is closed. - while len(self.messages) <= 0: - pop_message_waiter: asyncio.Future[None] = self.loop.create_future() - self._pop_message_waiter = pop_message_waiter - try: - # If asyncio.wait() is canceled, it doesn't cancel - # pop_message_waiter and self.transfer_data_task. - await asyncio.wait( - [pop_message_waiter, self.transfer_data_task], - loop=self.loop if sys.version_info[:2] < (3, 8) else None, - return_when=asyncio.FIRST_COMPLETED, - ) - finally: - self._pop_message_waiter = None - - # If asyncio.wait(...) exited because self.transfer_data_task - # completed before receiving a new message, raise a suitable - # exception (or return None if legacy_recv is enabled). - if not pop_message_waiter.done(): - if self.legacy_recv: - return None # type: ignore - else: - # Wait until the connection is closed to raise - # ConnectionClosed with the correct code and reason. 
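Two usage patterns follow directly from the documentation above: iterating over incoming messages with ``async for``, and bounding ``recv()`` with :func:`asyncio.wait_for`. A sketch, assuming ``websocket`` is an open connection:

import asyncio

async def consume_all(websocket):
    # Exits cleanly when the connection closes with code 1000 or 1001.
    async for message in websocket:
        print(message)

async def recv_or_none(websocket, timeout=5):
    # recv() is safe to cancel: no message is lost, the next call to recv()
    # will return it, so wrapping it in wait_for() is fine.
    try:
        return await asyncio.wait_for(websocket.recv(), timeout)
    except asyncio.TimeoutError:
        return None
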
- await self.ensure_open() - - # Pop a message from the queue. - message = self.messages.popleft() - - # Notify transfer_data(). - if self._put_message_waiter is not None: - self._put_message_waiter.set_result(None) - self._put_message_waiter = None - - return message - - async def send( - self, message: Union[Data, Iterable[Data], AsyncIterable[Data]] - ) -> None: - """ - Send a message. - - A string (:class:`str`) is sent as a `Text frame`_. A bytestring or - bytes-like object (:class:`bytes`, :class:`bytearray`, or - :class:`memoryview`) is sent as a `Binary frame`_. - - .. _Text frame: https://tools.ietf.org/html/rfc6455#section-5.6 - .. _Binary frame: https://tools.ietf.org/html/rfc6455#section-5.6 - - :meth:`send` also accepts an iterable or an asynchronous iterable of - strings, bytestrings, or bytes-like objects. In that case the message - is fragmented. Each item is treated as a message fragment and sent in - its own frame. All items must be of the same type, or else - :meth:`send` will raise a :exc:`TypeError` and the connection will be - closed. - - :meth:`send` rejects dict-like objects because this is often an error. - If you wish to send the keys of a dict-like object as fragments, call - its :meth:`~dict.keys` method and pass the result to :meth:`send`. - - Canceling :meth:`send` is discouraged. Instead, you should close the - connection with :meth:`close`. Indeed, there only two situations where - :meth:`send` yields control to the event loop: - - 1. The write buffer is full. If you don't want to wait until enough - data is sent, your only alternative is to close the connection. - :meth:`close` will likely time out then abort the TCP connection. - 2. ``message`` is an asynchronous iterator. Stopping in the middle of - a fragmented message will cause a protocol error. Closing the - connection has the same effect. - - :raises TypeError: for unsupported inputs - - """ - await self.ensure_open() - - # While sending a fragmented message, prevent sending other messages - # until all fragments are sent. - while self._fragmented_message_waiter is not None: - await asyncio.shield(self._fragmented_message_waiter) - - # Unfragmented message -- this case must be handled first because - # strings and bytes-like objects are iterable. - - if isinstance(message, (str, bytes, bytearray, memoryview)): - opcode, data = prepare_data(message) - await self.write_frame(True, opcode, data) - - # Catch a common mistake -- passing a dict to send(). - - elif isinstance(message, Mapping): - raise TypeError("data is a dict-like object") - - # Fragmented message -- regular iterator. - - elif isinstance(message, Iterable): - - # Work around https://github.com/python/mypy/issues/6227 - message = cast(Iterable[Data], message) - - iter_message = iter(message) - try: - message_chunk = next(iter_message) - except StopIteration: - return - opcode, data = prepare_data(message_chunk) - - self._fragmented_message_waiter = asyncio.Future() - try: - # First fragment. - await self.write_frame(False, opcode, data) - - # Other fragments. - for message_chunk in iter_message: - confirm_opcode, data = prepare_data(message_chunk) - if confirm_opcode != opcode: - raise TypeError("data contains inconsistent types") - await self.write_frame(False, OP_CONT, data) - - # Final fragment. - await self.write_frame(True, OP_CONT, b"") - - except Exception: - # We're half-way through a fragmented message and we can't - # complete it. This makes the connection unusable. 
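The fragmentation path above accepts any iterable of items of the same type; each item becomes one frame. For example, a generator can stream a large text without materializing it. This is a sketch; ``chunks()`` is a made-up helper.

def chunks(text, size=4096):
    # Hypothetical helper: yield `text` in slices of `size` characters.
    for start in range(0, len(text), size):
        yield text[start:start + size]

async def send_in_fragments(websocket, text):
    # Every chunk is sent in its own frame. Mixing str and bytes chunks
    # would raise TypeError and make the connection unusable.
    await websocket.send(chunks(text))
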
- self.fail_connection(1011) - raise - - finally: - self._fragmented_message_waiter.set_result(None) - self._fragmented_message_waiter = None - - # Fragmented message -- asynchronous iterator - - elif isinstance(message, AsyncIterable): - # aiter_message = aiter(message) without aiter - # https://github.com/python/mypy/issues/5738 - aiter_message = type(message).__aiter__(message) # type: ignore - try: - # message_chunk = anext(aiter_message) without anext - # https://github.com/python/mypy/issues/5738 - message_chunk = await type(aiter_message).__anext__( # type: ignore - aiter_message - ) - except StopAsyncIteration: - return - opcode, data = prepare_data(message_chunk) - - self._fragmented_message_waiter = asyncio.Future() - try: - # First fragment. - await self.write_frame(False, opcode, data) - - # Other fragments. - # https://github.com/python/mypy/issues/5738 - async for message_chunk in aiter_message: # type: ignore - confirm_opcode, data = prepare_data(message_chunk) - if confirm_opcode != opcode: - raise TypeError("data contains inconsistent types") - await self.write_frame(False, OP_CONT, data) - - # Final fragment. - await self.write_frame(True, OP_CONT, b"") - - except Exception: - # We're half-way through a fragmented message and we can't - # complete it. This makes the connection unusable. - self.fail_connection(1011) - raise - - finally: - self._fragmented_message_waiter.set_result(None) - self._fragmented_message_waiter = None - - else: - raise TypeError("data must be bytes, str, or iterable") - - async def close(self, code: int = 1000, reason: str = "") -> None: - """ - Perform the closing handshake. - - :meth:`close` waits for the other end to complete the handshake and - for the TCP connection to terminate. As a consequence, there's no need - to await :meth:`wait_closed`; :meth:`close` already does it. - - :meth:`close` is idempotent: it doesn't do anything once the - connection is closed. - - Wrapping :func:`close` in :func:`~asyncio.create_task` is safe, given - that errors during connection termination aren't particularly useful. - - Canceling :meth:`close` is discouraged. If it takes too long, you can - set a shorter ``close_timeout``. If you don't want to wait, let the - Python process exit, then the OS will close the TCP connection. - - :param code: WebSocket close code - :param reason: WebSocket close reason - - """ - try: - await asyncio.wait_for( - self.write_close_frame(serialize_close(code, reason)), - self.close_timeout, - loop=self.loop if sys.version_info[:2] < (3, 8) else None, - ) - except asyncio.TimeoutError: - # If the close frame cannot be sent because the send buffers - # are full, the closing handshake won't complete anyway. - # Fail the connection to shut down faster. - self.fail_connection() - - # If no close frame is received within the timeout, wait_for() cancels - # the data transfer task and raises TimeoutError. - - # If close() is called multiple times concurrently and one of these - # calls hits the timeout, the data transfer task will be cancelled. - # Other calls will receive a CancelledError here. - - try: - # If close() is canceled during the wait, self.transfer_data_task - # is canceled before the timeout elapses. - await asyncio.wait_for( - self.transfer_data_task, - self.close_timeout, - loop=self.loop if sys.version_info[:2] < (3, 8) else None, - ) - except (asyncio.TimeoutError, asyncio.CancelledError): - pass - - # Wait for the close connection task to close the TCP connection. 
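Although ``close()`` is usually called implicitly (by the connection handler returning or the client context manager exiting), it can be called explicitly with a code and reason. A sketch of a handler that does so, assuming it is registered with ``serve()``:

async def handler(websocket, path):
    # Finish the conversation, then start the closing handshake explicitly.
    await websocket.send("goodbye")
    await websocket.close(code=1001, reason="going away")
    # close() waits for the closing handshake and the TCP teardown, so
    # there is no need to await wait_closed() afterwards.
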
- await asyncio.shield(self.close_connection_task) - - async def ping(self, data: Optional[Data] = None) -> Awaitable[None]: - """ - Send a ping. - - Return a :class:`~asyncio.Future` that will be completed when the - corresponding pong is received. You can ignore it if you don't intend - to wait. - - A ping may serve as a keepalive or as a check that the remote endpoint - received all messages up to this point:: - - pong_waiter = await ws.ping() - await pong_waiter # only if you want to wait for the pong - - By default, the ping contains four random bytes. This payload may be - overridden with the optional ``data`` argument which must be a string - (which will be encoded to UTF-8) or a bytes-like object. - - Canceling :meth:`ping` is discouraged. If :meth:`ping` doesn't return - immediately, it means the write buffer is full. If you don't want to - wait, you should close the connection. - - Canceling the :class:`~asyncio.Future` returned by :meth:`ping` has no - effect. - - """ - await self.ensure_open() - - if data is not None: - data = prepare_ctrl(data) - - # Protect against duplicates if a payload is explicitly set. - if data in self.pings: - raise ValueError("already waiting for a pong with the same data") - - # Generate a unique random payload otherwise. - while data is None or data in self.pings: - data = struct.pack("!I", random.getrandbits(32)) - - self.pings[data] = self.loop.create_future() - - await self.write_frame(True, OP_PING, data) - - return asyncio.shield(self.pings[data]) - - async def pong(self, data: Data = b"") -> None: - """ - Send a pong. - - An unsolicited pong may serve as a unidirectional heartbeat. - - The payload may be set with the optional ``data`` argument which must - be a string (which will be encoded to UTF-8) or a bytes-like object. - - Canceling :meth:`pong` is discouraged for the same reason as - :meth:`ping`. - - """ - await self.ensure_open() - - data = prepare_ctrl(data) - - await self.write_frame(True, OP_PONG, data) - - # Private methods - no guarantees. - - def connection_closed_exc(self) -> ConnectionClosed: - exception: ConnectionClosed - if self.close_code == 1000 or self.close_code == 1001: - exception = ConnectionClosedOK(self.close_code, self.close_reason) - else: - exception = ConnectionClosedError(self.close_code, self.close_reason) - # Chain to the exception that terminated data transfer, if any. - exception.__cause__ = self.transfer_data_exc - return exception - - async def ensure_open(self) -> None: - """ - Check that the WebSocket connection is open. - - Raise :exc:`~websockets.exceptions.ConnectionClosed` if it isn't. - - """ - # Handle cases from most common to least common for performance. - if self.state is State.OPEN: - # If self.transfer_data_task exited without a closing handshake, - # self.close_connection_task may be closing the connection, going - # straight from OPEN to CLOSED. - if self.transfer_data_task.done(): - await asyncio.shield(self.close_connection_task) - raise self.connection_closed_exc() - else: - return - - if self.state is State.CLOSED: - raise self.connection_closed_exc() - - if self.state is State.CLOSING: - # If we started the closing handshake, wait for its completion to - # get the proper close code and reason. self.close_connection_task - # will complete within 4 or 5 * close_timeout after close(). The - # CLOSING state also occurs when failing the connection. In that - # case self.close_connection_task will complete even faster. 
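The ``pong_waiter`` pattern documented above composes naturally with :func:`asyncio.wait_for` to check liveness or measure round-trip time. A sketch, assuming ``websocket`` is an open connection and a 10-second budget:

import asyncio
import time

async def round_trip_time(websocket, timeout=10):
    # ping() returns a future that completes when the matching pong arrives.
    start = time.monotonic()
    pong_waiter = await websocket.ping()
    await asyncio.wait_for(pong_waiter, timeout)
    return time.monotonic() - start
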
- await asyncio.shield(self.close_connection_task) - raise self.connection_closed_exc() - - # Control may only reach this point in buggy third-party subclasses. - assert self.state is State.CONNECTING - raise InvalidState("WebSocket connection isn't established yet") - - async def transfer_data(self) -> None: - """ - Read incoming messages and put them in a queue. - - This coroutine runs in a task until the closing handshake is started. - - """ - try: - while True: - message = await self.read_message() - - # Exit the loop when receiving a close frame. - if message is None: - break - - # Wait until there's room in the queue (if necessary). - if self.max_queue is not None: - while len(self.messages) >= self.max_queue: - self._put_message_waiter = self.loop.create_future() - try: - await asyncio.shield(self._put_message_waiter) - finally: - self._put_message_waiter = None - - # Put the message in the queue. - self.messages.append(message) - - # Notify recv(). - if self._pop_message_waiter is not None: - self._pop_message_waiter.set_result(None) - self._pop_message_waiter = None - - except asyncio.CancelledError as exc: - self.transfer_data_exc = exc - # If fail_connection() cancels this task, avoid logging the error - # twice and failing the connection again. - raise - - except ProtocolError as exc: - self.transfer_data_exc = exc - self.fail_connection(1002) - - except (ConnectionError, TimeoutError, EOFError) as exc: - # Reading data with self.reader.readexactly may raise: - # - most subclasses of ConnectionError if the TCP connection - # breaks, is reset, or is aborted; - # - TimeoutError if the TCP connection times out; - # - IncompleteReadError, a subclass of EOFError, if fewer - # bytes are available than requested. - self.transfer_data_exc = exc - self.fail_connection(1006) - - except UnicodeDecodeError as exc: - self.transfer_data_exc = exc - self.fail_connection(1007) - - except PayloadTooBig as exc: - self.transfer_data_exc = exc - self.fail_connection(1009) - - except Exception as exc: - # This shouldn't happen often because exceptions expected under - # regular circumstances are handled above. If it does, consider - # catching and handling more exceptions. - logger.error("Error in data transfer", exc_info=True) - - self.transfer_data_exc = exc - self.fail_connection(1011) - - async def read_message(self) -> Optional[Data]: - """ - Read a single message from the connection. - - Re-assemble data frames if the message is fragmented. - - Return ``None`` when the closing handshake is started. - - """ - frame = await self.read_data_frame(max_size=self.max_size) - - # A close frame was received. - if frame is None: - return None - - if frame.opcode == OP_TEXT: - text = True - elif frame.opcode == OP_BINARY: - text = False - else: # frame.opcode == OP_CONT - raise ProtocolError("unexpected opcode") - - # Shortcut for the common case - no fragmentation - if frame.fin: - return frame.data.decode("utf-8") if text else frame.data - - # 5.4. 
Fragmentation - chunks: List[Data] = [] - max_size = self.max_size - if text: - decoder_factory = codecs.getincrementaldecoder("utf-8") - decoder = decoder_factory(errors="strict") - if max_size is None: - - def append(frame: Frame) -> None: - nonlocal chunks - chunks.append(decoder.decode(frame.data, frame.fin)) - - else: - - def append(frame: Frame) -> None: - nonlocal chunks, max_size - chunks.append(decoder.decode(frame.data, frame.fin)) - assert isinstance(max_size, int) - max_size -= len(frame.data) - - else: - if max_size is None: - - def append(frame: Frame) -> None: - nonlocal chunks - chunks.append(frame.data) - - else: - - def append(frame: Frame) -> None: - nonlocal chunks, max_size - chunks.append(frame.data) - assert isinstance(max_size, int) - max_size -= len(frame.data) - - append(frame) - - while not frame.fin: - frame = await self.read_data_frame(max_size=max_size) - if frame is None: - raise ProtocolError("incomplete fragmented message") - if frame.opcode != OP_CONT: - raise ProtocolError("unexpected opcode") - append(frame) - - # mypy cannot figure out that chunks have the proper type. - return ("" if text else b"").join(chunks) # type: ignore - - async def read_data_frame(self, max_size: Optional[int]) -> Optional[Frame]: - """ - Read a single data frame from the connection. - - Process control frames received before the next data frame. - - Return ``None`` if a close frame is encountered before any data frame. - - """ - # 6.2. Receiving Data - while True: - frame = await self.read_frame(max_size) - - # 5.5. Control Frames - if frame.opcode == OP_CLOSE: - # 7.1.5. The WebSocket Connection Close Code - # 7.1.6. The WebSocket Connection Close Reason - self.close_code, self.close_reason = parse_close(frame.data) - try: - # Echo the original data instead of re-serializing it with - # serialize_close() because that fails when the close frame - # is empty and parse_close() synthetizes a 1005 close code. - await self.write_close_frame(frame.data) - except ConnectionClosed: - # It doesn't really matter if the connection was closed - # before we could send back a close frame. - pass - return None - - elif frame.opcode == OP_PING: - # Answer pings. - ping_hex = frame.data.hex() or "[empty]" - logger.debug( - "%s - received ping, sending pong: %s", self.side, ping_hex - ) - await self.pong(frame.data) - - elif frame.opcode == OP_PONG: - # Acknowledge pings on solicited pongs. - if frame.data in self.pings: - logger.debug( - "%s - received solicited pong: %s", - self.side, - frame.data.hex() or "[empty]", - ) - # Acknowledge all pings up to the one matching this pong. - ping_id = None - ping_ids = [] - for ping_id, ping in self.pings.items(): - ping_ids.append(ping_id) - if not ping.done(): - ping.set_result(None) - if ping_id == frame.data: - break - else: # pragma: no cover - assert False, "ping_id is in self.pings" - # Remove acknowledged pings from self.pings. - for ping_id in ping_ids: - del self.pings[ping_id] - ping_ids = ping_ids[:-1] - if ping_ids: - pings_hex = ", ".join( - ping_id.hex() or "[empty]" for ping_id in ping_ids - ) - plural = "s" if len(ping_ids) > 1 else "" - logger.debug( - "%s - acknowledged previous ping%s: %s", - self.side, - plural, - pings_hex, - ) - else: - logger.debug( - "%s - received unsolicited pong: %s", - self.side, - frame.data.hex() or "[empty]", - ) - - # 5.6. Data Frames - else: - return frame - - async def read_frame(self, max_size: Optional[int]) -> Frame: - """ - Read a single frame from the connection. 
- - """ - frame = await Frame.read( - self.reader.readexactly, - mask=not self.is_client, - max_size=max_size, - extensions=self.extensions, - ) - logger.debug("%s < %r", self.side, frame) - return frame - - async def write_frame( - self, fin: bool, opcode: int, data: bytes, *, _expected_state: int = State.OPEN - ) -> None: - # Defensive assertion for protocol compliance. - if self.state is not _expected_state: # pragma: no cover - raise InvalidState( - f"Cannot write to a WebSocket in the {self.state.name} state" - ) - - frame = Frame(fin, Opcode(opcode), data) - logger.debug("%s > %r", self.side, frame) - frame.write( - self.transport.write, mask=self.is_client, extensions=self.extensions - ) - - try: - # drain() cannot be called concurrently by multiple coroutines: - # http://bugs.python.org/issue29930. Remove this lock when no - # version of Python where this bugs exists is supported anymore. - async with self._drain_lock: - # Handle flow control automatically. - await self._drain() - except ConnectionError: - # Terminate the connection if the socket died. - self.fail_connection() - # Wait until the connection is closed to raise ConnectionClosed - # with the correct code and reason. - await self.ensure_open() - - async def write_close_frame(self, data: bytes = b"") -> None: - """ - Write a close frame if and only if the connection state is OPEN. - - This dedicated coroutine must be used for writing close frames to - ensure that at most one close frame is sent on a given connection. - - """ - # Test and set the connection state before sending the close frame to - # avoid sending two frames in case of concurrent calls. - if self.state is State.OPEN: - # 7.1.3. The WebSocket Closing Handshake is Started - self.state = State.CLOSING - logger.debug("%s - state = CLOSING", self.side) - - # 7.1.2. Start the WebSocket Closing Handshake - await self.write_frame(True, OP_CLOSE, data, _expected_state=State.CLOSING) - - async def keepalive_ping(self) -> None: - """ - Send a Ping frame and wait for a Pong frame at regular intervals. - - This coroutine exits when the connection terminates and one of the - following happens: - - - :meth:`ping` raises :exc:`ConnectionClosed`, or - - :meth:`close_connection` cancels :attr:`keepalive_ping_task`. - - """ - if self.ping_interval is None: - return - - try: - while True: - await asyncio.sleep( - self.ping_interval, - loop=self.loop if sys.version_info[:2] < (3, 8) else None, - ) - - # ping() raises CancelledError if the connection is closed, - # when close_connection() cancels self.keepalive_ping_task. - - # ping() raises ConnectionClosed if the connection is lost, - # when connection_lost() calls abort_pings(). - - pong_waiter = await self.ping() - - if self.ping_timeout is not None: - try: - await asyncio.wait_for( - pong_waiter, - self.ping_timeout, - loop=self.loop if sys.version_info[:2] < (3, 8) else None, - ) - except asyncio.TimeoutError: - logger.debug("%s ! timed out waiting for pong", self.side) - self.fail_connection(1011) - break - - # Remove this branch when dropping support for Python < 3.8 - # because CancelledError no longer inherits Exception. - except asyncio.CancelledError: - raise - - except ConnectionClosed: - pass - - except Exception: - logger.warning("Unexpected exception in keepalive ping task", exc_info=True) - - async def close_connection(self) -> None: - """ - 7.1.1. Close the WebSocket Connection - - When the opening handshake succeeds, :meth:`connection_open` starts - this coroutine in a task. 
It waits for the data transfer phase to - complete then it closes the TCP connection cleanly. - - When the opening handshake fails, :meth:`fail_connection` does the - same. There's no data transfer phase in that case. - - """ - try: - # Wait for the data transfer phase to complete. - if hasattr(self, "transfer_data_task"): - try: - await self.transfer_data_task - except asyncio.CancelledError: - pass - - # Cancel the keepalive ping task. - if hasattr(self, "keepalive_ping_task"): - self.keepalive_ping_task.cancel() - - # A client should wait for a TCP close from the server. - if self.is_client and hasattr(self, "transfer_data_task"): - if await self.wait_for_connection_lost(): - # Coverage marks this line as a partially executed branch. - # I supect a bug in coverage. Ignore it for now. - return # pragma: no cover - logger.debug("%s ! timed out waiting for TCP close", self.side) - - # Half-close the TCP connection if possible (when there's no TLS). - if self.transport.can_write_eof(): - logger.debug("%s x half-closing TCP connection", self.side) - self.transport.write_eof() - - if await self.wait_for_connection_lost(): - # Coverage marks this line as a partially executed branch. - # I supect a bug in coverage. Ignore it for now. - return # pragma: no cover - logger.debug("%s ! timed out waiting for TCP close", self.side) - - finally: - # The try/finally ensures that the transport never remains open, - # even if this coroutine is canceled (for example). - - # If connection_lost() was called, the TCP connection is closed. - # However, if TLS is enabled, the transport still needs closing. - # Else asyncio complains: ResourceWarning: unclosed transport. - if self.connection_lost_waiter.done() and self.transport.is_closing(): - return - - # Close the TCP connection. Buffers are flushed asynchronously. - logger.debug("%s x closing TCP connection", self.side) - self.transport.close() - - if await self.wait_for_connection_lost(): - return - logger.debug("%s ! timed out waiting for TCP close", self.side) - - # Abort the TCP connection. Buffers are discarded. - logger.debug("%s x aborting TCP connection", self.side) - self.transport.abort() - - # connection_lost() is called quickly after aborting. - # Coverage marks this line as a partially executed branch. - # I supect a bug in coverage. Ignore it for now. - await self.wait_for_connection_lost() # pragma: no cover - - async def wait_for_connection_lost(self) -> bool: - """ - Wait until the TCP connection is closed or ``self.close_timeout`` elapses. - - Return ``True`` if the connection is closed and ``False`` otherwise. - - """ - if not self.connection_lost_waiter.done(): - try: - await asyncio.wait_for( - asyncio.shield(self.connection_lost_waiter), - self.close_timeout, - loop=self.loop if sys.version_info[:2] < (3, 8) else None, - ) - except asyncio.TimeoutError: - pass - # Re-check self.connection_lost_waiter.done() synchronously because - # connection_lost() could run between the moment the timeout occurs - # and the moment this coroutine resumes running. - return self.connection_lost_waiter.done() - - def fail_connection(self, code: int = 1006, reason: str = "") -> None: - """ - 7.1.7. Fail the WebSocket Connection - - This requires: - - 1. Stopping all processing of incoming data, which means cancelling - :attr:`transfer_data_task`. The close code will be 1006 unless a - close frame was received earlier. - - 2. Sending a close frame with an appropriate code if the opening - handshake succeeded and the other side is likely to process it. 
- - 3. Closing the connection. :meth:`close_connection` takes care of - this once :attr:`transfer_data_task` exits after being canceled. - - (The specification describes these steps in the opposite order.) - - """ - logger.debug( - "%s ! failing %s WebSocket connection with code %d", - self.side, - self.state.name, - code, - ) - - # Cancel transfer_data_task if the opening handshake succeeded. - # cancel() is idempotent and ignored if the task is done already. - if hasattr(self, "transfer_data_task"): - self.transfer_data_task.cancel() - - # Send a close frame when the state is OPEN (a close frame was already - # sent if it's CLOSING), except when failing the connection because of - # an error reading from or writing to the network. - # Don't send a close frame if the connection is broken. - if code != 1006 and self.state is State.OPEN: - - frame_data = serialize_close(code, reason) - - # Write the close frame without draining the write buffer. - - # Keeping fail_connection() synchronous guarantees it can't - # get stuck and simplifies the implementation of the callers. - # Not drainig the write buffer is acceptable in this context. - - # This duplicates a few lines of code from write_close_frame() - # and write_frame(). - - self.state = State.CLOSING - logger.debug("%s - state = CLOSING", self.side) - - frame = Frame(True, OP_CLOSE, frame_data) - logger.debug("%s > %r", self.side, frame) - frame.write( - self.transport.write, mask=self.is_client, extensions=self.extensions - ) - - # Start close_connection_task if the opening handshake didn't succeed. - if not hasattr(self, "close_connection_task"): - self.close_connection_task = self.loop.create_task(self.close_connection()) - - def abort_pings(self) -> None: - """ - Raise ConnectionClosed in pending keepalive pings. - - They'll never receive a pong once the connection is closed. - - """ - assert self.state is State.CLOSED - exc = self.connection_closed_exc() - - for ping in self.pings.values(): - ping.set_exception(exc) - # If the exception is never retrieved, it will be logged when ping - # is garbage-collected. This is confusing for users. - # Given that ping is done (with an exception), canceling it does - # nothing, but it prevents logging the exception. - ping.cancel() - - if self.pings: - pings_hex = ", ".join(ping_id.hex() or "[empty]" for ping_id in self.pings) - plural = "s" if len(self.pings) > 1 else "" - logger.debug( - "%s - aborted pending ping%s: %s", self.side, plural, pings_hex - ) - - # asyncio.Protocol methods - - def connection_made(self, transport: asyncio.BaseTransport) -> None: - """ - Configure write buffer limits. - - The high-water limit is defined by ``self.write_limit``. - - The low-water limit currently defaults to ``self.write_limit // 4`` in - :meth:`~asyncio.WriteTransport.set_write_buffer_limits`, which should - be all right for reasonable use cases of this library. - - This is the earliest point where we can get hold of the transport, - which means it's the best point for configuring it. - - """ - logger.debug("%s - event = connection_made(%s)", self.side, transport) - - transport = cast(asyncio.Transport, transport) - transport.set_write_buffer_limits(self.write_limit) - self.transport = transport - - # Copied from asyncio.StreamReaderProtocol - self.reader.set_transport(transport) - - def connection_lost(self, exc: Optional[Exception]) -> None: - """ - 7.1.4. The WebSocket Connection is Closed. 
- - """ - logger.debug("%s - event = connection_lost(%s)", self.side, exc) - self.state = State.CLOSED - logger.debug("%s - state = CLOSED", self.side) - if not hasattr(self, "close_code"): - self.close_code = 1006 - if not hasattr(self, "close_reason"): - self.close_reason = "" - logger.debug( - "%s x code = %d, reason = %s", - self.side, - self.close_code, - self.close_reason or "[no reason]", - ) - self.abort_pings() - # If self.connection_lost_waiter isn't pending, that's a bug, because: - # - it's set only here in connection_lost() which is called only once; - # - it must never be canceled. - self.connection_lost_waiter.set_result(None) - - if True: # pragma: no cover - - # Copied from asyncio.StreamReaderProtocol - if self.reader is not None: - if exc is None: - self.reader.feed_eof() - else: - self.reader.set_exception(exc) - - # Copied from asyncio.FlowControlMixin - # Wake up the writer if currently paused. - if not self._paused: - return - waiter = self._drain_waiter - if waiter is None: - return - self._drain_waiter = None - if waiter.done(): - return - if exc is None: - waiter.set_result(None) - else: - waiter.set_exception(exc) - - def pause_writing(self) -> None: # pragma: no cover - assert not self._paused - self._paused = True - - def resume_writing(self) -> None: # pragma: no cover - assert self._paused - self._paused = False - - waiter = self._drain_waiter - if waiter is not None: - self._drain_waiter = None - if not waiter.done(): - waiter.set_result(None) - - def data_received(self, data: bytes) -> None: - logger.debug("%s - event = data_received(<%d bytes>)", self.side, len(data)) - self.reader.feed_data(data) - - def eof_received(self) -> None: - """ - Close the transport after receiving EOF. - - The WebSocket protocol has its own closing handshake: endpoints close - the TCP or TLS connection after sending and receiving a close frame. - - As a consequence, they never need to write after receiving EOF, so - there's no reason to keep the transport open by returning ``True``. - - Besides, that doesn't work on TLS connections. - - """ - logger.debug("%s - event = eof_received()", self.side) - self.reader.feed_eof() +from .legacy.protocol import * # noqa diff --git a/src/websockets/server.py b/src/websockets/server.py index c2c818ce9..bd527be74 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -6,7 +6,6 @@ import logging from typing import Callable, Generator, List, Optional, Sequence, Tuple, Union, cast -from .asyncio_server import WebSocketServer, WebSocketServerProtocol, serve, unix_serve from .connection import CONNECTING, OPEN, SERVER, Connection from .datastructures import Headers, HeadersLike, MultipleValuesError from .exceptions import ( @@ -27,6 +26,12 @@ ) from .http import USER_AGENT from .http11 import Request, Response +from .legacy.server import ( # noqa + WebSocketServer, + WebSocketServerProtocol, + serve, + unix_serve, +) from .typing import ( ConnectionOption, ExtensionHeader, @@ -37,13 +42,7 @@ from .utils import accept_key -__all__ = [ - "serve", - "unix_serve", - "ServerConnection", - "WebSocketServerProtocol", - "WebSocketServer", -] +__all__ = ["ServerConnection"] logger = logging.getLogger(__name__) diff --git a/tests/__init__.py b/tests/__init__.py index 76c869f50..dd78609f5 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,15 +1,5 @@ import logging -import warnings # Avoid displaying stack traces at the ERROR logging level. 
logging.basicConfig(level=logging.CRITICAL) - - -# Ignore deprecation warnings while refactoring is in progress -warnings.filterwarnings( - action="ignore", - message=r"websockets\.framing is deprecated", - category=DeprecationWarning, - module="websockets.framing", -) diff --git a/tests/legacy/__init__.py b/tests/legacy/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/legacy/test_auth.py b/tests/legacy/test_auth.py new file mode 100644 index 000000000..bb8c6a6eb --- /dev/null +++ b/tests/legacy/test_auth.py @@ -0,0 +1,160 @@ +import unittest +import urllib.error + +from websockets.exceptions import InvalidStatusCode +from websockets.headers import build_authorization_basic +from websockets.legacy.auth import * +from websockets.legacy.auth import is_credentials + +from .test_client_server import ClientServerTestsMixin, with_client, with_server +from .utils import AsyncioTestCase + + +class AuthTests(unittest.TestCase): + def test_is_credentials(self): + self.assertTrue(is_credentials(("username", "password"))) + + def test_is_not_credentials(self): + self.assertFalse(is_credentials(None)) + self.assertFalse(is_credentials("username")) + + +class CustomWebSocketServerProtocol(BasicAuthWebSocketServerProtocol): + async def process_request(self, path, request_headers): + type(self).used = True + return await super().process_request(path, request_headers) + + +class AuthClientServerTests(ClientServerTestsMixin, AsyncioTestCase): + + create_protocol = basic_auth_protocol_factory( + realm="auth-tests", credentials=("hello", "iloveyou") + ) + + @with_server(create_protocol=create_protocol) + @with_client(user_info=("hello", "iloveyou")) + def test_basic_auth(self): + req_headers = self.client.request_headers + resp_headers = self.client.response_headers + self.assertEqual(req_headers["Authorization"], "Basic aGVsbG86aWxvdmV5b3U=") + self.assertNotIn("WWW-Authenticate", resp_headers) + + self.loop.run_until_complete(self.client.send("Hello!")) + self.loop.run_until_complete(self.client.recv()) + + def test_basic_auth_server_no_credentials(self): + with self.assertRaises(TypeError) as raised: + basic_auth_protocol_factory(realm="auth-tests", credentials=None) + self.assertEqual( + str(raised.exception), "provide either credentials or check_credentials" + ) + + def test_basic_auth_server_bad_credentials(self): + with self.assertRaises(TypeError) as raised: + basic_auth_protocol_factory(realm="auth-tests", credentials=42) + self.assertEqual(str(raised.exception), "invalid credentials argument: 42") + + create_protocol_multiple_credentials = basic_auth_protocol_factory( + realm="auth-tests", + credentials=[("hello", "iloveyou"), ("goodbye", "stillloveu")], + ) + + @with_server(create_protocol=create_protocol_multiple_credentials) + @with_client(user_info=("hello", "iloveyou")) + def test_basic_auth_server_multiple_credentials(self): + self.loop.run_until_complete(self.client.send("Hello!")) + self.loop.run_until_complete(self.client.recv()) + + def test_basic_auth_bad_multiple_credentials(self): + with self.assertRaises(TypeError) as raised: + basic_auth_protocol_factory( + realm="auth-tests", credentials=[("hello", "iloveyou"), 42] + ) + self.assertEqual( + str(raised.exception), + "invalid credentials argument: [('hello', 'iloveyou'), 42]", + ) + + async def check_credentials(username, password): + return password == "iloveyou" + + create_protocol_check_credentials = basic_auth_protocol_factory( + realm="auth-tests", + check_credentials=check_credentials, + ) + + 
@with_server(create_protocol=create_protocol_check_credentials) + @with_client(user_info=("hello", "iloveyou")) + def test_basic_auth_check_credentials(self): + self.loop.run_until_complete(self.client.send("Hello!")) + self.loop.run_until_complete(self.client.recv()) + + create_protocol_custom_protocol = basic_auth_protocol_factory( + realm="auth-tests", + credentials=[("hello", "iloveyou")], + create_protocol=CustomWebSocketServerProtocol, + ) + + @with_server(create_protocol=create_protocol_custom_protocol) + @with_client(user_info=("hello", "iloveyou")) + def test_basic_auth_custom_protocol(self): + self.assertTrue(CustomWebSocketServerProtocol.used) + del CustomWebSocketServerProtocol.used + self.loop.run_until_complete(self.client.send("Hello!")) + self.loop.run_until_complete(self.client.recv()) + + @with_server(create_protocol=create_protocol) + def test_basic_auth_missing_credentials(self): + with self.assertRaises(InvalidStatusCode) as raised: + self.start_client() + self.assertEqual(raised.exception.status_code, 401) + + @with_server(create_protocol=create_protocol) + def test_basic_auth_missing_credentials_details(self): + with self.assertRaises(urllib.error.HTTPError) as raised: + self.loop.run_until_complete(self.make_http_request()) + self.assertEqual(raised.exception.code, 401) + self.assertEqual( + raised.exception.headers["WWW-Authenticate"], + 'Basic realm="auth-tests", charset="UTF-8"', + ) + self.assertEqual(raised.exception.read().decode(), "Missing credentials\n") + + @with_server(create_protocol=create_protocol) + def test_basic_auth_unsupported_credentials(self): + with self.assertRaises(InvalidStatusCode) as raised: + self.start_client(extra_headers={"Authorization": "Digest ..."}) + self.assertEqual(raised.exception.status_code, 401) + + @with_server(create_protocol=create_protocol) + def test_basic_auth_unsupported_credentials_details(self): + with self.assertRaises(urllib.error.HTTPError) as raised: + self.loop.run_until_complete( + self.make_http_request(headers={"Authorization": "Digest ..."}) + ) + self.assertEqual(raised.exception.code, 401) + self.assertEqual( + raised.exception.headers["WWW-Authenticate"], + 'Basic realm="auth-tests", charset="UTF-8"', + ) + self.assertEqual(raised.exception.read().decode(), "Unsupported credentials\n") + + @with_server(create_protocol=create_protocol) + def test_basic_auth_invalid_credentials(self): + with self.assertRaises(InvalidStatusCode) as raised: + self.start_client(user_info=("hello", "ihateyou")) + self.assertEqual(raised.exception.status_code, 401) + + @with_server(create_protocol=create_protocol) + def test_basic_auth_invalid_credentials_details(self): + with self.assertRaises(urllib.error.HTTPError) as raised: + authorization = build_authorization_basic("hello", "ihateyou") + self.loop.run_until_complete( + self.make_http_request(headers={"Authorization": authorization}) + ) + self.assertEqual(raised.exception.code, 401) + self.assertEqual( + raised.exception.headers["WWW-Authenticate"], + 'Basic realm="auth-tests", charset="UTF-8"', + ) + self.assertEqual(raised.exception.read().decode(), "Invalid credentials\n") diff --git a/tests/test_asyncio_client_server.py b/tests/legacy/test_client_server.py similarity index 97% rename from tests/test_asyncio_client_server.py rename to tests/legacy/test_client_server.py index 76c29334e..499ea1d59 100644 --- a/tests/test_asyncio_client_server.py +++ b/tests/legacy/test_client_server.py @@ -13,8 +13,6 @@ import urllib.request import warnings -from 
websockets.asyncio_client import * -from websockets.asyncio_server import * from websockets.datastructures import Headers from websockets.exceptions import ( ConnectionClosed, @@ -28,19 +26,20 @@ PerMessageDeflate, ServerPerMessageDeflateFactory, ) -from websockets.handshake_legacy import build_response from websockets.http import USER_AGENT -from websockets.http_legacy import read_response -from websockets.protocol import State +from websockets.legacy.client import * +from websockets.legacy.handshake import build_response +from websockets.legacy.http import read_response +from websockets.legacy.protocol import State +from websockets.legacy.server import * from websockets.uri import parse_uri -from .extensions.test_base import ( +from ..extensions.test_base import ( ClientNoOpExtensionFactory, NoOpExtension, ServerNoOpExtensionFactory, ) -from .test_protocol import MS -from .utils import AsyncioTestCase +from .utils import MS, AsyncioTestCase # Generate TLS certificate with: @@ -49,7 +48,7 @@ # $ cat test_localhost.key test_localhost.crt > test_localhost.pem # $ rm test_localhost.key test_localhost.crt -testcert = bytes(pathlib.Path(__file__).with_name("test_localhost.pem")) +testcert = bytes(pathlib.Path(__file__).parent.with_name("test_localhost.pem")) async def handler(ws, path): @@ -1016,7 +1015,7 @@ def test_subprotocol_error_two_subprotocols(self, _process_subprotocol): self.run_loop_once() @with_server() - @unittest.mock.patch("websockets.asyncio_server.read_request") + @unittest.mock.patch("websockets.legacy.server.read_request") def test_server_receives_malformed_request(self, _read_request): _read_request.side_effect = ValueError("read_request failed") @@ -1024,7 +1023,7 @@ def test_server_receives_malformed_request(self, _read_request): self.start_client() @with_server() - @unittest.mock.patch("websockets.asyncio_client.read_response") + @unittest.mock.patch("websockets.legacy.client.read_response") def test_client_receives_malformed_response(self, _read_response): _read_response.side_effect = ValueError("read_response failed") @@ -1033,7 +1032,7 @@ def test_client_receives_malformed_response(self, _read_response): self.run_loop_once() @with_server() - @unittest.mock.patch("websockets.asyncio_client.build_request") + @unittest.mock.patch("websockets.legacy.client.build_request") def test_client_sends_invalid_handshake_request(self, _build_request): def wrong_build_request(headers): return "42" @@ -1044,7 +1043,7 @@ def wrong_build_request(headers): self.start_client() @with_server() - @unittest.mock.patch("websockets.asyncio_server.build_response") + @unittest.mock.patch("websockets.legacy.server.build_response") def test_server_sends_invalid_handshake_response(self, _build_response): def wrong_build_response(headers, key): return build_response(headers, "42") @@ -1055,7 +1054,7 @@ def wrong_build_response(headers, key): self.start_client() @with_server() - @unittest.mock.patch("websockets.asyncio_client.read_response") + @unittest.mock.patch("websockets.legacy.client.read_response") def test_server_does_not_switch_protocols(self, _read_response): async def wrong_read_response(stream): status_code, reason, headers = await read_response(stream) @@ -1069,7 +1068,7 @@ async def wrong_read_response(stream): @with_server() @unittest.mock.patch( - "websockets.asyncio_server.WebSocketServerProtocol.process_request" + "websockets.legacy.server.WebSocketServerProtocol.process_request" ) def test_server_error_in_handshake(self, _process_request): _process_request.side_effect = 
Exception("process_request crashed") @@ -1078,7 +1077,7 @@ def test_server_error_in_handshake(self, _process_request): self.start_client() @with_server() - @unittest.mock.patch("websockets.asyncio_server.WebSocketServerProtocol.send") + @unittest.mock.patch("websockets.legacy.server.WebSocketServerProtocol.send") def test_server_handler_crashes(self, send): send.side_effect = ValueError("send failed") @@ -1091,7 +1090,7 @@ def test_server_handler_crashes(self, send): self.assertEqual(self.client.close_code, 1011) @with_server() - @unittest.mock.patch("websockets.asyncio_server.WebSocketServerProtocol.close") + @unittest.mock.patch("websockets.legacy.server.WebSocketServerProtocol.close") def test_server_close_crashes(self, close): close.side_effect = ValueError("close failed") @@ -1164,10 +1163,10 @@ def test_invalid_status_error_during_client_connect(self): @with_server() @unittest.mock.patch( - "websockets.server.WebSocketServerProtocol.write_http_response" + "websockets.legacy.server.WebSocketServerProtocol.write_http_response" ) @unittest.mock.patch( - "websockets.asyncio_server.WebSocketServerProtocol.read_http_request" + "websockets.legacy.server.WebSocketServerProtocol.read_http_request" ) def test_connection_error_during_opening_handshake( self, _read_http_request, _write_http_response @@ -1186,7 +1185,7 @@ def test_connection_error_during_opening_handshake( _write_http_response.assert_not_called() @with_server() - @unittest.mock.patch("websockets.asyncio_server.WebSocketServerProtocol.close") + @unittest.mock.patch("websockets.legacy.server.WebSocketServerProtocol.close") def test_connection_error_during_closing_handshake(self, close): close.side_effect = ConnectionError diff --git a/tests/legacy/test_framing.py b/tests/legacy/test_framing.py new file mode 100644 index 000000000..ac870c79e --- /dev/null +++ b/tests/legacy/test_framing.py @@ -0,0 +1,171 @@ +import asyncio +import codecs +import unittest +import unittest.mock +import warnings + +from websockets.exceptions import PayloadTooBig, ProtocolError +from websockets.frames import OP_BINARY, OP_CLOSE, OP_PING, OP_PONG, OP_TEXT +from websockets.legacy.framing import * + +from .utils import AsyncioTestCase + + +class FramingTests(AsyncioTestCase): + def decode(self, message, mask=False, max_size=None, extensions=None): + stream = asyncio.StreamReader(loop=self.loop) + stream.feed_data(message) + stream.feed_eof() + with warnings.catch_warnings(record=True): + frame = self.loop.run_until_complete( + Frame.read( + stream.readexactly, + mask=mask, + max_size=max_size, + extensions=extensions, + ) + ) + # Make sure all the data was consumed. + self.assertTrue(stream.at_eof()) + return frame + + def encode(self, frame, mask=False, extensions=None): + write = unittest.mock.Mock() + with warnings.catch_warnings(record=True): + frame.write(write, mask=mask, extensions=extensions) + # Ensure the entire frame is sent with a single call to write(). + # Multiple calls cause TCP fragmentation and degrade performance. + self.assertEqual(write.call_count, 1) + # The frame data is the single positional argument of that call. 
+ self.assertEqual(len(write.call_args[0]), 1) + self.assertEqual(len(write.call_args[1]), 0) + return write.call_args[0][0] + + def round_trip(self, message, expected, mask=False, extensions=None): + decoded = self.decode(message, mask, extensions=extensions) + self.assertEqual(decoded, expected) + encoded = self.encode(decoded, mask, extensions=extensions) + if mask: # non-deterministic encoding + decoded = self.decode(encoded, mask, extensions=extensions) + self.assertEqual(decoded, expected) + else: # deterministic encoding + self.assertEqual(encoded, message) + + def test_text(self): + self.round_trip(b"\x81\x04Spam", Frame(True, OP_TEXT, b"Spam")) + + def test_text_masked(self): + self.round_trip( + b"\x81\x84\x5b\xfb\xe1\xa8\x08\x8b\x80\xc5", + Frame(True, OP_TEXT, b"Spam"), + mask=True, + ) + + def test_binary(self): + self.round_trip(b"\x82\x04Eggs", Frame(True, OP_BINARY, b"Eggs")) + + def test_binary_masked(self): + self.round_trip( + b"\x82\x84\x53\xcd\xe2\x89\x16\xaa\x85\xfa", + Frame(True, OP_BINARY, b"Eggs"), + mask=True, + ) + + def test_non_ascii_text(self): + self.round_trip( + b"\x81\x05caf\xc3\xa9", Frame(True, OP_TEXT, "café".encode("utf-8")) + ) + + def test_non_ascii_text_masked(self): + self.round_trip( + b"\x81\x85\x64\xbe\xee\x7e\x07\xdf\x88\xbd\xcd", + Frame(True, OP_TEXT, "café".encode("utf-8")), + mask=True, + ) + + def test_close(self): + self.round_trip(b"\x88\x00", Frame(True, OP_CLOSE, b"")) + + def test_ping(self): + self.round_trip(b"\x89\x04ping", Frame(True, OP_PING, b"ping")) + + def test_pong(self): + self.round_trip(b"\x8a\x04pong", Frame(True, OP_PONG, b"pong")) + + def test_long(self): + self.round_trip( + b"\x82\x7e\x00\x7e" + 126 * b"a", Frame(True, OP_BINARY, 126 * b"a") + ) + + def test_very_long(self): + self.round_trip( + b"\x82\x7f\x00\x00\x00\x00\x00\x01\x00\x00" + 65536 * b"a", + Frame(True, OP_BINARY, 65536 * b"a"), + ) + + def test_payload_too_big(self): + with self.assertRaises(PayloadTooBig): + self.decode(b"\x82\x7e\x04\x01" + 1025 * b"a", max_size=1024) + + def test_bad_reserved_bits(self): + for encoded in [b"\xc0\x00", b"\xa0\x00", b"\x90\x00"]: + with self.subTest(encoded=encoded): + with self.assertRaises(ProtocolError): + self.decode(encoded) + + def test_good_opcode(self): + for opcode in list(range(0x00, 0x03)) + list(range(0x08, 0x0B)): + encoded = bytes([0x80 | opcode, 0]) + with self.subTest(encoded=encoded): + self.decode(encoded) # does not raise an exception + + def test_bad_opcode(self): + for opcode in list(range(0x03, 0x08)) + list(range(0x0B, 0x10)): + encoded = bytes([0x80 | opcode, 0]) + with self.subTest(encoded=encoded): + with self.assertRaises(ProtocolError): + self.decode(encoded) + + def test_mask_flag(self): + # Mask flag correctly set. + self.decode(b"\x80\x80\x00\x00\x00\x00", mask=True) + # Mask flag incorrectly unset. + with self.assertRaises(ProtocolError): + self.decode(b"\x80\x80\x00\x00\x00\x00") + # Mask flag correctly unset. + self.decode(b"\x80\x00") + # Mask flag incorrectly set. + with self.assertRaises(ProtocolError): + self.decode(b"\x80\x00", mask=True) + + def test_control_frame_max_length(self): + # At maximum allowed length. + self.decode(b"\x88\x7e\x00\x7d" + 125 * b"a") + # Above maximum allowed length. + with self.assertRaises(ProtocolError): + self.decode(b"\x88\x7e\x00\x7e" + 126 * b"a") + + def test_fragmented_control_frame(self): + # Fin bit correctly set. + self.decode(b"\x88\x00") + # Fin bit incorrectly unset. 
+ with self.assertRaises(ProtocolError): + self.decode(b"\x08\x00") + + def test_extensions(self): + class Rot13: + @staticmethod + def encode(frame): + assert frame.opcode == OP_TEXT + text = frame.data.decode() + data = codecs.encode(text, "rot13").encode() + return frame._replace(data=data) + + # This extensions is symmetrical. + @staticmethod + def decode(frame, *, max_size=None): + return Rot13.encode(frame) + + self.round_trip( + b"\x81\x05uryyb", Frame(True, OP_TEXT, b"hello"), extensions=[Rot13()] + ) diff --git a/tests/test_handshake_legacy.py b/tests/legacy/test_handshake.py similarity index 99% rename from tests/test_handshake_legacy.py rename to tests/legacy/test_handshake.py index c34b94e41..661ae64fc 100644 --- a/tests/test_handshake_legacy.py +++ b/tests/legacy/test_handshake.py @@ -8,7 +8,7 @@ InvalidHeaderValue, InvalidUpgrade, ) -from websockets.handshake_legacy import * +from websockets.legacy.handshake import * from websockets.utils import accept_key diff --git a/tests/test_http_legacy.py b/tests/legacy/test_http.py similarity index 98% rename from tests/test_http_legacy.py rename to tests/legacy/test_http.py index e4c75315e..5c9adc97f 100644 --- a/tests/test_http_legacy.py +++ b/tests/legacy/test_http.py @@ -1,8 +1,8 @@ import asyncio from websockets.exceptions import SecurityError -from websockets.http_legacy import * -from websockets.http_legacy import read_headers +from websockets.legacy.http import * +from websockets.legacy.http import read_headers from .utils import AsyncioTestCase diff --git a/tests/legacy/test_protocol.py b/tests/legacy/test_protocol.py new file mode 100644 index 000000000..218d05376 --- /dev/null +++ b/tests/legacy/test_protocol.py @@ -0,0 +1,1489 @@ +import asyncio +import contextlib +import sys +import unittest +import unittest.mock +import warnings + +from websockets.exceptions import ConnectionClosed, InvalidState +from websockets.frames import ( + OP_BINARY, + OP_CLOSE, + OP_CONT, + OP_PING, + OP_PONG, + OP_TEXT, + serialize_close, +) +from websockets.legacy.framing import Frame +from websockets.legacy.protocol import State, WebSocketCommonProtocol + +from .utils import MS, AsyncioTestCase + + +async def async_iterable(iterable): + for item in iterable: + yield item + + +class TransportMock(unittest.mock.Mock): + """ + Transport mock to control the protocol's inputs and outputs in tests. + + It calls the protocol's connection_made and connection_lost methods like + actual transports. + + It also calls the protocol's connection_open method to bypass the + WebSocket handshake. + + To simulate incoming data, tests call the protocol's data_received and + eof_received methods directly. + + They could also pause_writing and resume_writing to test flow control. + + """ + + # This should happen in __init__ but overriding Mock.__init__ is hard. + def setup_mock(self, loop, protocol): + self.loop = loop + self.protocol = protocol + self._eof = False + self._closing = False + # Simulate a successful TCP handshake. + self.protocol.connection_made(self) + # Simulate a successful WebSocket handshake. + self.protocol.connection_open() + + def can_write_eof(self): + return True + + def write_eof(self): + # When the protocol half-closes the TCP connection, it expects the + # other end to close it. Simulate that. + if not self._eof: + self.loop.call_soon(self.close) + self._eof = True + + def close(self): + # Simulate how actual transports drop the connection. 
+ if not self._closing: + self.loop.call_soon(self.protocol.connection_lost, None) + self._closing = True + + def abort(self): + # Change this to an `if` if tests call abort() multiple times. + assert self.protocol.state is not State.CLOSED + self.loop.call_soon(self.protocol.connection_lost, None) + + +class CommonTests: + """ + Mixin that defines most tests but doesn't inherit unittest.TestCase. + + Tests are run by the ServerTests and ClientTests subclasses. + + """ + + def setUp(self): + super().setUp() + # Disable pings to make it easier to test what frames are sent exactly. + self.protocol = WebSocketCommonProtocol(ping_interval=None) + self.transport = TransportMock() + self.transport.setup_mock(self.loop, self.protocol) + + def tearDown(self): + self.transport.close() + self.loop.run_until_complete(self.protocol.close()) + super().tearDown() + + # Utilities for writing tests. + + def make_drain_slow(self, delay=MS): + # Process connection_made in order to initialize self.protocol.transport. + self.run_loop_once() + + original_drain = self.protocol._drain + + async def delayed_drain(): + await asyncio.sleep( + delay, loop=self.loop if sys.version_info[:2] < (3, 8) else None + ) + await original_drain() + + self.protocol._drain = delayed_drain + + close_frame = Frame(True, OP_CLOSE, serialize_close(1000, "close")) + local_close = Frame(True, OP_CLOSE, serialize_close(1000, "local")) + remote_close = Frame(True, OP_CLOSE, serialize_close(1000, "remote")) + + def receive_frame(self, frame): + """ + Make the protocol receive a frame. + + """ + write = self.protocol.data_received + mask = not self.protocol.is_client + frame.write(write, mask=mask) + + def receive_eof(self): + """ + Make the protocol receive the end of the data stream. + + Since ``WebSocketCommonProtocol.eof_received`` returns ``None``, an + actual transport would close itself after calling it. This function + emulates that behavior. + + """ + self.protocol.eof_received() + self.loop.call_soon(self.transport.close) + + def receive_eof_if_client(self): + """ + Like receive_eof, but only if this is the client side. + + Since the server is supposed to initiate the termination of the TCP + connection, this method helps making tests work for both sides. + + """ + if self.protocol.is_client: + self.receive_eof() + + def close_connection(self, code=1000, reason="close"): + """ + Execute a closing handshake. + + This puts the connection in the CLOSED state. + + """ + close_frame_data = serialize_close(code, reason) + # Prepare the response to the closing handshake from the remote side. + self.receive_frame(Frame(True, OP_CLOSE, close_frame_data)) + self.receive_eof_if_client() + # Trigger the closing handshake from the local side and complete it. + self.loop.run_until_complete(self.protocol.close(code, reason)) + # Empty the outgoing data stream so we can make assertions later on. + self.assertOneFrameSent(True, OP_CLOSE, close_frame_data) + + assert self.protocol.state is State.CLOSED + + def half_close_connection_local(self, code=1000, reason="close"): + """ + Start a closing handshake but do not complete it. + + The main difference with `close_connection` is that the connection is + left in the CLOSING state until the event loop runs again. + + The current implementation returns a task that must be awaited or + canceled, else asyncio complains about destroying a pending task. + + """ + close_frame_data = serialize_close(code, reason) + # Trigger the closing handshake from the local endpoint. 
+        close_task = self.loop.create_task(self.protocol.close(code, reason))
+        self.run_loop_once()  # wait_for executes
+        self.run_loop_once()  # write_frame executes
+        # Empty the outgoing data stream so we can make assertions later on.
+        self.assertOneFrameSent(True, OP_CLOSE, close_frame_data)
+
+        assert self.protocol.state is State.CLOSING
+
+        # Complete the closing sequence at 1ms intervals so the test can run
+        # at each point even if it goes back to the event loop several times.
+        self.loop.call_later(
+            MS, self.receive_frame, Frame(True, OP_CLOSE, close_frame_data)
+        )
+        self.loop.call_later(2 * MS, self.receive_eof_if_client)
+
+        # This task must be awaited or canceled by the caller.
+        return close_task
+
+    def half_close_connection_remote(self, code=1000, reason="close"):
+        """
+        Receive a closing handshake but do not complete it.
+
+        The main difference with `close_connection` is that the connection is
+        left in the CLOSING state until the event loop runs again.
+
+        """
+        # On the server side, websockets completes the closing handshake and
+        # closes the TCP connection immediately. Yield to the event loop after
+        # sending the close frame to run the test while the connection is in
+        # the CLOSING state.
+        if not self.protocol.is_client:
+            self.make_drain_slow()
+
+        close_frame_data = serialize_close(code, reason)
+        # Trigger the closing handshake from the remote endpoint.
+        self.receive_frame(Frame(True, OP_CLOSE, close_frame_data))
+        self.run_loop_once()  # read_frame executes
+        # Empty the outgoing data stream so we can make assertions later on.
+        self.assertOneFrameSent(True, OP_CLOSE, close_frame_data)
+
+        assert self.protocol.state is State.CLOSING
+
+        # Complete the closing sequence at 1ms intervals so the test can run
+        # at each point even if it goes back to the event loop several times.
+        self.loop.call_later(2 * MS, self.receive_eof_if_client)
+
+    def process_invalid_frames(self):
+        """
+        Make the protocol fail quickly after simulating invalid data.
+
+        To achieve this, this function triggers the protocol's eof_received,
+        which interrupts pending reads waiting for more data.
+
+        """
+        self.run_loop_once()
+        self.receive_eof()
+        self.loop.run_until_complete(self.protocol.close_connection_task)
+
+    def sent_frames(self):
+        """
+        Read all frames sent to the transport.
+
+        """
+        stream = asyncio.StreamReader(loop=self.loop)
+
+        for (data,), kw in self.transport.write.call_args_list:
+            stream.feed_data(data)
+        self.transport.write.call_args_list = []
+        stream.feed_eof()
+
+        frames = []
+        while not stream.at_eof():
+            frames.append(
+                self.loop.run_until_complete(
+                    Frame.read(stream.readexactly, mask=self.protocol.is_client)
+                )
+            )
+        return frames
+
+    def last_sent_frame(self):
+        """
+        Read the last frame sent to the transport.
+
+        This method assumes that at most one frame was sent. It raises an
+        AssertionError otherwise.
+
+        """
+        frames = self.sent_frames()
+        if frames:
+            assert len(frames) == 1
+            return frames[0]
+
+    def assertFramesSent(self, *frames):
+        self.assertEqual(self.sent_frames(), [Frame(*args) for args in frames])
+
+    def assertOneFrameSent(self, *args):
+        self.assertEqual(self.last_sent_frame(), Frame(*args))
+
+    def assertNoFrameSent(self):
+        self.assertIsNone(self.last_sent_frame())
+
+    def assertConnectionClosed(self, code, message):
+        # The following line guarantees that connection_lost was called.
+        self.assertEqual(self.protocol.state, State.CLOSED)
+        # A close frame was received.
+ self.assertEqual(self.protocol.close_code, code) + self.assertEqual(self.protocol.close_reason, message) + + def assertConnectionFailed(self, code, message): + # The following line guarantees that connection_lost was called. + self.assertEqual(self.protocol.state, State.CLOSED) + # No close frame was received. + self.assertEqual(self.protocol.close_code, 1006) + self.assertEqual(self.protocol.close_reason, "") + # A close frame was sent -- unless the connection was already lost. + if code == 1006: + self.assertNoFrameSent() + else: + self.assertOneFrameSent(True, OP_CLOSE, serialize_close(code, message)) + + @contextlib.contextmanager + def assertCompletesWithin(self, min_time, max_time): + t0 = self.loop.time() + yield + t1 = self.loop.time() + dt = t1 - t0 + self.assertGreaterEqual(dt, min_time, f"Too fast: {dt} < {min_time}") + self.assertLess(dt, max_time, f"Too slow: {dt} >= {max_time}") + + # Test constructor. + + def test_timeout_backwards_compatibility(self): + with warnings.catch_warnings(record=True) as recorded_warnings: + protocol = WebSocketCommonProtocol(timeout=5) + + self.assertEqual(protocol.close_timeout, 5) + + self.assertEqual(len(recorded_warnings), 1) + warning = recorded_warnings[0].message + self.assertEqual(str(warning), "rename timeout to close_timeout") + self.assertEqual(type(warning), DeprecationWarning) + + # Test public attributes. + + def test_local_address(self): + get_extra_info = unittest.mock.Mock(return_value=("host", 4312)) + self.transport.get_extra_info = get_extra_info + + self.assertEqual(self.protocol.local_address, ("host", 4312)) + get_extra_info.assert_called_with("sockname") + + def test_local_address_before_connection(self): + # Emulate the situation before connection_open() runs. + _transport = self.protocol.transport + del self.protocol.transport + try: + self.assertEqual(self.protocol.local_address, None) + finally: + self.protocol.transport = _transport + + def test_remote_address(self): + get_extra_info = unittest.mock.Mock(return_value=("host", 4312)) + self.transport.get_extra_info = get_extra_info + + self.assertEqual(self.protocol.remote_address, ("host", 4312)) + get_extra_info.assert_called_with("peername") + + def test_remote_address_before_connection(self): + # Emulate the situation before connection_open() runs. + _transport = self.protocol.transport + del self.protocol.transport + try: + self.assertEqual(self.protocol.remote_address, None) + finally: + self.protocol.transport = _transport + + def test_open(self): + self.assertTrue(self.protocol.open) + self.close_connection() + self.assertFalse(self.protocol.open) + + def test_closed(self): + self.assertFalse(self.protocol.closed) + self.close_connection() + self.assertTrue(self.protocol.closed) + + def test_wait_closed(self): + wait_closed = self.loop.create_task(self.protocol.wait_closed()) + self.assertFalse(wait_closed.done()) + self.close_connection() + self.assertTrue(wait_closed.done()) + + # Test the recv coroutine. 
+ + def test_recv_text(self): + self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8"))) + data = self.loop.run_until_complete(self.protocol.recv()) + self.assertEqual(data, "café") + + def test_recv_binary(self): + self.receive_frame(Frame(True, OP_BINARY, b"tea")) + data = self.loop.run_until_complete(self.protocol.recv()) + self.assertEqual(data, b"tea") + + def test_recv_on_closing_connection_local(self): + close_task = self.half_close_connection_local() + + with self.assertRaises(ConnectionClosed): + self.loop.run_until_complete(self.protocol.recv()) + + self.loop.run_until_complete(close_task) # cleanup + + def test_recv_on_closing_connection_remote(self): + self.half_close_connection_remote() + + with self.assertRaises(ConnectionClosed): + self.loop.run_until_complete(self.protocol.recv()) + + def test_recv_on_closed_connection(self): + self.close_connection() + + with self.assertRaises(ConnectionClosed): + self.loop.run_until_complete(self.protocol.recv()) + + def test_recv_protocol_error(self): + self.receive_frame(Frame(True, OP_CONT, "café".encode("utf-8"))) + self.process_invalid_frames() + self.assertConnectionFailed(1002, "") + + def test_recv_unicode_error(self): + self.receive_frame(Frame(True, OP_TEXT, "café".encode("latin-1"))) + self.process_invalid_frames() + self.assertConnectionFailed(1007, "") + + def test_recv_text_payload_too_big(self): + self.protocol.max_size = 1024 + self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8") * 205)) + self.process_invalid_frames() + self.assertConnectionFailed(1009, "") + + def test_recv_binary_payload_too_big(self): + self.protocol.max_size = 1024 + self.receive_frame(Frame(True, OP_BINARY, b"tea" * 342)) + self.process_invalid_frames() + self.assertConnectionFailed(1009, "") + + def test_recv_text_no_max_size(self): + self.protocol.max_size = None # for test coverage + self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8") * 205)) + data = self.loop.run_until_complete(self.protocol.recv()) + self.assertEqual(data, "café" * 205) + + def test_recv_binary_no_max_size(self): + self.protocol.max_size = None # for test coverage + self.receive_frame(Frame(True, OP_BINARY, b"tea" * 342)) + data = self.loop.run_until_complete(self.protocol.recv()) + self.assertEqual(data, b"tea" * 342) + + def test_recv_queue_empty(self): + recv = self.loop.create_task(self.protocol.recv()) + with self.assertRaises(asyncio.TimeoutError): + self.loop.run_until_complete( + asyncio.wait_for(asyncio.shield(recv), timeout=MS) + ) + + self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8"))) + data = self.loop.run_until_complete(recv) + self.assertEqual(data, "café") + + def test_recv_queue_full(self): + self.protocol.max_queue = 2 + # Test internals because it's hard to verify buffers from the outside. 
+ self.assertEqual(list(self.protocol.messages), []) + + self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8"))) + self.run_loop_once() + self.assertEqual(list(self.protocol.messages), ["café"]) + + self.receive_frame(Frame(True, OP_BINARY, b"tea")) + self.run_loop_once() + self.assertEqual(list(self.protocol.messages), ["café", b"tea"]) + + self.receive_frame(Frame(True, OP_BINARY, b"milk")) + self.run_loop_once() + self.assertEqual(list(self.protocol.messages), ["café", b"tea"]) + + self.loop.run_until_complete(self.protocol.recv()) + self.run_loop_once() + self.assertEqual(list(self.protocol.messages), [b"tea", b"milk"]) + + self.loop.run_until_complete(self.protocol.recv()) + self.run_loop_once() + self.assertEqual(list(self.protocol.messages), [b"milk"]) + + self.loop.run_until_complete(self.protocol.recv()) + self.run_loop_once() + self.assertEqual(list(self.protocol.messages), []) + + def test_recv_queue_no_limit(self): + self.protocol.max_queue = None + + for _ in range(100): + self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8"))) + self.run_loop_once() + + # Incoming message queue can contain at least 100 messages. + self.assertEqual(list(self.protocol.messages), ["café"] * 100) + + for _ in range(100): + self.loop.run_until_complete(self.protocol.recv()) + + self.assertEqual(list(self.protocol.messages), []) + + def test_recv_other_error(self): + async def read_message(): + raise Exception("BOOM") + + self.protocol.read_message = read_message + self.process_invalid_frames() + self.assertConnectionFailed(1011, "") + + def test_recv_canceled(self): + recv = self.loop.create_task(self.protocol.recv()) + self.loop.call_soon(recv.cancel) + + with self.assertRaises(asyncio.CancelledError): + self.loop.run_until_complete(recv) + + # The next frame doesn't disappear in a vacuum (it used to). + self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8"))) + data = self.loop.run_until_complete(self.protocol.recv()) + self.assertEqual(data, "café") + + def test_recv_canceled_race_condition(self): + recv = self.loop.create_task( + asyncio.wait_for(self.protocol.recv(), timeout=0.000_001) + ) + self.loop.call_soon( + self.receive_frame, Frame(True, OP_TEXT, "café".encode("utf-8")) + ) + + with self.assertRaises(asyncio.TimeoutError): + self.loop.run_until_complete(recv) + + # The previous frame doesn't disappear in a vacuum (it used to). + self.receive_frame(Frame(True, OP_TEXT, "tea".encode("utf-8"))) + data = self.loop.run_until_complete(self.protocol.recv()) + # If we're getting "tea" there, it means "café" was swallowed (ha, ha). + self.assertEqual(data, "café") + + def test_recv_when_transfer_data_cancelled(self): + # Clog incoming queue. + self.protocol.max_queue = 1 + self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8"))) + self.receive_frame(Frame(True, OP_BINARY, b"tea")) + self.run_loop_once() + + # Flow control kicks in (check with an implementation detail). + self.assertFalse(self.protocol._put_message_waiter.done()) + + # Schedule recv(). + recv = self.loop.create_task(self.protocol.recv()) + + # Cancel transfer_data_task (again, implementation detail). + self.protocol.fail_connection() + self.run_loop_once() + self.assertTrue(self.protocol.transfer_data_task.cancelled()) + + # recv() completes properly. 
+ self.assertEqual(self.loop.run_until_complete(recv), "café") + + def test_recv_prevents_concurrent_calls(self): + recv = self.loop.create_task(self.protocol.recv()) + + with self.assertRaises(RuntimeError) as raised: + self.loop.run_until_complete(self.protocol.recv()) + self.assertEqual( + str(raised.exception), + "cannot call recv while another coroutine " + "is already waiting for the next message", + ) + recv.cancel() + + # Test the send coroutine. + + def test_send_text(self): + self.loop.run_until_complete(self.protocol.send("café")) + self.assertOneFrameSent(True, OP_TEXT, "café".encode("utf-8")) + + def test_send_binary(self): + self.loop.run_until_complete(self.protocol.send(b"tea")) + self.assertOneFrameSent(True, OP_BINARY, b"tea") + + def test_send_binary_from_bytearray(self): + self.loop.run_until_complete(self.protocol.send(bytearray(b"tea"))) + self.assertOneFrameSent(True, OP_BINARY, b"tea") + + def test_send_binary_from_memoryview(self): + self.loop.run_until_complete(self.protocol.send(memoryview(b"tea"))) + self.assertOneFrameSent(True, OP_BINARY, b"tea") + + def test_send_binary_from_non_contiguous_memoryview(self): + self.loop.run_until_complete(self.protocol.send(memoryview(b"tteeaa")[::2])) + self.assertOneFrameSent(True, OP_BINARY, b"tea") + + def test_send_dict(self): + with self.assertRaises(TypeError): + self.loop.run_until_complete(self.protocol.send({"not": "encoded"})) + self.assertNoFrameSent() + + def test_send_type_error(self): + with self.assertRaises(TypeError): + self.loop.run_until_complete(self.protocol.send(42)) + self.assertNoFrameSent() + + def test_send_iterable_text(self): + self.loop.run_until_complete(self.protocol.send(["ca", "fé"])) + self.assertFramesSent( + (False, OP_TEXT, "ca".encode("utf-8")), + (False, OP_CONT, "fé".encode("utf-8")), + (True, OP_CONT, "".encode("utf-8")), + ) + + def test_send_iterable_binary(self): + self.loop.run_until_complete(self.protocol.send([b"te", b"a"])) + self.assertFramesSent( + (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") + ) + + def test_send_iterable_binary_from_bytearray(self): + self.loop.run_until_complete( + self.protocol.send([bytearray(b"te"), bytearray(b"a")]) + ) + self.assertFramesSent( + (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") + ) + + def test_send_iterable_binary_from_memoryview(self): + self.loop.run_until_complete( + self.protocol.send([memoryview(b"te"), memoryview(b"a")]) + ) + self.assertFramesSent( + (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") + ) + + def test_send_iterable_binary_from_non_contiguous_memoryview(self): + self.loop.run_until_complete( + self.protocol.send([memoryview(b"ttee")[::2], memoryview(b"aa")[::2]]) + ) + self.assertFramesSent( + (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") + ) + + def test_send_empty_iterable(self): + self.loop.run_until_complete(self.protocol.send([])) + self.assertNoFrameSent() + + def test_send_iterable_type_error(self): + with self.assertRaises(TypeError): + self.loop.run_until_complete(self.protocol.send([42])) + self.assertNoFrameSent() + + def test_send_iterable_mixed_type_error(self): + with self.assertRaises(TypeError): + self.loop.run_until_complete(self.protocol.send(["café", b"tea"])) + self.assertFramesSent( + (False, OP_TEXT, "café".encode("utf-8")), + (True, OP_CLOSE, serialize_close(1011, "")), + ) + + def test_send_iterable_prevents_concurrent_send(self): + self.make_drain_slow(2 * MS) + + async def send_iterable(): + 
await self.protocol.send(["ca", "fé"]) + + async def send_concurrent(): + await asyncio.sleep(MS) + await self.protocol.send(b"tea") + + self.loop.run_until_complete(asyncio.gather(send_iterable(), send_concurrent())) + self.assertFramesSent( + (False, OP_TEXT, "ca".encode("utf-8")), + (False, OP_CONT, "fé".encode("utf-8")), + (True, OP_CONT, "".encode("utf-8")), + (True, OP_BINARY, b"tea"), + ) + + def test_send_async_iterable_text(self): + self.loop.run_until_complete(self.protocol.send(async_iterable(["ca", "fé"]))) + self.assertFramesSent( + (False, OP_TEXT, "ca".encode("utf-8")), + (False, OP_CONT, "fé".encode("utf-8")), + (True, OP_CONT, "".encode("utf-8")), + ) + + def test_send_async_iterable_binary(self): + self.loop.run_until_complete(self.protocol.send(async_iterable([b"te", b"a"]))) + self.assertFramesSent( + (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") + ) + + def test_send_async_iterable_binary_from_bytearray(self): + self.loop.run_until_complete( + self.protocol.send(async_iterable([bytearray(b"te"), bytearray(b"a")])) + ) + self.assertFramesSent( + (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") + ) + + def test_send_async_iterable_binary_from_memoryview(self): + self.loop.run_until_complete( + self.protocol.send(async_iterable([memoryview(b"te"), memoryview(b"a")])) + ) + self.assertFramesSent( + (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") + ) + + def test_send_async_iterable_binary_from_non_contiguous_memoryview(self): + self.loop.run_until_complete( + self.protocol.send( + async_iterable([memoryview(b"ttee")[::2], memoryview(b"aa")[::2]]) + ) + ) + self.assertFramesSent( + (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") + ) + + def test_send_empty_async_iterable(self): + self.loop.run_until_complete(self.protocol.send(async_iterable([]))) + self.assertNoFrameSent() + + def test_send_async_iterable_type_error(self): + with self.assertRaises(TypeError): + self.loop.run_until_complete(self.protocol.send(async_iterable([42]))) + self.assertNoFrameSent() + + def test_send_async_iterable_mixed_type_error(self): + with self.assertRaises(TypeError): + self.loop.run_until_complete( + self.protocol.send(async_iterable(["café", b"tea"])) + ) + self.assertFramesSent( + (False, OP_TEXT, "café".encode("utf-8")), + (True, OP_CLOSE, serialize_close(1011, "")), + ) + + def test_send_async_iterable_prevents_concurrent_send(self): + self.make_drain_slow(2 * MS) + + async def send_async_iterable(): + await self.protocol.send(async_iterable(["ca", "fé"])) + + async def send_concurrent(): + await asyncio.sleep(MS) + await self.protocol.send(b"tea") + + self.loop.run_until_complete( + asyncio.gather(send_async_iterable(), send_concurrent()) + ) + self.assertFramesSent( + (False, OP_TEXT, "ca".encode("utf-8")), + (False, OP_CONT, "fé".encode("utf-8")), + (True, OP_CONT, "".encode("utf-8")), + (True, OP_BINARY, b"tea"), + ) + + def test_send_on_closing_connection_local(self): + close_task = self.half_close_connection_local() + + with self.assertRaises(ConnectionClosed): + self.loop.run_until_complete(self.protocol.send("foobar")) + + self.assertNoFrameSent() + + self.loop.run_until_complete(close_task) # cleanup + + def test_send_on_closing_connection_remote(self): + self.half_close_connection_remote() + + with self.assertRaises(ConnectionClosed): + self.loop.run_until_complete(self.protocol.send("foobar")) + + self.assertNoFrameSent() + + def test_send_on_closed_connection(self): + 
self.close_connection() + + with self.assertRaises(ConnectionClosed): + self.loop.run_until_complete(self.protocol.send("foobar")) + + self.assertNoFrameSent() + + # Test the ping coroutine. + + def test_ping_default(self): + self.loop.run_until_complete(self.protocol.ping()) + # With our testing tools, it's more convenient to extract the expected + # ping data from the library's internals than from the frame sent. + ping_data = next(iter(self.protocol.pings)) + self.assertIsInstance(ping_data, bytes) + self.assertEqual(len(ping_data), 4) + self.assertOneFrameSent(True, OP_PING, ping_data) + + def test_ping_text(self): + self.loop.run_until_complete(self.protocol.ping("café")) + self.assertOneFrameSent(True, OP_PING, "café".encode("utf-8")) + + def test_ping_binary(self): + self.loop.run_until_complete(self.protocol.ping(b"tea")) + self.assertOneFrameSent(True, OP_PING, b"tea") + + def test_ping_binary_from_bytearray(self): + self.loop.run_until_complete(self.protocol.ping(bytearray(b"tea"))) + self.assertOneFrameSent(True, OP_PING, b"tea") + + def test_ping_binary_from_memoryview(self): + self.loop.run_until_complete(self.protocol.ping(memoryview(b"tea"))) + self.assertOneFrameSent(True, OP_PING, b"tea") + + def test_ping_binary_from_non_contiguous_memoryview(self): + self.loop.run_until_complete(self.protocol.ping(memoryview(b"tteeaa")[::2])) + self.assertOneFrameSent(True, OP_PING, b"tea") + + def test_ping_type_error(self): + with self.assertRaises(TypeError): + self.loop.run_until_complete(self.protocol.ping(42)) + self.assertNoFrameSent() + + def test_ping_on_closing_connection_local(self): + close_task = self.half_close_connection_local() + + with self.assertRaises(ConnectionClosed): + self.loop.run_until_complete(self.protocol.ping()) + + self.assertNoFrameSent() + + self.loop.run_until_complete(close_task) # cleanup + + def test_ping_on_closing_connection_remote(self): + self.half_close_connection_remote() + + with self.assertRaises(ConnectionClosed): + self.loop.run_until_complete(self.protocol.ping()) + + self.assertNoFrameSent() + + def test_ping_on_closed_connection(self): + self.close_connection() + + with self.assertRaises(ConnectionClosed): + self.loop.run_until_complete(self.protocol.ping()) + + self.assertNoFrameSent() + + # Test the pong coroutine. 
+ + def test_pong_default(self): + self.loop.run_until_complete(self.protocol.pong()) + self.assertOneFrameSent(True, OP_PONG, b"") + + def test_pong_text(self): + self.loop.run_until_complete(self.protocol.pong("café")) + self.assertOneFrameSent(True, OP_PONG, "café".encode("utf-8")) + + def test_pong_binary(self): + self.loop.run_until_complete(self.protocol.pong(b"tea")) + self.assertOneFrameSent(True, OP_PONG, b"tea") + + def test_pong_binary_from_bytearray(self): + self.loop.run_until_complete(self.protocol.pong(bytearray(b"tea"))) + self.assertOneFrameSent(True, OP_PONG, b"tea") + + def test_pong_binary_from_memoryview(self): + self.loop.run_until_complete(self.protocol.pong(memoryview(b"tea"))) + self.assertOneFrameSent(True, OP_PONG, b"tea") + + def test_pong_binary_from_non_contiguous_memoryview(self): + self.loop.run_until_complete(self.protocol.pong(memoryview(b"tteeaa")[::2])) + self.assertOneFrameSent(True, OP_PONG, b"tea") + + def test_pong_type_error(self): + with self.assertRaises(TypeError): + self.loop.run_until_complete(self.protocol.pong(42)) + self.assertNoFrameSent() + + def test_pong_on_closing_connection_local(self): + close_task = self.half_close_connection_local() + + with self.assertRaises(ConnectionClosed): + self.loop.run_until_complete(self.protocol.pong()) + + self.assertNoFrameSent() + + self.loop.run_until_complete(close_task) # cleanup + + def test_pong_on_closing_connection_remote(self): + self.half_close_connection_remote() + + with self.assertRaises(ConnectionClosed): + self.loop.run_until_complete(self.protocol.pong()) + + self.assertNoFrameSent() + + def test_pong_on_closed_connection(self): + self.close_connection() + + with self.assertRaises(ConnectionClosed): + self.loop.run_until_complete(self.protocol.pong()) + + self.assertNoFrameSent() + + # Test the protocol's logic for acknowledging pings with pongs. + + def test_answer_ping(self): + self.receive_frame(Frame(True, OP_PING, b"test")) + self.run_loop_once() + self.assertOneFrameSent(True, OP_PONG, b"test") + + def test_ignore_pong(self): + self.receive_frame(Frame(True, OP_PONG, b"test")) + self.run_loop_once() + self.assertNoFrameSent() + + def test_acknowledge_ping(self): + ping = self.loop.run_until_complete(self.protocol.ping()) + self.assertFalse(ping.done()) + ping_frame = self.last_sent_frame() + pong_frame = Frame(True, OP_PONG, ping_frame.data) + self.receive_frame(pong_frame) + self.run_loop_once() + self.run_loop_once() + self.assertTrue(ping.done()) + + def test_abort_ping(self): + ping = self.loop.run_until_complete(self.protocol.ping()) + # Remove the frame from the buffer, else close_connection() complains. + self.last_sent_frame() + self.assertFalse(ping.done()) + self.close_connection() + self.assertTrue(ping.done()) + self.assertIsInstance(ping.exception(), ConnectionClosed) + + def test_abort_ping_does_not_log_exception_if_not_retreived(self): + self.loop.run_until_complete(self.protocol.ping()) + # Get the internal Future, which isn't directly returned by ping(). + (ping,) = self.protocol.pings.values() + # Remove the frame from the buffer, else close_connection() complains. + self.last_sent_frame() + self.close_connection() + # Check a private attribute, for lack of a better solution. 
+ self.assertFalse(ping._log_traceback) + + def test_acknowledge_previous_pings(self): + pings = [ + (self.loop.run_until_complete(self.protocol.ping()), self.last_sent_frame()) + for i in range(3) + ] + # Unsolicited pong doesn't acknowledge pings + self.receive_frame(Frame(True, OP_PONG, b"")) + self.run_loop_once() + self.run_loop_once() + self.assertFalse(pings[0][0].done()) + self.assertFalse(pings[1][0].done()) + self.assertFalse(pings[2][0].done()) + # Pong acknowledges all previous pings + self.receive_frame(Frame(True, OP_PONG, pings[1][1].data)) + self.run_loop_once() + self.run_loop_once() + self.assertTrue(pings[0][0].done()) + self.assertTrue(pings[1][0].done()) + self.assertFalse(pings[2][0].done()) + + def test_acknowledge_aborted_ping(self): + ping = self.loop.run_until_complete(self.protocol.ping()) + ping_frame = self.last_sent_frame() + # Clog incoming queue. This lets connection_lost() abort pending pings + # with a ConnectionClosed exception before transfer_data_task + # terminates and close_connection cancels keepalive_ping_task. + self.protocol.max_queue = 1 + self.receive_frame(Frame(True, OP_TEXT, b"1")) + self.receive_frame(Frame(True, OP_TEXT, b"2")) + # Add pong frame to the queue. + pong_frame = Frame(True, OP_PONG, ping_frame.data) + self.receive_frame(pong_frame) + # Connection drops. + self.receive_eof() + self.loop.run_until_complete(self.protocol.wait_closed()) + # Ping receives a ConnectionClosed exception. + with self.assertRaises(ConnectionClosed): + ping.result() + + # transfer_data doesn't crash, which would be logged. + with self.assertNoLogs(): + # Unclog incoming queue. + self.loop.run_until_complete(self.protocol.recv()) + self.loop.run_until_complete(self.protocol.recv()) + + def test_canceled_ping(self): + ping = self.loop.run_until_complete(self.protocol.ping()) + ping_frame = self.last_sent_frame() + ping.cancel() + pong_frame = Frame(True, OP_PONG, ping_frame.data) + self.receive_frame(pong_frame) + self.run_loop_once() + self.run_loop_once() + self.assertTrue(ping.cancelled()) + + def test_duplicate_ping(self): + self.loop.run_until_complete(self.protocol.ping(b"foobar")) + self.assertOneFrameSent(True, OP_PING, b"foobar") + with self.assertRaises(ValueError): + self.loop.run_until_complete(self.protocol.ping(b"foobar")) + self.assertNoFrameSent() + + # Test the protocol's logic for rebuilding fragmented messages. 
+ + def test_fragmented_text(self): + self.receive_frame(Frame(False, OP_TEXT, "ca".encode("utf-8"))) + self.receive_frame(Frame(True, OP_CONT, "fé".encode("utf-8"))) + data = self.loop.run_until_complete(self.protocol.recv()) + self.assertEqual(data, "café") + + def test_fragmented_binary(self): + self.receive_frame(Frame(False, OP_BINARY, b"t")) + self.receive_frame(Frame(False, OP_CONT, b"e")) + self.receive_frame(Frame(True, OP_CONT, b"a")) + data = self.loop.run_until_complete(self.protocol.recv()) + self.assertEqual(data, b"tea") + + def test_fragmented_text_payload_too_big(self): + self.protocol.max_size = 1024 + self.receive_frame(Frame(False, OP_TEXT, "café".encode("utf-8") * 100)) + self.receive_frame(Frame(True, OP_CONT, "café".encode("utf-8") * 105)) + self.process_invalid_frames() + self.assertConnectionFailed(1009, "") + + def test_fragmented_binary_payload_too_big(self): + self.protocol.max_size = 1024 + self.receive_frame(Frame(False, OP_BINARY, b"tea" * 171)) + self.receive_frame(Frame(True, OP_CONT, b"tea" * 171)) + self.process_invalid_frames() + self.assertConnectionFailed(1009, "") + + def test_fragmented_text_no_max_size(self): + self.protocol.max_size = None # for test coverage + self.receive_frame(Frame(False, OP_TEXT, "café".encode("utf-8") * 100)) + self.receive_frame(Frame(True, OP_CONT, "café".encode("utf-8") * 105)) + data = self.loop.run_until_complete(self.protocol.recv()) + self.assertEqual(data, "café" * 205) + + def test_fragmented_binary_no_max_size(self): + self.protocol.max_size = None # for test coverage + self.receive_frame(Frame(False, OP_BINARY, b"tea" * 171)) + self.receive_frame(Frame(True, OP_CONT, b"tea" * 171)) + data = self.loop.run_until_complete(self.protocol.recv()) + self.assertEqual(data, b"tea" * 342) + + def test_control_frame_within_fragmented_text(self): + self.receive_frame(Frame(False, OP_TEXT, "ca".encode("utf-8"))) + self.receive_frame(Frame(True, OP_PING, b"")) + self.receive_frame(Frame(True, OP_CONT, "fé".encode("utf-8"))) + data = self.loop.run_until_complete(self.protocol.recv()) + self.assertEqual(data, "café") + self.assertOneFrameSent(True, OP_PONG, b"") + + def test_unterminated_fragmented_text(self): + self.receive_frame(Frame(False, OP_TEXT, "ca".encode("utf-8"))) + # Missing the second part of the fragmented frame. + self.receive_frame(Frame(True, OP_BINARY, b"tea")) + self.process_invalid_frames() + self.assertConnectionFailed(1002, "") + + def test_close_handshake_in_fragmented_text(self): + self.receive_frame(Frame(False, OP_TEXT, "ca".encode("utf-8"))) + self.receive_frame(Frame(True, OP_CLOSE, b"")) + self.process_invalid_frames() + # The RFC may have overlooked this case: it says that control frames + # can be interjected in the middle of a fragmented message and that a + # close frame must be echoed. Even though there's an unterminated + # message, technically, the closing handshake was successful. + self.assertConnectionClosed(1005, "") + + def test_connection_close_in_fragmented_text(self): + self.receive_frame(Frame(False, OP_TEXT, "ca".encode("utf-8"))) + self.process_invalid_frames() + self.assertConnectionFailed(1006, "") + + # Test miscellaneous code paths to ensure full coverage. + + def test_connection_lost(self): + # Test calling connection_lost without going through close_connection. + self.protocol.connection_lost(None) + + self.assertConnectionFailed(1006, "") + + def test_ensure_open_before_opening_handshake(self): + # Simulate a bug by forcibly reverting the protocol state. 
+ self.protocol.state = State.CONNECTING + + with self.assertRaises(InvalidState): + self.loop.run_until_complete(self.protocol.ensure_open()) + + def test_ensure_open_during_unclean_close(self): + # Process connection_made in order to start transfer_data_task. + self.run_loop_once() + + # Ensure the test terminates quickly. + self.loop.call_later(MS, self.receive_eof_if_client) + + # Simulate the case when close() times out sending a close frame. + self.protocol.fail_connection() + + with self.assertRaises(ConnectionClosed): + self.loop.run_until_complete(self.protocol.ensure_open()) + + def test_legacy_recv(self): + # By default legacy_recv in disabled. + self.assertEqual(self.protocol.legacy_recv, False) + + self.close_connection() + + # Enable legacy_recv. + self.protocol.legacy_recv = True + + # Now recv() returns None instead of raising ConnectionClosed. + self.assertIsNone(self.loop.run_until_complete(self.protocol.recv())) + + def test_connection_closed_attributes(self): + self.close_connection() + + with self.assertRaises(ConnectionClosed) as context: + self.loop.run_until_complete(self.protocol.recv()) + + connection_closed_exc = context.exception + self.assertEqual(connection_closed_exc.code, 1000) + self.assertEqual(connection_closed_exc.reason, "close") + + # Test the protocol logic for sending keepalive pings. + + def restart_protocol_with_keepalive_ping( + self, ping_interval=3 * MS, ping_timeout=3 * MS + ): + initial_protocol = self.protocol + # copied from tearDown + self.transport.close() + self.loop.run_until_complete(self.protocol.close()) + # copied from setUp, but enables keepalive pings + self.protocol = WebSocketCommonProtocol( + ping_interval=ping_interval, ping_timeout=ping_timeout + ) + self.transport = TransportMock() + self.transport.setup_mock(self.loop, self.protocol) + self.protocol.is_client = initial_protocol.is_client + self.protocol.side = initial_protocol.side + + def test_keepalive_ping(self): + self.restart_protocol_with_keepalive_ping() + + # Ping is sent at 3ms and acknowledged at 4ms. + self.loop.run_until_complete(asyncio.sleep(4 * MS)) + (ping_1,) = tuple(self.protocol.pings) + self.assertOneFrameSent(True, OP_PING, ping_1) + self.receive_frame(Frame(True, OP_PONG, ping_1)) + + # Next ping is sent at 7ms. + self.loop.run_until_complete(asyncio.sleep(4 * MS)) + (ping_2,) = tuple(self.protocol.pings) + self.assertOneFrameSent(True, OP_PING, ping_2) + + # The keepalive ping task goes on. + self.assertFalse(self.protocol.keepalive_ping_task.done()) + + def test_keepalive_ping_not_acknowledged_closes_connection(self): + self.restart_protocol_with_keepalive_ping() + + # Ping is sent at 3ms and not acknowleged. + self.loop.run_until_complete(asyncio.sleep(4 * MS)) + (ping_1,) = tuple(self.protocol.pings) + self.assertOneFrameSent(True, OP_PING, ping_1) + + # Connection is closed at 6ms. + self.loop.run_until_complete(asyncio.sleep(4 * MS)) + self.assertOneFrameSent(True, OP_CLOSE, serialize_close(1011, "")) + + # The keepalive ping task is complete. + self.assertEqual(self.protocol.keepalive_ping_task.result(), None) + + def test_keepalive_ping_stops_when_connection_closing(self): + self.restart_protocol_with_keepalive_ping() + close_task = self.half_close_connection_local() + + # No ping sent at 3ms because the closing handshake is in progress. + self.loop.run_until_complete(asyncio.sleep(4 * MS)) + self.assertNoFrameSent() + + # The keepalive ping task terminated. 
+        self.assertTrue(self.protocol.keepalive_ping_task.cancelled())
+
+        self.loop.run_until_complete(close_task)  # cleanup
+
+    def test_keepalive_ping_stops_when_connection_closed(self):
+        self.restart_protocol_with_keepalive_ping()
+        self.close_connection()
+
+        # The keepalive ping task terminated.
+        self.assertTrue(self.protocol.keepalive_ping_task.cancelled())
+
+    def test_keepalive_ping_does_not_crash_when_connection_lost(self):
+        self.restart_protocol_with_keepalive_ping()
+        # Clog incoming queue. This lets connection_lost() abort pending pings
+        # with a ConnectionClosed exception before transfer_data_task
+        # terminates and close_connection cancels keepalive_ping_task.
+        self.protocol.max_queue = 1
+        self.receive_frame(Frame(True, OP_TEXT, b"1"))
+        self.receive_frame(Frame(True, OP_TEXT, b"2"))
+        # Ping is sent at 3ms.
+        self.loop.run_until_complete(asyncio.sleep(4 * MS))
+        (ping_waiter,) = tuple(self.protocol.pings.values())
+        # Connection drops.
+        self.receive_eof()
+        self.loop.run_until_complete(self.protocol.wait_closed())
+
+        # The ping waiter receives a ConnectionClosed exception.
+        with self.assertRaises(ConnectionClosed):
+            ping_waiter.result()
+        # The keepalive ping task terminated properly.
+        self.assertIsNone(self.protocol.keepalive_ping_task.result())
+
+        # Unclog incoming queue to terminate the test quickly.
+        self.loop.run_until_complete(self.protocol.recv())
+        self.loop.run_until_complete(self.protocol.recv())
+
+    def test_keepalive_ping_with_no_ping_interval(self):
+        self.restart_protocol_with_keepalive_ping(ping_interval=None)
+
+        # No ping is sent at 3ms.
+        self.loop.run_until_complete(asyncio.sleep(4 * MS))
+        self.assertNoFrameSent()
+
+    def test_keepalive_ping_with_no_ping_timeout(self):
+        self.restart_protocol_with_keepalive_ping(ping_timeout=None)
+
+        # Ping is sent at 3ms and not acknowledged.
+        self.loop.run_until_complete(asyncio.sleep(4 * MS))
+        (ping_1,) = tuple(self.protocol.pings)
+        self.assertOneFrameSent(True, OP_PING, ping_1)
+
+        # Next ping is sent at 7ms anyway.
+        self.loop.run_until_complete(asyncio.sleep(4 * MS))
+        ping_1_again, ping_2 = tuple(self.protocol.pings)
+        self.assertEqual(ping_1, ping_1_again)
+        self.assertOneFrameSent(True, OP_PING, ping_2)
+
+        # The keepalive ping task goes on.
+        self.assertFalse(self.protocol.keepalive_ping_task.done())
+
+    def test_keepalive_ping_unexpected_error(self):
+        self.restart_protocol_with_keepalive_ping()
+
+        async def ping():
+            raise Exception("BOOM")
+
+        self.protocol.ping = ping
+
+        # The keepalive ping task fails when sending a ping at 3ms.
+        self.loop.run_until_complete(asyncio.sleep(4 * MS))
+
+        # The keepalive ping task is complete.
+        # It logs and swallows the exception.
+        self.assertEqual(self.protocol.keepalive_ping_task.result(), None)
+
+    # Test the protocol logic for closing the connection.
+
+    def test_local_close(self):
+        # Emulate how the remote endpoint answers the closing handshake.
+        self.loop.call_later(MS, self.receive_frame, self.close_frame)
+        self.loop.call_later(MS, self.receive_eof_if_client)
+
+        # Run the closing handshake.
+        self.loop.run_until_complete(self.protocol.close(reason="close"))
+
+        self.assertConnectionClosed(1000, "close")
+        self.assertOneFrameSent(*self.close_frame)
+
+        # Closing the connection again is a no-op.
+        self.loop.run_until_complete(self.protocol.close(reason="oh noes!"))
+
+        self.assertConnectionClosed(1000, "close")
+        self.assertNoFrameSent()
+
+    def test_remote_close(self):
+        # Emulate how the remote endpoint initiates the closing handshake.
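+        # The close frame arrives at 1ms; on the client side, EOF follows
+        # because the server is expected to close the TCP connection.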
+ self.loop.call_later(MS, self.receive_frame, self.close_frame) + self.loop.call_later(MS, self.receive_eof_if_client) + + # Wait for some data in order to process the handshake. + # After recv() raises ConnectionClosed, the connection is closed. + with self.assertRaises(ConnectionClosed): + self.loop.run_until_complete(self.protocol.recv()) + + self.assertConnectionClosed(1000, "close") + self.assertOneFrameSent(*self.close_frame) + + # Closing the connection again is a no-op. + self.loop.run_until_complete(self.protocol.close(reason="oh noes!")) + + self.assertConnectionClosed(1000, "close") + self.assertNoFrameSent() + + def test_remote_close_and_connection_lost(self): + self.make_drain_slow() + # Drop the connection right after receiving a close frame, + # which prevents echoing the close frame properly. + self.receive_frame(self.close_frame) + self.receive_eof() + + with self.assertNoLogs(): + self.loop.run_until_complete(self.protocol.close(reason="oh noes!")) + + self.assertConnectionClosed(1000, "close") + self.assertOneFrameSent(*self.close_frame) + + def test_simultaneous_close(self): + # Receive the incoming close frame right after self.protocol.close() + # starts executing. This reproduces the error described in: + # https://github.com/aaugustin/websockets/issues/339 + self.loop.call_soon(self.receive_frame, self.remote_close) + self.loop.call_soon(self.receive_eof_if_client) + + self.loop.run_until_complete(self.protocol.close(reason="local")) + + self.assertConnectionClosed(1000, "remote") + # The current implementation sends a close frame in response to the + # close frame received from the remote end. It skips the close frame + # that should be sent as a result of calling close(). + self.assertOneFrameSent(*self.remote_close) + + def test_close_preserves_incoming_frames(self): + self.receive_frame(Frame(True, OP_TEXT, b"hello")) + + self.loop.call_later(MS, self.receive_frame, self.close_frame) + self.loop.call_later(MS, self.receive_eof_if_client) + self.loop.run_until_complete(self.protocol.close(reason="close")) + + self.assertConnectionClosed(1000, "close") + self.assertOneFrameSent(*self.close_frame) + + next_message = self.loop.run_until_complete(self.protocol.recv()) + self.assertEqual(next_message, "hello") + + def test_close_protocol_error(self): + invalid_close_frame = Frame(True, OP_CLOSE, b"\x00") + self.receive_frame(invalid_close_frame) + self.receive_eof_if_client() + self.run_loop_once() + self.loop.run_until_complete(self.protocol.close(reason="close")) + + self.assertConnectionFailed(1002, "") + + def test_close_connection_lost(self): + self.receive_eof() + self.run_loop_once() + self.loop.run_until_complete(self.protocol.close(reason="close")) + + self.assertConnectionFailed(1006, "") + + def test_local_close_during_recv(self): + recv = self.loop.create_task(self.protocol.recv()) + + self.loop.call_later(MS, self.receive_frame, self.close_frame) + self.loop.call_later(MS, self.receive_eof_if_client) + + self.loop.run_until_complete(self.protocol.close(reason="close")) + + with self.assertRaises(ConnectionClosed): + self.loop.run_until_complete(recv) + + self.assertConnectionClosed(1000, "close") + + # There is no test_remote_close_during_recv because it would be identical + # to test_remote_close. 
+ + def test_remote_close_during_send(self): + self.make_drain_slow() + send = self.loop.create_task(self.protocol.send("hello")) + + self.receive_frame(self.close_frame) + self.receive_eof() + + with self.assertRaises(ConnectionClosed): + self.loop.run_until_complete(send) + + self.assertConnectionClosed(1000, "close") + + # There is no test_local_close_during_send because this cannot really + # happen, considering that writes are serialized. + + +class ServerTests(CommonTests, AsyncioTestCase): + def setUp(self): + super().setUp() + self.protocol.is_client = False + self.protocol.side = "server" + + def test_local_close_send_close_frame_timeout(self): + self.protocol.close_timeout = 10 * MS + self.make_drain_slow(50 * MS) + # If we can't send a close frame, time out in 10ms. + # Check the timing within -1/+9ms for robustness. + with self.assertCompletesWithin(9 * MS, 19 * MS): + self.loop.run_until_complete(self.protocol.close(reason="close")) + self.assertConnectionClosed(1006, "") + + def test_local_close_receive_close_frame_timeout(self): + self.protocol.close_timeout = 10 * MS + # If the client doesn't send a close frame, time out in 10ms. + # Check the timing within -1/+9ms for robustness. + with self.assertCompletesWithin(9 * MS, 19 * MS): + self.loop.run_until_complete(self.protocol.close(reason="close")) + self.assertConnectionClosed(1006, "") + + def test_local_close_connection_lost_timeout_after_write_eof(self): + self.protocol.close_timeout = 10 * MS + # If the client doesn't close its side of the TCP connection after we + # half-close our side with write_eof(), time out in 10ms. + # Check the timing within -1/+9ms for robustness. + with self.assertCompletesWithin(9 * MS, 19 * MS): + # HACK: disable write_eof => other end drops connection emulation. + self.transport._eof = True + self.receive_frame(self.close_frame) + self.loop.run_until_complete(self.protocol.close(reason="close")) + self.assertConnectionClosed(1000, "close") + + def test_local_close_connection_lost_timeout_after_close(self): + self.protocol.close_timeout = 10 * MS + # If the client doesn't close its side of the TCP connection after we + # half-close our side with write_eof() and close it with close(), time + # out in 20ms. + # Check the timing within -1/+9ms for robustness. + with self.assertCompletesWithin(19 * MS, 29 * MS): + # HACK: disable write_eof => other end drops connection emulation. + self.transport._eof = True + # HACK: disable close => other end drops connection emulation. + self.transport._closing = True + self.receive_frame(self.close_frame) + self.loop.run_until_complete(self.protocol.close(reason="close")) + self.assertConnectionClosed(1000, "close") + + +class ClientTests(CommonTests, AsyncioTestCase): + def setUp(self): + super().setUp() + self.protocol.is_client = True + self.protocol.side = "client" + + def test_local_close_send_close_frame_timeout(self): + self.protocol.close_timeout = 10 * MS + self.make_drain_slow(50 * MS) + # If we can't send a close frame, time out in 20ms. + # - 10ms waiting for sending a close frame + # - 10ms waiting for receiving a half-close + # Check the timing within -1/+9ms for robustness. 
+        with self.assertCompletesWithin(19 * MS, 29 * MS):
+            self.loop.run_until_complete(self.protocol.close(reason="close"))
+        self.assertConnectionClosed(1006, "")
+
+    def test_local_close_receive_close_frame_timeout(self):
+        self.protocol.close_timeout = 10 * MS
+        # If the server doesn't send a close frame, time out in 20ms:
+        # - 10ms waiting for receiving a close frame
+        # - 10ms waiting for receiving a half-close
+        # Check the timing within -1/+9ms for robustness.
+        with self.assertCompletesWithin(19 * MS, 29 * MS):
+            self.loop.run_until_complete(self.protocol.close(reason="close"))
+        self.assertConnectionClosed(1006, "")
+
+    def test_local_close_connection_lost_timeout_after_write_eof(self):
+        self.protocol.close_timeout = 10 * MS
+        # If the server doesn't half-close its side of the TCP connection
+        # after we send a close frame, time out in 20ms:
+        # - 10ms waiting for receiving a half-close
+        # - 10ms waiting for receiving a close after write_eof
+        # Check the timing within -1/+9ms for robustness.
+        with self.assertCompletesWithin(19 * MS, 29 * MS):
+            # HACK: disable write_eof => other end drops connection emulation.
+            self.transport._eof = True
+            self.receive_frame(self.close_frame)
+            self.loop.run_until_complete(self.protocol.close(reason="close"))
+        self.assertConnectionClosed(1000, "close")
+
+    def test_local_close_connection_lost_timeout_after_close(self):
+        self.protocol.close_timeout = 10 * MS
+        # If the server doesn't close its side of the TCP connection after we
+        # half-close our side with write_eof() and close it with close(), time
+        # out in 30ms:
+        # - 10ms waiting for receiving a half-close
+        # - 10ms waiting for receiving a close after write_eof
+        # - 10ms waiting for receiving a close after close
+        # Check the timing within -1/+9ms for robustness.
+        with self.assertCompletesWithin(29 * MS, 39 * MS):
+            # HACK: disable write_eof => other end drops connection emulation.
+            self.transport._eof = True
+            # HACK: disable close => other end drops connection emulation.
+            self.transport._closing = True
+            self.receive_frame(self.close_frame)
+            self.loop.run_until_complete(self.protocol.close(reason="close"))
+        self.assertConnectionClosed(1000, "close")
diff --git a/tests/legacy/utils.py b/tests/legacy/utils.py
new file mode 100644
index 000000000..983a91edf
--- /dev/null
+++ b/tests/legacy/utils.py
@@ -0,0 +1,93 @@
+import asyncio
+import contextlib
+import functools
+import logging
+import os
+import time
+import unittest
+
+
+class AsyncioTestCase(unittest.TestCase):
+    """
+    Base class for tests that sets up an isolated event loop for each test.
+
+    """
+
+    def __init_subclass__(cls, **kwargs):
+        """
+        Convert test coroutines to test functions.
+
+        This supports asynchronous tests transparently.
+
+        """
+        super().__init_subclass__(**kwargs)
+        for name in unittest.defaultTestLoader.getTestCaseNames(cls):
+            test = getattr(cls, name)
+            if asyncio.iscoroutinefunction(test):
+                setattr(cls, name, cls.convert_async_to_sync(test))
+
+    @staticmethod
+    def convert_async_to_sync(test):
+        """
+        Convert a test coroutine to a test function.
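+
+        The returned function runs the coroutine to completion on the event
+        loop created in setUp().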
+ + """ + + @functools.wraps(test) + def test_func(self, *args, **kwargs): + return self.loop.run_until_complete(test(self, *args, **kwargs)) + + return test_func + + def setUp(self): + super().setUp() + self.loop = asyncio.new_event_loop() + asyncio.set_event_loop(self.loop) + + def tearDown(self): + self.loop.close() + super().tearDown() + + def run_loop_once(self): + # Process callbacks scheduled with call_soon by appending a callback + # to stop the event loop then running it until it hits that callback. + self.loop.call_soon(self.loop.stop) + self.loop.run_forever() + + @contextlib.contextmanager + def assertNoLogs(self, logger="websockets", level=logging.ERROR): + """ + No message is logged on the given logger with at least the given level. + + """ + with self.assertLogs(logger, level) as logs: + # We want to test that no log message is emitted + # but assertLogs expects at least one log message. + logging.getLogger(logger).log(level, "dummy") + yield + + level_name = logging.getLevelName(level) + self.assertEqual(logs.output, [f"{level_name}:{logger}:dummy"]) + + def assertDeprecationWarnings(self, recorded_warnings, expected_warnings): + """ + Check recorded deprecation warnings match a list of expected messages. + + """ + self.assertEqual(len(recorded_warnings), len(expected_warnings)) + for recorded, expected in zip(recorded_warnings, expected_warnings): + actual = recorded.message + self.assertEqual(str(actual), expected) + self.assertEqual(type(actual), DeprecationWarning) + + +# Unit for timeouts. May be increased on slow machines by setting the +# WEBSOCKETS_TESTS_TIMEOUT_FACTOR environment variable. +MS = 0.001 * int(os.environ.get("WEBSOCKETS_TESTS_TIMEOUT_FACTOR", 1)) + +# asyncio's debug mode has a 10x performance penalty for this test suite. +if os.environ.get("PYTHONASYNCIODEBUG"): # pragma: no cover + MS *= 10 + +# Ensure that timeouts are larger than the clock's resolution (for Windows). 
+MS = max(MS, 2.5 * time.get_clock_info("monotonic").resolution) diff --git a/tests/test_auth.py b/tests/test_auth.py index ce23f913d..01ca207c7 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -1,160 +1,2 @@ -import unittest -import urllib.error - -from websockets.auth import * -from websockets.auth import is_credentials -from websockets.exceptions import InvalidStatusCode -from websockets.headers import build_authorization_basic - -from .test_asyncio_client_server import ClientServerTestsMixin, with_client, with_server -from .utils import AsyncioTestCase - - -class AuthTests(unittest.TestCase): - def test_is_credentials(self): - self.assertTrue(is_credentials(("username", "password"))) - - def test_is_not_credentials(self): - self.assertFalse(is_credentials(None)) - self.assertFalse(is_credentials("username")) - - -class CustomWebSocketServerProtocol(BasicAuthWebSocketServerProtocol): - async def process_request(self, path, request_headers): - type(self).used = True - return await super().process_request(path, request_headers) - - -class AuthClientServerTests(ClientServerTestsMixin, AsyncioTestCase): - - create_protocol = basic_auth_protocol_factory( - realm="auth-tests", credentials=("hello", "iloveyou") - ) - - @with_server(create_protocol=create_protocol) - @with_client(user_info=("hello", "iloveyou")) - def test_basic_auth(self): - req_headers = self.client.request_headers - resp_headers = self.client.response_headers - self.assertEqual(req_headers["Authorization"], "Basic aGVsbG86aWxvdmV5b3U=") - self.assertNotIn("WWW-Authenticate", resp_headers) - - self.loop.run_until_complete(self.client.send("Hello!")) - self.loop.run_until_complete(self.client.recv()) - - def test_basic_auth_server_no_credentials(self): - with self.assertRaises(TypeError) as raised: - basic_auth_protocol_factory(realm="auth-tests", credentials=None) - self.assertEqual( - str(raised.exception), "provide either credentials or check_credentials" - ) - - def test_basic_auth_server_bad_credentials(self): - with self.assertRaises(TypeError) as raised: - basic_auth_protocol_factory(realm="auth-tests", credentials=42) - self.assertEqual(str(raised.exception), "invalid credentials argument: 42") - - create_protocol_multiple_credentials = basic_auth_protocol_factory( - realm="auth-tests", - credentials=[("hello", "iloveyou"), ("goodbye", "stillloveu")], - ) - - @with_server(create_protocol=create_protocol_multiple_credentials) - @with_client(user_info=("hello", "iloveyou")) - def test_basic_auth_server_multiple_credentials(self): - self.loop.run_until_complete(self.client.send("Hello!")) - self.loop.run_until_complete(self.client.recv()) - - def test_basic_auth_bad_multiple_credentials(self): - with self.assertRaises(TypeError) as raised: - basic_auth_protocol_factory( - realm="auth-tests", credentials=[("hello", "iloveyou"), 42] - ) - self.assertEqual( - str(raised.exception), - "invalid credentials argument: [('hello', 'iloveyou'), 42]", - ) - - async def check_credentials(username, password): - return password == "iloveyou" - - create_protocol_check_credentials = basic_auth_protocol_factory( - realm="auth-tests", - check_credentials=check_credentials, - ) - - @with_server(create_protocol=create_protocol_check_credentials) - @with_client(user_info=("hello", "iloveyou")) - def test_basic_auth_check_credentials(self): - self.loop.run_until_complete(self.client.send("Hello!")) - self.loop.run_until_complete(self.client.recv()) - - create_protocol_custom_protocol = basic_auth_protocol_factory( - 
realm="auth-tests", - credentials=[("hello", "iloveyou")], - create_protocol=CustomWebSocketServerProtocol, - ) - - @with_server(create_protocol=create_protocol_custom_protocol) - @with_client(user_info=("hello", "iloveyou")) - def test_basic_auth_custom_protocol(self): - self.assertTrue(CustomWebSocketServerProtocol.used) - del CustomWebSocketServerProtocol.used - self.loop.run_until_complete(self.client.send("Hello!")) - self.loop.run_until_complete(self.client.recv()) - - @with_server(create_protocol=create_protocol) - def test_basic_auth_missing_credentials(self): - with self.assertRaises(InvalidStatusCode) as raised: - self.start_client() - self.assertEqual(raised.exception.status_code, 401) - - @with_server(create_protocol=create_protocol) - def test_basic_auth_missing_credentials_details(self): - with self.assertRaises(urllib.error.HTTPError) as raised: - self.loop.run_until_complete(self.make_http_request()) - self.assertEqual(raised.exception.code, 401) - self.assertEqual( - raised.exception.headers["WWW-Authenticate"], - 'Basic realm="auth-tests", charset="UTF-8"', - ) - self.assertEqual(raised.exception.read().decode(), "Missing credentials\n") - - @with_server(create_protocol=create_protocol) - def test_basic_auth_unsupported_credentials(self): - with self.assertRaises(InvalidStatusCode) as raised: - self.start_client(extra_headers={"Authorization": "Digest ..."}) - self.assertEqual(raised.exception.status_code, 401) - - @with_server(create_protocol=create_protocol) - def test_basic_auth_unsupported_credentials_details(self): - with self.assertRaises(urllib.error.HTTPError) as raised: - self.loop.run_until_complete( - self.make_http_request(headers={"Authorization": "Digest ..."}) - ) - self.assertEqual(raised.exception.code, 401) - self.assertEqual( - raised.exception.headers["WWW-Authenticate"], - 'Basic realm="auth-tests", charset="UTF-8"', - ) - self.assertEqual(raised.exception.read().decode(), "Unsupported credentials\n") - - @with_server(create_protocol=create_protocol) - def test_basic_auth_invalid_credentials(self): - with self.assertRaises(InvalidStatusCode) as raised: - self.start_client(user_info=("hello", "ihateyou")) - self.assertEqual(raised.exception.status_code, 401) - - @with_server(create_protocol=create_protocol) - def test_basic_auth_invalid_credentials_details(self): - with self.assertRaises(urllib.error.HTTPError) as raised: - authorization = build_authorization_basic("hello", "ihateyou") - self.loop.run_until_complete( - self.make_http_request(headers={"Authorization": authorization}) - ) - self.assertEqual(raised.exception.code, 401) - self.assertEqual( - raised.exception.headers["WWW-Authenticate"], - 'Basic realm="auth-tests", charset="UTF-8"', - ) - self.assertEqual(raised.exception.read().decode(), "Invalid credentials\n") +# Check that the legacy auth module imports without an exception. 
+from websockets.auth import * # noqa diff --git a/tests/test_exports.py b/tests/test_exports.py index 7fcbc80e3..8e4330304 100644 --- a/tests/test_exports.py +++ b/tests/test_exports.py @@ -4,10 +4,12 @@ combined_exports = ( - websockets.auth.__all__ + websockets.legacy.auth.__all__ + + websockets.legacy.client.__all__ + + websockets.legacy.protocol.__all__ + + websockets.legacy.server.__all__ + websockets.client.__all__ + websockets.exceptions.__all__ - + websockets.protocol.__all__ + websockets.server.__all__ + websockets.typing.__all__ + websockets.uri.__all__ diff --git a/tests/test_framing.py b/tests/test_framing.py index 231cbf718..d6fa6352a 100644 --- a/tests/test_framing.py +++ b/tests/test_framing.py @@ -1,171 +1,9 @@ -import asyncio -import codecs -import unittest -import unittest.mock import warnings -from websockets.exceptions import PayloadTooBig, ProtocolError -from websockets.frames import OP_BINARY, OP_CLOSE, OP_PING, OP_PONG, OP_TEXT -from websockets.framing import * -from .utils import AsyncioTestCase - - -class FramingTests(AsyncioTestCase): - def decode(self, message, mask=False, max_size=None, extensions=None): - stream = asyncio.StreamReader(loop=self.loop) - stream.feed_data(message) - stream.feed_eof() - with warnings.catch_warnings(record=True): - frame = self.loop.run_until_complete( - Frame.read( - stream.readexactly, - mask=mask, - max_size=max_size, - extensions=extensions, - ) - ) - # Make sure all the data was consumed. - self.assertTrue(stream.at_eof()) - return frame - - def encode(self, frame, mask=False, extensions=None): - write = unittest.mock.Mock() - with warnings.catch_warnings(record=True): - frame.write(write, mask=mask, extensions=extensions) - # Ensure the entire frame is sent with a single call to write(). - # Multiple calls cause TCP fragmentation and degrade performance. - self.assertEqual(write.call_count, 1) - # The frame data is the single positional argument of that call. 
- self.assertEqual(len(write.call_args[0]), 1) - self.assertEqual(len(write.call_args[1]), 0) - return write.call_args[0][0] - - def round_trip(self, message, expected, mask=False, extensions=None): - decoded = self.decode(message, mask, extensions=extensions) - self.assertEqual(decoded, expected) - encoded = self.encode(decoded, mask, extensions=extensions) - if mask: # non-deterministic encoding - decoded = self.decode(encoded, mask, extensions=extensions) - self.assertEqual(decoded, expected) - else: # deterministic encoding - self.assertEqual(encoded, message) - - def test_text(self): - self.round_trip(b"\x81\x04Spam", Frame(True, OP_TEXT, b"Spam")) - - def test_text_masked(self): - self.round_trip( - b"\x81\x84\x5b\xfb\xe1\xa8\x08\x8b\x80\xc5", - Frame(True, OP_TEXT, b"Spam"), - mask=True, - ) - - def test_binary(self): - self.round_trip(b"\x82\x04Eggs", Frame(True, OP_BINARY, b"Eggs")) - - def test_binary_masked(self): - self.round_trip( - b"\x82\x84\x53\xcd\xe2\x89\x16\xaa\x85\xfa", - Frame(True, OP_BINARY, b"Eggs"), - mask=True, - ) - - def test_non_ascii_text(self): - self.round_trip( - b"\x81\x05caf\xc3\xa9", Frame(True, OP_TEXT, "café".encode("utf-8")) - ) - - def test_non_ascii_text_masked(self): - self.round_trip( - b"\x81\x85\x64\xbe\xee\x7e\x07\xdf\x88\xbd\xcd", - Frame(True, OP_TEXT, "café".encode("utf-8")), - mask=True, - ) - - def test_close(self): - self.round_trip(b"\x88\x00", Frame(True, OP_CLOSE, b"")) - - def test_ping(self): - self.round_trip(b"\x89\x04ping", Frame(True, OP_PING, b"ping")) - - def test_pong(self): - self.round_trip(b"\x8a\x04pong", Frame(True, OP_PONG, b"pong")) - - def test_long(self): - self.round_trip( - b"\x82\x7e\x00\x7e" + 126 * b"a", Frame(True, OP_BINARY, 126 * b"a") - ) - - def test_very_long(self): - self.round_trip( - b"\x82\x7f\x00\x00\x00\x00\x00\x01\x00\x00" + 65536 * b"a", - Frame(True, OP_BINARY, 65536 * b"a"), - ) - - def test_payload_too_big(self): - with self.assertRaises(PayloadTooBig): - self.decode(b"\x82\x7e\x04\x01" + 1025 * b"a", max_size=1024) - - def test_bad_reserved_bits(self): - for encoded in [b"\xc0\x00", b"\xa0\x00", b"\x90\x00"]: - with self.subTest(encoded=encoded): - with self.assertRaises(ProtocolError): - self.decode(encoded) - - def test_good_opcode(self): - for opcode in list(range(0x00, 0x03)) + list(range(0x08, 0x0B)): - encoded = bytes([0x80 | opcode, 0]) - with self.subTest(encoded=encoded): - self.decode(encoded) # does not raise an exception - - def test_bad_opcode(self): - for opcode in list(range(0x03, 0x08)) + list(range(0x0B, 0x10)): - encoded = bytes([0x80 | opcode, 0]) - with self.subTest(encoded=encoded): - with self.assertRaises(ProtocolError): - self.decode(encoded) - - def test_mask_flag(self): - # Mask flag correctly set. - self.decode(b"\x80\x80\x00\x00\x00\x00", mask=True) - # Mask flag incorrectly unset. - with self.assertRaises(ProtocolError): - self.decode(b"\x80\x80\x00\x00\x00\x00") - # Mask flag correctly unset. - self.decode(b"\x80\x00") - # Mask flag incorrectly set. - with self.assertRaises(ProtocolError): - self.decode(b"\x80\x00", mask=True) - - def test_control_frame_max_length(self): - # At maximum allowed length. - self.decode(b"\x88\x7e\x00\x7d" + 125 * b"a") - # Above maximum allowed length. - with self.assertRaises(ProtocolError): - self.decode(b"\x88\x7e\x00\x7e" + 126 * b"a") - - def test_fragmented_control_frame(self): - # Fin bit correctly set. - self.decode(b"\x88\x00") - # Fin bit incorrectly unset. 
- with self.assertRaises(ProtocolError): - self.decode(b"\x08\x00") - - def test_extensions(self): - class Rot13: - @staticmethod - def encode(frame): - assert frame.opcode == OP_TEXT - text = frame.data.decode() - data = codecs.encode(text, "rot13").encode() - return frame._replace(data=data) - - # This extensions is symmetrical. - @staticmethod - def decode(frame, *, max_size=None): - return Rot13.encode(frame) - - self.round_trip( - b"\x81\x05uryyb", Frame(True, OP_TEXT, b"hello"), extensions=[Rot13()] - ) +with warnings.catch_warnings(): + warnings.filterwarnings( + "ignore", "websockets.framing is deprecated", DeprecationWarning + ) + # Check that the legacy framing module imports without an exception. + from websockets.framing import * # noqa diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 432c31ef5..f896fcae4 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -1,1489 +1,2 @@ -import asyncio -import contextlib -import sys -import unittest -import unittest.mock -import warnings - -from websockets.exceptions import ConnectionClosed, InvalidState -from websockets.frames import ( - OP_BINARY, - OP_CLOSE, - OP_CONT, - OP_PING, - OP_PONG, - OP_TEXT, - serialize_close, -) -from websockets.framing import Frame -from websockets.protocol import State, WebSocketCommonProtocol - -from .utils import MS, AsyncioTestCase - - -async def async_iterable(iterable): - for item in iterable: - yield item - - -class TransportMock(unittest.mock.Mock): - """ - Transport mock to control the protocol's inputs and outputs in tests. - - It calls the protocol's connection_made and connection_lost methods like - actual transports. - - It also calls the protocol's connection_open method to bypass the - WebSocket handshake. - - To simulate incoming data, tests call the protocol's data_received and - eof_received methods directly. - - They could also pause_writing and resume_writing to test flow control. - - """ - - # This should happen in __init__ but overriding Mock.__init__ is hard. - def setup_mock(self, loop, protocol): - self.loop = loop - self.protocol = protocol - self._eof = False - self._closing = False - # Simulate a successful TCP handshake. - self.protocol.connection_made(self) - # Simulate a successful WebSocket handshake. - self.protocol.connection_open() - - def can_write_eof(self): - return True - - def write_eof(self): - # When the protocol half-closes the TCP connection, it expects the - # other end to close it. Simulate that. - if not self._eof: - self.loop.call_soon(self.close) - self._eof = True - - def close(self): - # Simulate how actual transports drop the connection. - if not self._closing: - self.loop.call_soon(self.protocol.connection_lost, None) - self._closing = True - - def abort(self): - # Change this to an `if` if tests call abort() multiple times. - assert self.protocol.state is not State.CLOSED - self.loop.call_soon(self.protocol.connection_lost, None) - - -class CommonTests: - """ - Mixin that defines most tests but doesn't inherit unittest.TestCase. - - Tests are run by the ServerTests and ClientTests subclasses. - - """ - - def setUp(self): - super().setUp() - # Disable pings to make it easier to test what frames are sent exactly. - self.protocol = WebSocketCommonProtocol(ping_interval=None) - self.transport = TransportMock() - self.transport.setup_mock(self.loop, self.protocol) - - def tearDown(self): - self.transport.close() - self.loop.run_until_complete(self.protocol.close()) - super().tearDown() - - # Utilities for writing tests. 
- - def make_drain_slow(self, delay=MS): - # Process connection_made in order to initialize self.protocol.transport. - self.run_loop_once() - - original_drain = self.protocol._drain - - async def delayed_drain(): - await asyncio.sleep( - delay, loop=self.loop if sys.version_info[:2] < (3, 8) else None - ) - await original_drain() - - self.protocol._drain = delayed_drain - - close_frame = Frame(True, OP_CLOSE, serialize_close(1000, "close")) - local_close = Frame(True, OP_CLOSE, serialize_close(1000, "local")) - remote_close = Frame(True, OP_CLOSE, serialize_close(1000, "remote")) - - def receive_frame(self, frame): - """ - Make the protocol receive a frame. - - """ - write = self.protocol.data_received - mask = not self.protocol.is_client - frame.write(write, mask=mask) - - def receive_eof(self): - """ - Make the protocol receive the end of the data stream. - - Since ``WebSocketCommonProtocol.eof_received`` returns ``None``, an - actual transport would close itself after calling it. This function - emulates that behavior. - - """ - self.protocol.eof_received() - self.loop.call_soon(self.transport.close) - - def receive_eof_if_client(self): - """ - Like receive_eof, but only if this is the client side. - - Since the server is supposed to initiate the termination of the TCP - connection, this method helps making tests work for both sides. - - """ - if self.protocol.is_client: - self.receive_eof() - - def close_connection(self, code=1000, reason="close"): - """ - Execute a closing handshake. - - This puts the connection in the CLOSED state. - - """ - close_frame_data = serialize_close(code, reason) - # Prepare the response to the closing handshake from the remote side. - self.receive_frame(Frame(True, OP_CLOSE, close_frame_data)) - self.receive_eof_if_client() - # Trigger the closing handshake from the local side and complete it. - self.loop.run_until_complete(self.protocol.close(code, reason)) - # Empty the outgoing data stream so we can make assertions later on. - self.assertOneFrameSent(True, OP_CLOSE, close_frame_data) - - assert self.protocol.state is State.CLOSED - - def half_close_connection_local(self, code=1000, reason="close"): - """ - Start a closing handshake but do not complete it. - - The main difference with `close_connection` is that the connection is - left in the CLOSING state until the event loop runs again. - - The current implementation returns a task that must be awaited or - canceled, else asyncio complains about destroying a pending task. - - """ - close_frame_data = serialize_close(code, reason) - # Trigger the closing handshake from the local endpoint. - close_task = self.loop.create_task(self.protocol.close(code, reason)) - self.run_loop_once() # wait_for executes - self.run_loop_once() # write_frame executes - # Empty the outgoing data stream so we can make assertions later on. - self.assertOneFrameSent(True, OP_CLOSE, close_frame_data) - - assert self.protocol.state is State.CLOSING - - # Complete the closing sequence at 1ms intervals so the test can run - # at each point even it goes back to the event loop several times. - self.loop.call_later( - MS, self.receive_frame, Frame(True, OP_CLOSE, close_frame_data) - ) - self.loop.call_later(2 * MS, self.receive_eof_if_client) - - # This task must be awaited or canceled by the caller. - return close_task - - def half_close_connection_remote(self, code=1000, reason="close"): - """ - Receive a closing handshake but do not complete it. 
- - The main difference with `close_connection` is that the connection is - left in the CLOSING state until the event loop runs again. - - """ - # On the server side, websockets completes the closing handshake and - # closes the TCP connection immediately. Yield to the event loop after - # sending the close frame to run the test while the connection is in - # the CLOSING state. - if not self.protocol.is_client: - self.make_drain_slow() - - close_frame_data = serialize_close(code, reason) - # Trigger the closing handshake from the remote endpoint. - self.receive_frame(Frame(True, OP_CLOSE, close_frame_data)) - self.run_loop_once() # read_frame executes - # Empty the outgoing data stream so we can make assertions later on. - self.assertOneFrameSent(True, OP_CLOSE, close_frame_data) - - assert self.protocol.state is State.CLOSING - - # Complete the closing sequence at 1ms intervals so the test can run - # at each point even it goes back to the event loop several times. - self.loop.call_later(2 * MS, self.receive_eof_if_client) - - def process_invalid_frames(self): - """ - Make the protocol fail quickly after simulating invalid data. - - To achieve this, this function triggers the protocol's eof_received, - which interrupts pending reads waiting for more data. - - """ - self.run_loop_once() - self.receive_eof() - self.loop.run_until_complete(self.protocol.close_connection_task) - - def sent_frames(self): - """ - Read all frames sent to the transport. - - """ - stream = asyncio.StreamReader(loop=self.loop) - - for (data,), kw in self.transport.write.call_args_list: - stream.feed_data(data) - self.transport.write.call_args_list = [] - stream.feed_eof() - - frames = [] - while not stream.at_eof(): - frames.append( - self.loop.run_until_complete( - Frame.read(stream.readexactly, mask=self.protocol.is_client) - ) - ) - return frames - - def last_sent_frame(self): - """ - Read the last frame sent to the transport. - - This method assumes that at most one frame was sent. It raises an - AssertionError otherwise. - - """ - frames = self.sent_frames() - if frames: - assert len(frames) == 1 - return frames[0] - - def assertFramesSent(self, *frames): - self.assertEqual(self.sent_frames(), [Frame(*args) for args in frames]) - - def assertOneFrameSent(self, *args): - self.assertEqual(self.last_sent_frame(), Frame(*args)) - - def assertNoFrameSent(self): - self.assertIsNone(self.last_sent_frame()) - - def assertConnectionClosed(self, code, message): - # The following line guarantees that connection_lost was called. - self.assertEqual(self.protocol.state, State.CLOSED) - # A close frame was received. - self.assertEqual(self.protocol.close_code, code) - self.assertEqual(self.protocol.close_reason, message) - - def assertConnectionFailed(self, code, message): - # The following line guarantees that connection_lost was called. - self.assertEqual(self.protocol.state, State.CLOSED) - # No close frame was received. - self.assertEqual(self.protocol.close_code, 1006) - self.assertEqual(self.protocol.close_reason, "") - # A close frame was sent -- unless the connection was already lost. 
- if code == 1006: - self.assertNoFrameSent() - else: - self.assertOneFrameSent(True, OP_CLOSE, serialize_close(code, message)) - - @contextlib.contextmanager - def assertCompletesWithin(self, min_time, max_time): - t0 = self.loop.time() - yield - t1 = self.loop.time() - dt = t1 - t0 - self.assertGreaterEqual(dt, min_time, f"Too fast: {dt} < {min_time}") - self.assertLess(dt, max_time, f"Too slow: {dt} >= {max_time}") - - # Test constructor. - - def test_timeout_backwards_compatibility(self): - with warnings.catch_warnings(record=True) as recorded_warnings: - protocol = WebSocketCommonProtocol(timeout=5) - - self.assertEqual(protocol.close_timeout, 5) - - self.assertEqual(len(recorded_warnings), 1) - warning = recorded_warnings[0].message - self.assertEqual(str(warning), "rename timeout to close_timeout") - self.assertEqual(type(warning), DeprecationWarning) - - # Test public attributes. - - def test_local_address(self): - get_extra_info = unittest.mock.Mock(return_value=("host", 4312)) - self.transport.get_extra_info = get_extra_info - - self.assertEqual(self.protocol.local_address, ("host", 4312)) - get_extra_info.assert_called_with("sockname") - - def test_local_address_before_connection(self): - # Emulate the situation before connection_open() runs. - _transport = self.protocol.transport - del self.protocol.transport - try: - self.assertEqual(self.protocol.local_address, None) - finally: - self.protocol.transport = _transport - - def test_remote_address(self): - get_extra_info = unittest.mock.Mock(return_value=("host", 4312)) - self.transport.get_extra_info = get_extra_info - - self.assertEqual(self.protocol.remote_address, ("host", 4312)) - get_extra_info.assert_called_with("peername") - - def test_remote_address_before_connection(self): - # Emulate the situation before connection_open() runs. - _transport = self.protocol.transport - del self.protocol.transport - try: - self.assertEqual(self.protocol.remote_address, None) - finally: - self.protocol.transport = _transport - - def test_open(self): - self.assertTrue(self.protocol.open) - self.close_connection() - self.assertFalse(self.protocol.open) - - def test_closed(self): - self.assertFalse(self.protocol.closed) - self.close_connection() - self.assertTrue(self.protocol.closed) - - def test_wait_closed(self): - wait_closed = self.loop.create_task(self.protocol.wait_closed()) - self.assertFalse(wait_closed.done()) - self.close_connection() - self.assertTrue(wait_closed.done()) - - # Test the recv coroutine. 
- - def test_recv_text(self): - self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8"))) - data = self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(data, "café") - - def test_recv_binary(self): - self.receive_frame(Frame(True, OP_BINARY, b"tea")) - data = self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(data, b"tea") - - def test_recv_on_closing_connection_local(self): - close_task = self.half_close_connection_local() - - with self.assertRaises(ConnectionClosed): - self.loop.run_until_complete(self.protocol.recv()) - - self.loop.run_until_complete(close_task) # cleanup - - def test_recv_on_closing_connection_remote(self): - self.half_close_connection_remote() - - with self.assertRaises(ConnectionClosed): - self.loop.run_until_complete(self.protocol.recv()) - - def test_recv_on_closed_connection(self): - self.close_connection() - - with self.assertRaises(ConnectionClosed): - self.loop.run_until_complete(self.protocol.recv()) - - def test_recv_protocol_error(self): - self.receive_frame(Frame(True, OP_CONT, "café".encode("utf-8"))) - self.process_invalid_frames() - self.assertConnectionFailed(1002, "") - - def test_recv_unicode_error(self): - self.receive_frame(Frame(True, OP_TEXT, "café".encode("latin-1"))) - self.process_invalid_frames() - self.assertConnectionFailed(1007, "") - - def test_recv_text_payload_too_big(self): - self.protocol.max_size = 1024 - self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8") * 205)) - self.process_invalid_frames() - self.assertConnectionFailed(1009, "") - - def test_recv_binary_payload_too_big(self): - self.protocol.max_size = 1024 - self.receive_frame(Frame(True, OP_BINARY, b"tea" * 342)) - self.process_invalid_frames() - self.assertConnectionFailed(1009, "") - - def test_recv_text_no_max_size(self): - self.protocol.max_size = None # for test coverage - self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8") * 205)) - data = self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(data, "café" * 205) - - def test_recv_binary_no_max_size(self): - self.protocol.max_size = None # for test coverage - self.receive_frame(Frame(True, OP_BINARY, b"tea" * 342)) - data = self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(data, b"tea" * 342) - - def test_recv_queue_empty(self): - recv = self.loop.create_task(self.protocol.recv()) - with self.assertRaises(asyncio.TimeoutError): - self.loop.run_until_complete( - asyncio.wait_for(asyncio.shield(recv), timeout=MS) - ) - - self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8"))) - data = self.loop.run_until_complete(recv) - self.assertEqual(data, "café") - - def test_recv_queue_full(self): - self.protocol.max_queue = 2 - # Test internals because it's hard to verify buffers from the outside. 
- self.assertEqual(list(self.protocol.messages), []) - - self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8"))) - self.run_loop_once() - self.assertEqual(list(self.protocol.messages), ["café"]) - - self.receive_frame(Frame(True, OP_BINARY, b"tea")) - self.run_loop_once() - self.assertEqual(list(self.protocol.messages), ["café", b"tea"]) - - self.receive_frame(Frame(True, OP_BINARY, b"milk")) - self.run_loop_once() - self.assertEqual(list(self.protocol.messages), ["café", b"tea"]) - - self.loop.run_until_complete(self.protocol.recv()) - self.run_loop_once() - self.assertEqual(list(self.protocol.messages), [b"tea", b"milk"]) - - self.loop.run_until_complete(self.protocol.recv()) - self.run_loop_once() - self.assertEqual(list(self.protocol.messages), [b"milk"]) - - self.loop.run_until_complete(self.protocol.recv()) - self.run_loop_once() - self.assertEqual(list(self.protocol.messages), []) - - def test_recv_queue_no_limit(self): - self.protocol.max_queue = None - - for _ in range(100): - self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8"))) - self.run_loop_once() - - # Incoming message queue can contain at least 100 messages. - self.assertEqual(list(self.protocol.messages), ["café"] * 100) - - for _ in range(100): - self.loop.run_until_complete(self.protocol.recv()) - - self.assertEqual(list(self.protocol.messages), []) - - def test_recv_other_error(self): - async def read_message(): - raise Exception("BOOM") - - self.protocol.read_message = read_message - self.process_invalid_frames() - self.assertConnectionFailed(1011, "") - - def test_recv_canceled(self): - recv = self.loop.create_task(self.protocol.recv()) - self.loop.call_soon(recv.cancel) - - with self.assertRaises(asyncio.CancelledError): - self.loop.run_until_complete(recv) - - # The next frame doesn't disappear in a vacuum (it used to). - self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8"))) - data = self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(data, "café") - - def test_recv_canceled_race_condition(self): - recv = self.loop.create_task( - asyncio.wait_for(self.protocol.recv(), timeout=0.000_001) - ) - self.loop.call_soon( - self.receive_frame, Frame(True, OP_TEXT, "café".encode("utf-8")) - ) - - with self.assertRaises(asyncio.TimeoutError): - self.loop.run_until_complete(recv) - - # The previous frame doesn't disappear in a vacuum (it used to). - self.receive_frame(Frame(True, OP_TEXT, "tea".encode("utf-8"))) - data = self.loop.run_until_complete(self.protocol.recv()) - # If we're getting "tea" there, it means "café" was swallowed (ha, ha). - self.assertEqual(data, "café") - - def test_recv_when_transfer_data_cancelled(self): - # Clog incoming queue. - self.protocol.max_queue = 1 - self.receive_frame(Frame(True, OP_TEXT, "café".encode("utf-8"))) - self.receive_frame(Frame(True, OP_BINARY, b"tea")) - self.run_loop_once() - - # Flow control kicks in (check with an implementation detail). - self.assertFalse(self.protocol._put_message_waiter.done()) - - # Schedule recv(). - recv = self.loop.create_task(self.protocol.recv()) - - # Cancel transfer_data_task (again, implementation detail). - self.protocol.fail_connection() - self.run_loop_once() - self.assertTrue(self.protocol.transfer_data_task.cancelled()) - - # recv() completes properly. 
- self.assertEqual(self.loop.run_until_complete(recv), "café") - - def test_recv_prevents_concurrent_calls(self): - recv = self.loop.create_task(self.protocol.recv()) - - with self.assertRaises(RuntimeError) as raised: - self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual( - str(raised.exception), - "cannot call recv while another coroutine " - "is already waiting for the next message", - ) - recv.cancel() - - # Test the send coroutine. - - def test_send_text(self): - self.loop.run_until_complete(self.protocol.send("café")) - self.assertOneFrameSent(True, OP_TEXT, "café".encode("utf-8")) - - def test_send_binary(self): - self.loop.run_until_complete(self.protocol.send(b"tea")) - self.assertOneFrameSent(True, OP_BINARY, b"tea") - - def test_send_binary_from_bytearray(self): - self.loop.run_until_complete(self.protocol.send(bytearray(b"tea"))) - self.assertOneFrameSent(True, OP_BINARY, b"tea") - - def test_send_binary_from_memoryview(self): - self.loop.run_until_complete(self.protocol.send(memoryview(b"tea"))) - self.assertOneFrameSent(True, OP_BINARY, b"tea") - - def test_send_binary_from_non_contiguous_memoryview(self): - self.loop.run_until_complete(self.protocol.send(memoryview(b"tteeaa")[::2])) - self.assertOneFrameSent(True, OP_BINARY, b"tea") - - def test_send_dict(self): - with self.assertRaises(TypeError): - self.loop.run_until_complete(self.protocol.send({"not": "encoded"})) - self.assertNoFrameSent() - - def test_send_type_error(self): - with self.assertRaises(TypeError): - self.loop.run_until_complete(self.protocol.send(42)) - self.assertNoFrameSent() - - def test_send_iterable_text(self): - self.loop.run_until_complete(self.protocol.send(["ca", "fé"])) - self.assertFramesSent( - (False, OP_TEXT, "ca".encode("utf-8")), - (False, OP_CONT, "fé".encode("utf-8")), - (True, OP_CONT, "".encode("utf-8")), - ) - - def test_send_iterable_binary(self): - self.loop.run_until_complete(self.protocol.send([b"te", b"a"])) - self.assertFramesSent( - (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") - ) - - def test_send_iterable_binary_from_bytearray(self): - self.loop.run_until_complete( - self.protocol.send([bytearray(b"te"), bytearray(b"a")]) - ) - self.assertFramesSent( - (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") - ) - - def test_send_iterable_binary_from_memoryview(self): - self.loop.run_until_complete( - self.protocol.send([memoryview(b"te"), memoryview(b"a")]) - ) - self.assertFramesSent( - (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") - ) - - def test_send_iterable_binary_from_non_contiguous_memoryview(self): - self.loop.run_until_complete( - self.protocol.send([memoryview(b"ttee")[::2], memoryview(b"aa")[::2]]) - ) - self.assertFramesSent( - (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") - ) - - def test_send_empty_iterable(self): - self.loop.run_until_complete(self.protocol.send([])) - self.assertNoFrameSent() - - def test_send_iterable_type_error(self): - with self.assertRaises(TypeError): - self.loop.run_until_complete(self.protocol.send([42])) - self.assertNoFrameSent() - - def test_send_iterable_mixed_type_error(self): - with self.assertRaises(TypeError): - self.loop.run_until_complete(self.protocol.send(["café", b"tea"])) - self.assertFramesSent( - (False, OP_TEXT, "café".encode("utf-8")), - (True, OP_CLOSE, serialize_close(1011, "")), - ) - - def test_send_iterable_prevents_concurrent_send(self): - self.make_drain_slow(2 * MS) - - async def send_iterable(): - 
await self.protocol.send(["ca", "fé"]) - - async def send_concurrent(): - await asyncio.sleep(MS) - await self.protocol.send(b"tea") - - self.loop.run_until_complete(asyncio.gather(send_iterable(), send_concurrent())) - self.assertFramesSent( - (False, OP_TEXT, "ca".encode("utf-8")), - (False, OP_CONT, "fé".encode("utf-8")), - (True, OP_CONT, "".encode("utf-8")), - (True, OP_BINARY, b"tea"), - ) - - def test_send_async_iterable_text(self): - self.loop.run_until_complete(self.protocol.send(async_iterable(["ca", "fé"]))) - self.assertFramesSent( - (False, OP_TEXT, "ca".encode("utf-8")), - (False, OP_CONT, "fé".encode("utf-8")), - (True, OP_CONT, "".encode("utf-8")), - ) - - def test_send_async_iterable_binary(self): - self.loop.run_until_complete(self.protocol.send(async_iterable([b"te", b"a"]))) - self.assertFramesSent( - (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") - ) - - def test_send_async_iterable_binary_from_bytearray(self): - self.loop.run_until_complete( - self.protocol.send(async_iterable([bytearray(b"te"), bytearray(b"a")])) - ) - self.assertFramesSent( - (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") - ) - - def test_send_async_iterable_binary_from_memoryview(self): - self.loop.run_until_complete( - self.protocol.send(async_iterable([memoryview(b"te"), memoryview(b"a")])) - ) - self.assertFramesSent( - (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") - ) - - def test_send_async_iterable_binary_from_non_contiguous_memoryview(self): - self.loop.run_until_complete( - self.protocol.send( - async_iterable([memoryview(b"ttee")[::2], memoryview(b"aa")[::2]]) - ) - ) - self.assertFramesSent( - (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") - ) - - def test_send_empty_async_iterable(self): - self.loop.run_until_complete(self.protocol.send(async_iterable([]))) - self.assertNoFrameSent() - - def test_send_async_iterable_type_error(self): - with self.assertRaises(TypeError): - self.loop.run_until_complete(self.protocol.send(async_iterable([42]))) - self.assertNoFrameSent() - - def test_send_async_iterable_mixed_type_error(self): - with self.assertRaises(TypeError): - self.loop.run_until_complete( - self.protocol.send(async_iterable(["café", b"tea"])) - ) - self.assertFramesSent( - (False, OP_TEXT, "café".encode("utf-8")), - (True, OP_CLOSE, serialize_close(1011, "")), - ) - - def test_send_async_iterable_prevents_concurrent_send(self): - self.make_drain_slow(2 * MS) - - async def send_async_iterable(): - await self.protocol.send(async_iterable(["ca", "fé"])) - - async def send_concurrent(): - await asyncio.sleep(MS) - await self.protocol.send(b"tea") - - self.loop.run_until_complete( - asyncio.gather(send_async_iterable(), send_concurrent()) - ) - self.assertFramesSent( - (False, OP_TEXT, "ca".encode("utf-8")), - (False, OP_CONT, "fé".encode("utf-8")), - (True, OP_CONT, "".encode("utf-8")), - (True, OP_BINARY, b"tea"), - ) - - def test_send_on_closing_connection_local(self): - close_task = self.half_close_connection_local() - - with self.assertRaises(ConnectionClosed): - self.loop.run_until_complete(self.protocol.send("foobar")) - - self.assertNoFrameSent() - - self.loop.run_until_complete(close_task) # cleanup - - def test_send_on_closing_connection_remote(self): - self.half_close_connection_remote() - - with self.assertRaises(ConnectionClosed): - self.loop.run_until_complete(self.protocol.send("foobar")) - - self.assertNoFrameSent() - - def test_send_on_closed_connection(self): - 
self.close_connection() - - with self.assertRaises(ConnectionClosed): - self.loop.run_until_complete(self.protocol.send("foobar")) - - self.assertNoFrameSent() - - # Test the ping coroutine. - - def test_ping_default(self): - self.loop.run_until_complete(self.protocol.ping()) - # With our testing tools, it's more convenient to extract the expected - # ping data from the library's internals than from the frame sent. - ping_data = next(iter(self.protocol.pings)) - self.assertIsInstance(ping_data, bytes) - self.assertEqual(len(ping_data), 4) - self.assertOneFrameSent(True, OP_PING, ping_data) - - def test_ping_text(self): - self.loop.run_until_complete(self.protocol.ping("café")) - self.assertOneFrameSent(True, OP_PING, "café".encode("utf-8")) - - def test_ping_binary(self): - self.loop.run_until_complete(self.protocol.ping(b"tea")) - self.assertOneFrameSent(True, OP_PING, b"tea") - - def test_ping_binary_from_bytearray(self): - self.loop.run_until_complete(self.protocol.ping(bytearray(b"tea"))) - self.assertOneFrameSent(True, OP_PING, b"tea") - - def test_ping_binary_from_memoryview(self): - self.loop.run_until_complete(self.protocol.ping(memoryview(b"tea"))) - self.assertOneFrameSent(True, OP_PING, b"tea") - - def test_ping_binary_from_non_contiguous_memoryview(self): - self.loop.run_until_complete(self.protocol.ping(memoryview(b"tteeaa")[::2])) - self.assertOneFrameSent(True, OP_PING, b"tea") - - def test_ping_type_error(self): - with self.assertRaises(TypeError): - self.loop.run_until_complete(self.protocol.ping(42)) - self.assertNoFrameSent() - - def test_ping_on_closing_connection_local(self): - close_task = self.half_close_connection_local() - - with self.assertRaises(ConnectionClosed): - self.loop.run_until_complete(self.protocol.ping()) - - self.assertNoFrameSent() - - self.loop.run_until_complete(close_task) # cleanup - - def test_ping_on_closing_connection_remote(self): - self.half_close_connection_remote() - - with self.assertRaises(ConnectionClosed): - self.loop.run_until_complete(self.protocol.ping()) - - self.assertNoFrameSent() - - def test_ping_on_closed_connection(self): - self.close_connection() - - with self.assertRaises(ConnectionClosed): - self.loop.run_until_complete(self.protocol.ping()) - - self.assertNoFrameSent() - - # Test the pong coroutine. 
- - def test_pong_default(self): - self.loop.run_until_complete(self.protocol.pong()) - self.assertOneFrameSent(True, OP_PONG, b"") - - def test_pong_text(self): - self.loop.run_until_complete(self.protocol.pong("café")) - self.assertOneFrameSent(True, OP_PONG, "café".encode("utf-8")) - - def test_pong_binary(self): - self.loop.run_until_complete(self.protocol.pong(b"tea")) - self.assertOneFrameSent(True, OP_PONG, b"tea") - - def test_pong_binary_from_bytearray(self): - self.loop.run_until_complete(self.protocol.pong(bytearray(b"tea"))) - self.assertOneFrameSent(True, OP_PONG, b"tea") - - def test_pong_binary_from_memoryview(self): - self.loop.run_until_complete(self.protocol.pong(memoryview(b"tea"))) - self.assertOneFrameSent(True, OP_PONG, b"tea") - - def test_pong_binary_from_non_contiguous_memoryview(self): - self.loop.run_until_complete(self.protocol.pong(memoryview(b"tteeaa")[::2])) - self.assertOneFrameSent(True, OP_PONG, b"tea") - - def test_pong_type_error(self): - with self.assertRaises(TypeError): - self.loop.run_until_complete(self.protocol.pong(42)) - self.assertNoFrameSent() - - def test_pong_on_closing_connection_local(self): - close_task = self.half_close_connection_local() - - with self.assertRaises(ConnectionClosed): - self.loop.run_until_complete(self.protocol.pong()) - - self.assertNoFrameSent() - - self.loop.run_until_complete(close_task) # cleanup - - def test_pong_on_closing_connection_remote(self): - self.half_close_connection_remote() - - with self.assertRaises(ConnectionClosed): - self.loop.run_until_complete(self.protocol.pong()) - - self.assertNoFrameSent() - - def test_pong_on_closed_connection(self): - self.close_connection() - - with self.assertRaises(ConnectionClosed): - self.loop.run_until_complete(self.protocol.pong()) - - self.assertNoFrameSent() - - # Test the protocol's logic for acknowledging pings with pongs. - - def test_answer_ping(self): - self.receive_frame(Frame(True, OP_PING, b"test")) - self.run_loop_once() - self.assertOneFrameSent(True, OP_PONG, b"test") - - def test_ignore_pong(self): - self.receive_frame(Frame(True, OP_PONG, b"test")) - self.run_loop_once() - self.assertNoFrameSent() - - def test_acknowledge_ping(self): - ping = self.loop.run_until_complete(self.protocol.ping()) - self.assertFalse(ping.done()) - ping_frame = self.last_sent_frame() - pong_frame = Frame(True, OP_PONG, ping_frame.data) - self.receive_frame(pong_frame) - self.run_loop_once() - self.run_loop_once() - self.assertTrue(ping.done()) - - def test_abort_ping(self): - ping = self.loop.run_until_complete(self.protocol.ping()) - # Remove the frame from the buffer, else close_connection() complains. - self.last_sent_frame() - self.assertFalse(ping.done()) - self.close_connection() - self.assertTrue(ping.done()) - self.assertIsInstance(ping.exception(), ConnectionClosed) - - def test_abort_ping_does_not_log_exception_if_not_retreived(self): - self.loop.run_until_complete(self.protocol.ping()) - # Get the internal Future, which isn't directly returned by ping(). - (ping,) = self.protocol.pings.values() - # Remove the frame from the buffer, else close_connection() complains. - self.last_sent_frame() - self.close_connection() - # Check a private attribute, for lack of a better solution. 
- self.assertFalse(ping._log_traceback) - - def test_acknowledge_previous_pings(self): - pings = [ - (self.loop.run_until_complete(self.protocol.ping()), self.last_sent_frame()) - for i in range(3) - ] - # Unsolicited pong doesn't acknowledge pings - self.receive_frame(Frame(True, OP_PONG, b"")) - self.run_loop_once() - self.run_loop_once() - self.assertFalse(pings[0][0].done()) - self.assertFalse(pings[1][0].done()) - self.assertFalse(pings[2][0].done()) - # Pong acknowledges all previous pings - self.receive_frame(Frame(True, OP_PONG, pings[1][1].data)) - self.run_loop_once() - self.run_loop_once() - self.assertTrue(pings[0][0].done()) - self.assertTrue(pings[1][0].done()) - self.assertFalse(pings[2][0].done()) - - def test_acknowledge_aborted_ping(self): - ping = self.loop.run_until_complete(self.protocol.ping()) - ping_frame = self.last_sent_frame() - # Clog incoming queue. This lets connection_lost() abort pending pings - # with a ConnectionClosed exception before transfer_data_task - # terminates and close_connection cancels keepalive_ping_task. - self.protocol.max_queue = 1 - self.receive_frame(Frame(True, OP_TEXT, b"1")) - self.receive_frame(Frame(True, OP_TEXT, b"2")) - # Add pong frame to the queue. - pong_frame = Frame(True, OP_PONG, ping_frame.data) - self.receive_frame(pong_frame) - # Connection drops. - self.receive_eof() - self.loop.run_until_complete(self.protocol.wait_closed()) - # Ping receives a ConnectionClosed exception. - with self.assertRaises(ConnectionClosed): - ping.result() - - # transfer_data doesn't crash, which would be logged. - with self.assertNoLogs(): - # Unclog incoming queue. - self.loop.run_until_complete(self.protocol.recv()) - self.loop.run_until_complete(self.protocol.recv()) - - def test_canceled_ping(self): - ping = self.loop.run_until_complete(self.protocol.ping()) - ping_frame = self.last_sent_frame() - ping.cancel() - pong_frame = Frame(True, OP_PONG, ping_frame.data) - self.receive_frame(pong_frame) - self.run_loop_once() - self.run_loop_once() - self.assertTrue(ping.cancelled()) - - def test_duplicate_ping(self): - self.loop.run_until_complete(self.protocol.ping(b"foobar")) - self.assertOneFrameSent(True, OP_PING, b"foobar") - with self.assertRaises(ValueError): - self.loop.run_until_complete(self.protocol.ping(b"foobar")) - self.assertNoFrameSent() - - # Test the protocol's logic for rebuilding fragmented messages. 
- - def test_fragmented_text(self): - self.receive_frame(Frame(False, OP_TEXT, "ca".encode("utf-8"))) - self.receive_frame(Frame(True, OP_CONT, "fé".encode("utf-8"))) - data = self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(data, "café") - - def test_fragmented_binary(self): - self.receive_frame(Frame(False, OP_BINARY, b"t")) - self.receive_frame(Frame(False, OP_CONT, b"e")) - self.receive_frame(Frame(True, OP_CONT, b"a")) - data = self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(data, b"tea") - - def test_fragmented_text_payload_too_big(self): - self.protocol.max_size = 1024 - self.receive_frame(Frame(False, OP_TEXT, "café".encode("utf-8") * 100)) - self.receive_frame(Frame(True, OP_CONT, "café".encode("utf-8") * 105)) - self.process_invalid_frames() - self.assertConnectionFailed(1009, "") - - def test_fragmented_binary_payload_too_big(self): - self.protocol.max_size = 1024 - self.receive_frame(Frame(False, OP_BINARY, b"tea" * 171)) - self.receive_frame(Frame(True, OP_CONT, b"tea" * 171)) - self.process_invalid_frames() - self.assertConnectionFailed(1009, "") - - def test_fragmented_text_no_max_size(self): - self.protocol.max_size = None # for test coverage - self.receive_frame(Frame(False, OP_TEXT, "café".encode("utf-8") * 100)) - self.receive_frame(Frame(True, OP_CONT, "café".encode("utf-8") * 105)) - data = self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(data, "café" * 205) - - def test_fragmented_binary_no_max_size(self): - self.protocol.max_size = None # for test coverage - self.receive_frame(Frame(False, OP_BINARY, b"tea" * 171)) - self.receive_frame(Frame(True, OP_CONT, b"tea" * 171)) - data = self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(data, b"tea" * 342) - - def test_control_frame_within_fragmented_text(self): - self.receive_frame(Frame(False, OP_TEXT, "ca".encode("utf-8"))) - self.receive_frame(Frame(True, OP_PING, b"")) - self.receive_frame(Frame(True, OP_CONT, "fé".encode("utf-8"))) - data = self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(data, "café") - self.assertOneFrameSent(True, OP_PONG, b"") - - def test_unterminated_fragmented_text(self): - self.receive_frame(Frame(False, OP_TEXT, "ca".encode("utf-8"))) - # Missing the second part of the fragmented frame. - self.receive_frame(Frame(True, OP_BINARY, b"tea")) - self.process_invalid_frames() - self.assertConnectionFailed(1002, "") - - def test_close_handshake_in_fragmented_text(self): - self.receive_frame(Frame(False, OP_TEXT, "ca".encode("utf-8"))) - self.receive_frame(Frame(True, OP_CLOSE, b"")) - self.process_invalid_frames() - # The RFC may have overlooked this case: it says that control frames - # can be interjected in the middle of a fragmented message and that a - # close frame must be echoed. Even though there's an unterminated - # message, technically, the closing handshake was successful. - self.assertConnectionClosed(1005, "") - - def test_connection_close_in_fragmented_text(self): - self.receive_frame(Frame(False, OP_TEXT, "ca".encode("utf-8"))) - self.process_invalid_frames() - self.assertConnectionFailed(1006, "") - - # Test miscellaneous code paths to ensure full coverage. - - def test_connection_lost(self): - # Test calling connection_lost without going through close_connection. - self.protocol.connection_lost(None) - - self.assertConnectionFailed(1006, "") - - def test_ensure_open_before_opening_handshake(self): - # Simulate a bug by forcibly reverting the protocol state. 
- self.protocol.state = State.CONNECTING - - with self.assertRaises(InvalidState): - self.loop.run_until_complete(self.protocol.ensure_open()) - - def test_ensure_open_during_unclean_close(self): - # Process connection_made in order to start transfer_data_task. - self.run_loop_once() - - # Ensure the test terminates quickly. - self.loop.call_later(MS, self.receive_eof_if_client) - - # Simulate the case when close() times out sending a close frame. - self.protocol.fail_connection() - - with self.assertRaises(ConnectionClosed): - self.loop.run_until_complete(self.protocol.ensure_open()) - - def test_legacy_recv(self): - # By default legacy_recv in disabled. - self.assertEqual(self.protocol.legacy_recv, False) - - self.close_connection() - - # Enable legacy_recv. - self.protocol.legacy_recv = True - - # Now recv() returns None instead of raising ConnectionClosed. - self.assertIsNone(self.loop.run_until_complete(self.protocol.recv())) - - def test_connection_closed_attributes(self): - self.close_connection() - - with self.assertRaises(ConnectionClosed) as context: - self.loop.run_until_complete(self.protocol.recv()) - - connection_closed_exc = context.exception - self.assertEqual(connection_closed_exc.code, 1000) - self.assertEqual(connection_closed_exc.reason, "close") - - # Test the protocol logic for sending keepalive pings. - - def restart_protocol_with_keepalive_ping( - self, ping_interval=3 * MS, ping_timeout=3 * MS - ): - initial_protocol = self.protocol - # copied from tearDown - self.transport.close() - self.loop.run_until_complete(self.protocol.close()) - # copied from setUp, but enables keepalive pings - self.protocol = WebSocketCommonProtocol( - ping_interval=ping_interval, ping_timeout=ping_timeout - ) - self.transport = TransportMock() - self.transport.setup_mock(self.loop, self.protocol) - self.protocol.is_client = initial_protocol.is_client - self.protocol.side = initial_protocol.side - - def test_keepalive_ping(self): - self.restart_protocol_with_keepalive_ping() - - # Ping is sent at 3ms and acknowledged at 4ms. - self.loop.run_until_complete(asyncio.sleep(4 * MS)) - (ping_1,) = tuple(self.protocol.pings) - self.assertOneFrameSent(True, OP_PING, ping_1) - self.receive_frame(Frame(True, OP_PONG, ping_1)) - - # Next ping is sent at 7ms. - self.loop.run_until_complete(asyncio.sleep(4 * MS)) - (ping_2,) = tuple(self.protocol.pings) - self.assertOneFrameSent(True, OP_PING, ping_2) - - # The keepalive ping task goes on. - self.assertFalse(self.protocol.keepalive_ping_task.done()) - - def test_keepalive_ping_not_acknowledged_closes_connection(self): - self.restart_protocol_with_keepalive_ping() - - # Ping is sent at 3ms and not acknowleged. - self.loop.run_until_complete(asyncio.sleep(4 * MS)) - (ping_1,) = tuple(self.protocol.pings) - self.assertOneFrameSent(True, OP_PING, ping_1) - - # Connection is closed at 6ms. - self.loop.run_until_complete(asyncio.sleep(4 * MS)) - self.assertOneFrameSent(True, OP_CLOSE, serialize_close(1011, "")) - - # The keepalive ping task is complete. - self.assertEqual(self.protocol.keepalive_ping_task.result(), None) - - def test_keepalive_ping_stops_when_connection_closing(self): - self.restart_protocol_with_keepalive_ping() - close_task = self.half_close_connection_local() - - # No ping sent at 3ms because the closing handshake is in progress. - self.loop.run_until_complete(asyncio.sleep(4 * MS)) - self.assertNoFrameSent() - - # The keepalive ping task terminated. 
- self.assertTrue(self.protocol.keepalive_ping_task.cancelled()) - - self.loop.run_until_complete(close_task) # cleanup - - def test_keepalive_ping_stops_when_connection_closed(self): - self.restart_protocol_with_keepalive_ping() - self.close_connection() - - # The keepalive ping task terminated. - self.assertTrue(self.protocol.keepalive_ping_task.cancelled()) - - def test_keepalive_ping_does_not_crash_when_connection_lost(self): - self.restart_protocol_with_keepalive_ping() - # Clog incoming queue. This lets connection_lost() abort pending pings - # with a ConnectionClosed exception before transfer_data_task - # terminates and close_connection cancels keepalive_ping_task. - self.protocol.max_queue = 1 - self.receive_frame(Frame(True, OP_TEXT, b"1")) - self.receive_frame(Frame(True, OP_TEXT, b"2")) - # Ping is sent at 3ms. - self.loop.run_until_complete(asyncio.sleep(4 * MS)) - (ping_waiter,) = tuple(self.protocol.pings.values()) - # Connection drops. - self.receive_eof() - self.loop.run_until_complete(self.protocol.wait_closed()) - - # The ping waiter receives a ConnectionClosed exception. - with self.assertRaises(ConnectionClosed): - ping_waiter.result() - # The keepalive ping task terminated properly. - self.assertIsNone(self.protocol.keepalive_ping_task.result()) - - # Unclog incoming queue to terminate the test quickly. - self.loop.run_until_complete(self.protocol.recv()) - self.loop.run_until_complete(self.protocol.recv()) - - def test_keepalive_ping_with_no_ping_interval(self): - self.restart_protocol_with_keepalive_ping(ping_interval=None) - - # No ping is sent at 3ms. - self.loop.run_until_complete(asyncio.sleep(4 * MS)) - self.assertNoFrameSent() - - def test_keepalive_ping_with_no_ping_timeout(self): - self.restart_protocol_with_keepalive_ping(ping_timeout=None) - - # Ping is sent at 3ms and not acknowleged. - self.loop.run_until_complete(asyncio.sleep(4 * MS)) - (ping_1,) = tuple(self.protocol.pings) - self.assertOneFrameSent(True, OP_PING, ping_1) - - # Next ping is sent at 7ms anyway. - self.loop.run_until_complete(asyncio.sleep(4 * MS)) - ping_1_again, ping_2 = tuple(self.protocol.pings) - self.assertEqual(ping_1, ping_1_again) - self.assertOneFrameSent(True, OP_PING, ping_2) - - # The keepalive ping task goes on. - self.assertFalse(self.protocol.keepalive_ping_task.done()) - - def test_keepalive_ping_unexpected_error(self): - self.restart_protocol_with_keepalive_ping() - - async def ping(): - raise Exception("BOOM") - - self.protocol.ping = ping - - # The keepalive ping task fails when sending a ping at 3ms. - self.loop.run_until_complete(asyncio.sleep(4 * MS)) - - # The keepalive ping task is complete. - # It logs and swallows the exception. - self.assertEqual(self.protocol.keepalive_ping_task.result(), None) - - # Test the protocol logic for closing the connection. - - def test_local_close(self): - # Emulate how the remote endpoint answers the closing handshake. - self.loop.call_later(MS, self.receive_frame, self.close_frame) - self.loop.call_later(MS, self.receive_eof_if_client) - - # Run the closing handshake. - self.loop.run_until_complete(self.protocol.close(reason="close")) - - self.assertConnectionClosed(1000, "close") - self.assertOneFrameSent(*self.close_frame) - - # Closing the connection again is a no-op. - self.loop.run_until_complete(self.protocol.close(reason="oh noes!")) - - self.assertConnectionClosed(1000, "close") - self.assertNoFrameSent() - - def test_remote_close(self): - # Emulate how the remote endpoint initiates the closing handshake. 
- self.loop.call_later(MS, self.receive_frame, self.close_frame) - self.loop.call_later(MS, self.receive_eof_if_client) - - # Wait for some data in order to process the handshake. - # After recv() raises ConnectionClosed, the connection is closed. - with self.assertRaises(ConnectionClosed): - self.loop.run_until_complete(self.protocol.recv()) - - self.assertConnectionClosed(1000, "close") - self.assertOneFrameSent(*self.close_frame) - - # Closing the connection again is a no-op. - self.loop.run_until_complete(self.protocol.close(reason="oh noes!")) - - self.assertConnectionClosed(1000, "close") - self.assertNoFrameSent() - - def test_remote_close_and_connection_lost(self): - self.make_drain_slow() - # Drop the connection right after receiving a close frame, - # which prevents echoing the close frame properly. - self.receive_frame(self.close_frame) - self.receive_eof() - - with self.assertNoLogs(): - self.loop.run_until_complete(self.protocol.close(reason="oh noes!")) - - self.assertConnectionClosed(1000, "close") - self.assertOneFrameSent(*self.close_frame) - - def test_simultaneous_close(self): - # Receive the incoming close frame right after self.protocol.close() - # starts executing. This reproduces the error described in: - # https://github.com/aaugustin/websockets/issues/339 - self.loop.call_soon(self.receive_frame, self.remote_close) - self.loop.call_soon(self.receive_eof_if_client) - - self.loop.run_until_complete(self.protocol.close(reason="local")) - - self.assertConnectionClosed(1000, "remote") - # The current implementation sends a close frame in response to the - # close frame received from the remote end. It skips the close frame - # that should be sent as a result of calling close(). - self.assertOneFrameSent(*self.remote_close) - - def test_close_preserves_incoming_frames(self): - self.receive_frame(Frame(True, OP_TEXT, b"hello")) - - self.loop.call_later(MS, self.receive_frame, self.close_frame) - self.loop.call_later(MS, self.receive_eof_if_client) - self.loop.run_until_complete(self.protocol.close(reason="close")) - - self.assertConnectionClosed(1000, "close") - self.assertOneFrameSent(*self.close_frame) - - next_message = self.loop.run_until_complete(self.protocol.recv()) - self.assertEqual(next_message, "hello") - - def test_close_protocol_error(self): - invalid_close_frame = Frame(True, OP_CLOSE, b"\x00") - self.receive_frame(invalid_close_frame) - self.receive_eof_if_client() - self.run_loop_once() - self.loop.run_until_complete(self.protocol.close(reason="close")) - - self.assertConnectionFailed(1002, "") - - def test_close_connection_lost(self): - self.receive_eof() - self.run_loop_once() - self.loop.run_until_complete(self.protocol.close(reason="close")) - - self.assertConnectionFailed(1006, "") - - def test_local_close_during_recv(self): - recv = self.loop.create_task(self.protocol.recv()) - - self.loop.call_later(MS, self.receive_frame, self.close_frame) - self.loop.call_later(MS, self.receive_eof_if_client) - - self.loop.run_until_complete(self.protocol.close(reason="close")) - - with self.assertRaises(ConnectionClosed): - self.loop.run_until_complete(recv) - - self.assertConnectionClosed(1000, "close") - - # There is no test_remote_close_during_recv because it would be identical - # to test_remote_close. 
- - def test_remote_close_during_send(self): - self.make_drain_slow() - send = self.loop.create_task(self.protocol.send("hello")) - - self.receive_frame(self.close_frame) - self.receive_eof() - - with self.assertRaises(ConnectionClosed): - self.loop.run_until_complete(send) - - self.assertConnectionClosed(1000, "close") - - # There is no test_local_close_during_send because this cannot really - # happen, considering that writes are serialized. - - -class ServerTests(CommonTests, AsyncioTestCase): - def setUp(self): - super().setUp() - self.protocol.is_client = False - self.protocol.side = "server" - - def test_local_close_send_close_frame_timeout(self): - self.protocol.close_timeout = 10 * MS - self.make_drain_slow(50 * MS) - # If we can't send a close frame, time out in 10ms. - # Check the timing within -1/+9ms for robustness. - with self.assertCompletesWithin(9 * MS, 19 * MS): - self.loop.run_until_complete(self.protocol.close(reason="close")) - self.assertConnectionClosed(1006, "") - - def test_local_close_receive_close_frame_timeout(self): - self.protocol.close_timeout = 10 * MS - # If the client doesn't send a close frame, time out in 10ms. - # Check the timing within -1/+9ms for robustness. - with self.assertCompletesWithin(9 * MS, 19 * MS): - self.loop.run_until_complete(self.protocol.close(reason="close")) - self.assertConnectionClosed(1006, "") - - def test_local_close_connection_lost_timeout_after_write_eof(self): - self.protocol.close_timeout = 10 * MS - # If the client doesn't close its side of the TCP connection after we - # half-close our side with write_eof(), time out in 10ms. - # Check the timing within -1/+9ms for robustness. - with self.assertCompletesWithin(9 * MS, 19 * MS): - # HACK: disable write_eof => other end drops connection emulation. - self.transport._eof = True - self.receive_frame(self.close_frame) - self.loop.run_until_complete(self.protocol.close(reason="close")) - self.assertConnectionClosed(1000, "close") - - def test_local_close_connection_lost_timeout_after_close(self): - self.protocol.close_timeout = 10 * MS - # If the client doesn't close its side of the TCP connection after we - # half-close our side with write_eof() and close it with close(), time - # out in 20ms. - # Check the timing within -1/+9ms for robustness. - with self.assertCompletesWithin(19 * MS, 29 * MS): - # HACK: disable write_eof => other end drops connection emulation. - self.transport._eof = True - # HACK: disable close => other end drops connection emulation. - self.transport._closing = True - self.receive_frame(self.close_frame) - self.loop.run_until_complete(self.protocol.close(reason="close")) - self.assertConnectionClosed(1000, "close") - - -class ClientTests(CommonTests, AsyncioTestCase): - def setUp(self): - super().setUp() - self.protocol.is_client = True - self.protocol.side = "client" - - def test_local_close_send_close_frame_timeout(self): - self.protocol.close_timeout = 10 * MS - self.make_drain_slow(50 * MS) - # If we can't send a close frame, time out in 20ms. - # - 10ms waiting for sending a close frame - # - 10ms waiting for receiving a half-close - # Check the timing within -1/+9ms for robustness. 
- with self.assertCompletesWithin(19 * MS, 29 * MS): - self.loop.run_until_complete(self.protocol.close(reason="close")) - self.assertConnectionClosed(1006, "") - - def test_local_close_receive_close_frame_timeout(self): - self.protocol.close_timeout = 10 * MS - # If the server doesn't send a close frame, time out in 20ms: - # - 10ms waiting for receiving a close frame - # - 10ms waiting for receiving a half-close - # Check the timing within -1/+9ms for robustness. - with self.assertCompletesWithin(19 * MS, 29 * MS): - self.loop.run_until_complete(self.protocol.close(reason="close")) - self.assertConnectionClosed(1006, "") - - def test_local_close_connection_lost_timeout_after_write_eof(self): - self.protocol.close_timeout = 10 * MS - # If the server doesn't half-close its side of the TCP connection - # after we send a close frame, time out in 20ms: - # - 10ms waiting for receiving a half-close - # - 10ms waiting for receiving a close after write_eof - # Check the timing within -1/+9ms for robustness. - with self.assertCompletesWithin(19 * MS, 29 * MS): - # HACK: disable write_eof => other end drops connection emulation. - self.transport._eof = True - self.receive_frame(self.close_frame) - self.loop.run_until_complete(self.protocol.close(reason="close")) - self.assertConnectionClosed(1000, "close") - - def test_local_close_connection_lost_timeout_after_close(self): - self.protocol.close_timeout = 10 * MS - # If the client doesn't close its side of the TCP connection after we - # half-close our side with write_eof() and close it with close(), time - # out in 20ms. - # - 10ms waiting for receiving a half-close - # - 10ms waiting for receiving a close after write_eof - # - 10ms waiting for receiving a close after close - # Check the timing within -1/+9ms for robustness. - with self.assertCompletesWithin(29 * MS, 39 * MS): - # HACK: disable write_eof => other end drops connection emulation. - self.transport._eof = True - # HACK: disable close => other end drops connection emulation. - self.transport._closing = True - self.receive_frame(self.close_frame) - self.loop.run_until_complete(self.protocol.close(reason="close")) - self.assertConnectionClosed(1000, "close") +# Check that the legacy protocol module imports without an exception. +from websockets.protocol import * # noqa diff --git a/tests/utils.py b/tests/utils.py index 790d25687..ac891a0fd 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,10 +1,4 @@ -import asyncio -import contextlib import email.utils -import functools -import logging -import os -import time import unittest @@ -27,89 +21,3 @@ def assertGeneratorReturns(self, gen): with self.assertRaises(StopIteration) as raised: next(gen) return raised.exception.value - - -class AsyncioTestCase(unittest.TestCase): - """ - Base class for tests that sets up an isolated event loop for each test. - - """ - - def __init_subclass__(cls, **kwargs): - """ - Convert test coroutines to test functions. - - This supports asychronous tests transparently. - - """ - super().__init_subclass__(**kwargs) - for name in unittest.defaultTestLoader.getTestCaseNames(cls): - test = getattr(cls, name) - if asyncio.iscoroutinefunction(test): - setattr(cls, name, cls.convert_async_to_sync(test)) - - @staticmethod - def convert_async_to_sync(test): - """ - Convert a test coroutine to a test function. 
- - """ - - @functools.wraps(test) - def test_func(self, *args, **kwargs): - return self.loop.run_until_complete(test(self, *args, **kwargs)) - - return test_func - - def setUp(self): - super().setUp() - self.loop = asyncio.new_event_loop() - asyncio.set_event_loop(self.loop) - - def tearDown(self): - self.loop.close() - super().tearDown() - - def run_loop_once(self): - # Process callbacks scheduled with call_soon by appending a callback - # to stop the event loop then running it until it hits that callback. - self.loop.call_soon(self.loop.stop) - self.loop.run_forever() - - @contextlib.contextmanager - def assertNoLogs(self, logger="websockets", level=logging.ERROR): - """ - No message is logged on the given logger with at least the given level. - - """ - with self.assertLogs(logger, level) as logs: - # We want to test that no log message is emitted - # but assertLogs expects at least one log message. - logging.getLogger(logger).log(level, "dummy") - yield - - level_name = logging.getLevelName(level) - self.assertEqual(logs.output, [f"{level_name}:{logger}:dummy"]) - - def assertDeprecationWarnings(self, recorded_warnings, expected_warnings): - """ - Check recorded deprecation warnings match a list of expected messages. - - """ - self.assertEqual(len(recorded_warnings), len(expected_warnings)) - for recorded, expected in zip(recorded_warnings, expected_warnings): - actual = recorded.message - self.assertEqual(str(actual), expected) - self.assertEqual(type(actual), DeprecationWarning) - - -# Unit for timeouts. May be increased on slow machines by setting the -# WEBSOCKETS_TESTS_TIMEOUT_FACTOR environment variable. -MS = 0.001 * int(os.environ.get("WEBSOCKETS_TESTS_TIMEOUT_FACTOR", 1)) - -# asyncio's debug mode has a 10x performance penalty for this test suite. -if os.environ.get("PYTHONASYNCIODEBUG"): # pragma: no cover - MS *= 10 - -# Ensure that timeouts are larger than the clock's resolution (for Windows). -MS = max(MS, 2.5 * time.get_clock_info("monotonic").resolution) From 9a99229c671711d6274d3914244694e106966268 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 29 Nov 2020 15:45:41 +0100 Subject: [PATCH 239/281] Explain backwards-compatibility & versioning policies. --- docs/changelog.rst | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/docs/changelog.rst b/docs/changelog.rst index 291ec6938..2d2e7ca08 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -3,6 +3,23 @@ Changelog .. currentmodule:: websockets +Backwards-compatibility policy +.............................. + +``websockets`` is intended for production use. Therefore, stability is a goal. + +``websockets`` also aims at providing the best API for WebSocket in Python. + +While we value stability, we value progress more. When an improvement requires +changing the API, we make the change and document it below. + +When possible with reasonable effort, we preserve backwards-compatibility for +five years after the release that introduced the change. + +When a release contains backwards-incompatible API changes, the major version +is increased, else the minor version is increased. Patch versions are only for +fixing regressions shortly after a release. + 9.0 ... From 9c14a2f981af2da3517564ea7396ea06e19114d3 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 29 Nov 2020 18:02:12 +0100 Subject: [PATCH 240/281] Review and update changelog. * Add missing items for 9.0 release. * Re-assess infos / warnings. * Add release dates. 
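For instance, with the module aliases documented in the 9.0 entry below, code that still reaches into a deprecated module keeps working but emits a ``DeprecationWarning``. A minimal sketch, assuming websockets 9.0::

    import warnings

    import websockets

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        framing = websockets.framing  # alias for websockets.legacy.framing

    assert any(issubclass(w.category, DeprecationWarning) for w in caught)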
--- docs/changelog.rst | 276 ++++++++++++++++++++++++++++----------------- 1 file changed, 174 insertions(+), 102 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 2d2e7ca08..8d255fdfd 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -29,36 +29,54 @@ fixing regressions shortly after a release. **Version 9.0 moves or deprecates several APIs.** - * Import :class:`~datastructures.Headers` and - :exc:`~datastructures.MultipleValuesError` from - :mod:`websockets.datastructures` instead of :mod:`websockets.http`. + * :class:`~datastructures.Headers` and + :exc:`~datastructures.MultipleValuesError` were moved from + ``websockets.http`` to :mod:`websockets.datastructures`. - * :mod:`websockets.client`, :mod:`websockets.server,` - :mod:`websockets.protocol`, and :mod:`websockets.auth` were moved to - :mod:`websockets.legacy.client`, :mod:`websockets.legacy.server`, - :mod:`websockets.legacy.protocol`, and :mod:`websockets.legacy.auth` - respectively. + * ``websockets.client``, ``websockets.server``, ``websockets.protocol``, + and ``websockets.auth`` were moved to :mod:`websockets.legacy.client`, + :mod:`websockets.legacy.server`, :mod:`websockets.legacy.protocol`, and + :mod:`websockets.legacy.auth` respectively. - * :mod:`websockets.handshake` is deprecated. + * ``websockets.handshake`` is deprecated. - * :mod:`websockets.http` is deprecated. + * ``websockets.http`` is deprecated. - * :mod:`websockets.framing` is deprecated. + * ``websockets.framing`` is deprecated. Aliases provide backwards compatibility for all previously public APIs. +* Added compatibility with Python 3.9. + * Added support for IRIs in addition to URIs. +* Added close codes 1012, 1013, and 1014. + +* Raised an error when passing a :class:`dict` to + :meth:`~legacy.protocol.WebSocketCommonProtocol.send`. + +* Fixed ``Host`` header sent when connecting to an IPv6 address. + +* Aligned maximum cookie size with popular web browsers. + +* Ensured cancellation always propagates, even on Python versions where + :exc:`~asyncio.CancelledError` inherits :exc:`Exception`. + +* Improved error reporting. + + 8.1 ... -* Added compatibility with Python 3.8. +*November 1, 2019* -* Added close codes 1012, 1013, and 1014. +* Added compatibility with Python 3.8. 8.0.2 ..... +*July 31, 2019* + * Restored the ability to pass a socket with the ``sock`` parameter of :func:`~legacy.server.serve`. @@ -67,12 +85,16 @@ fixing regressions shortly after a release. 8.0.1 ..... +*July 21, 2019* + * Restored the ability to import ``WebSocketProtocolError`` from ``websockets``. 8.0 ... +*July 7, 2019* + .. warning:: **Version 8.0 drops compatibility with Python 3.4 and 3.5.** @@ -83,7 +105,8 @@ fixing regressions shortly after a release. Previously, it could be a function or a coroutine. - If you're passing a ``process_request`` argument to :func:`~legacy.server.serve` + If you're passing a ``process_request`` argument to + :func:`~legacy.server.serve` or :class:`~legacy.server.WebSocketServerProtocol`, or if you're overriding :meth:`~legacy.server.WebSocketServerProtocol.process_request` in a subclass, define it with ``async def`` instead of ``def``. @@ -103,36 +126,38 @@ fixing regressions shortly after a release. **Version 8.0 deprecates the** ``host`` **,** ``port`` **, and** ``secure`` **attributes of** :class:`~legacy.protocol.WebSocketCommonProtocol`. 
- Use :attr:`~legacy.protocol.WebSocketCommonProtocol.local_address` in servers and + Use :attr:`~legacy.protocol.WebSocketCommonProtocol.local_address` in + servers and :attr:`~legacy.protocol.WebSocketCommonProtocol.remote_address` in clients instead of ``host`` and ``port``. .. note:: **Version 8.0 renames the** ``WebSocketProtocolError`` **exception** - to :exc:`ProtocolError` **.** + to :exc:`~exceptions.ProtocolError` **.** A ``WebSocketProtocolError`` alias provides backwards compatibility. .. note:: **Version 8.0 adds the reason phrase to the return type of the low-level - API** :func:`~http.read_response` **.** + API** ``read_response()`` **.** Also: * :meth:`~legacy.protocol.WebSocketCommonProtocol.send`, :meth:`~legacy.protocol.WebSocketCommonProtocol.ping`, and - :meth:`~legacy.protocol.WebSocketCommonProtocol.pong` support bytes-like types - :class:`bytearray` and :class:`memoryview` in addition to :class:`bytes`. + :meth:`~legacy.protocol.WebSocketCommonProtocol.pong` support bytes-like + types :class:`bytearray` and :class:`memoryview` in addition to + :class:`bytes`. * Added :exc:`~exceptions.ConnectionClosedOK` and :exc:`~exceptions.ConnectionClosedError` subclasses of :exc:`~exceptions.ConnectionClosed` to tell apart normal connection termination from errors. -* Added :func:`~legacy.auth.basic_auth_protocol_factory` to enforce HTTP Basic Auth - on the server side. +* Added :func:`~legacy.auth.basic_auth_protocol_factory` to enforce HTTP + Basic Auth on the server side. * :func:`~legacy.client.connect` handles redirects from the server during the handshake. @@ -148,8 +173,9 @@ Also: exceptions in keepalive ping task. If you were using ``ping_timeout=None`` as a workaround, you can remove it. -* Changed :meth:`WebSocketServer.close() ` to - perform a proper closing handshake instead of failing the connection. +* Changed :meth:`WebSocketServer.close() + ` to perform a proper closing handshake + instead of failing the connection. * Avoided a crash when a ``extra_headers`` callable returns ``None``. @@ -170,20 +196,20 @@ Also: 7.0 ... -.. warning:: +*November 1, 2018* - **Version 7.0 renames the** ``timeout`` **argument of** - :func:`~legacy.server.serve()` **and** :func:`~legacy.client.connect` **to** - ``close_timeout`` **.** +.. warning:: - This prevents confusion with ``ping_timeout``. + ``websockets`` **now sends Ping frames at regular intervals and closes the + connection if it doesn't receive a matching Pong frame.** - For backwards compatibility, ``timeout`` is still supported. + See :class:`~legacy.protocol.WebSocketCommonProtocol` for details. .. warning:: - **Version 7.0 changes how a server terminates connections when it's - closed with** :meth:`~legacy.server.WebSocketServer.close` **.** + **Version 7.0 changes how a server terminates connections when it's closed + with** :meth:`WebSocketServer.close() + ` **.** Previously, connections handlers were canceled. Now, connections are closed with close code 1001 (going away). From the perspective of the @@ -200,8 +226,19 @@ Also: .. note:: - **Version 7.0 changes how a** :meth:`~legacy.protocol.WebSocketCommonProtocol.ping` - **that hasn't received a pong yet behaves when the connection is closed.** + **Version 7.0 renames the** ``timeout`` **argument of** + :func:`~legacy.server.serve` **and** :func:`~legacy.client.connect` **to** + ``close_timeout`` **.** + + This prevents confusion with ``ping_timeout``. + + For backwards compatibility, ``timeout`` is still supported. + +.. 
note:: + + **Version 7.0 changes how a** + :meth:`~legacy.protocol.WebSocketCommonProtocol.ping` **that hasn't + received a pong yet behaves when the connection is closed.** The ping — as in ``ping = await websocket.ping()`` — used to be canceled when the connection is closed, so that ``await ping`` raised @@ -211,34 +248,33 @@ Also: .. note:: **Version 7.0 raises a** :exc:`RuntimeError` **exception if two coroutines - call** :meth:`~legacy.protocol.WebSocketCommonProtocol.recv` **concurrently.** + call** :meth:`~legacy.protocol.WebSocketCommonProtocol.recv` + **concurrently.** Concurrent calls lead to non-deterministic behavior because there are no guarantees about which coroutine will receive which message. Also: -* ``websockets`` sends Ping frames at regular intervals and closes the - connection if it doesn't receive a matching Pong frame. See - :class:`~legacy.protocol.WebSocketCommonProtocol` for details. - * Added ``process_request`` and ``select_subprotocol`` arguments to - :func:`~legacy.server.serve` and :class:`~legacy.server.WebSocketServerProtocol` to - customize :meth:`~legacy.server.WebSocketServerProtocol.process_request` and + :func:`~legacy.server.serve` and + :class:`~legacy.server.WebSocketServerProtocol` to customize + :meth:`~legacy.server.WebSocketServerProtocol.process_request` and :meth:`~legacy.server.WebSocketServerProtocol.select_subprotocol` without subclassing :class:`~legacy.server.WebSocketServerProtocol`. * Added support for sending fragmented messages. -* Added the :meth:`~legacy.protocol.WebSocketCommonProtocol.wait_closed` method to - protocols. +* Added the :meth:`~legacy.protocol.WebSocketCommonProtocol.wait_closed` + method to protocols. * Added an interactive client: ``python -m websockets ``. * Changed the ``origins`` argument to represent the lack of an origin with ``None`` rather than ``''``. -* Fixed a data loss bug in :meth:`~legacy.protocol.WebSocketCommonProtocol.recv`: +* Fixed a data loss bug in + :meth:`~legacy.protocol.WebSocketCommonProtocol.recv`: canceling it at the wrong time could result in messages being dropped. * Improved handling of multiple HTTP headers with the same name. @@ -248,36 +284,37 @@ Also: 6.0 ... +*July 16, 2018* + .. warning:: - **Version 6.0 introduces the** :class:`~http.Headers` **class for managing - HTTP headers and changes several public APIs:** + **Version 6.0 introduces the** :class:`~datastructures.Headers` **class + for managing HTTP headers and changes several public APIs:** - * :meth:`~legacy.server.WebSocketServerProtocol.process_request` now receives a - :class:`~http.Headers` instead of a :class:`~http.client.HTTPMessage` in - the ``request_headers`` argument. + * :meth:`~legacy.server.WebSocketServerProtocol.process_request` now + receives a :class:`~datastructures.Headers` instead of a + ``http.client.HTTPMessage`` in the ``request_headers`` argument. - * The :attr:`~legacy.protocol.WebSocketCommonProtocol.request_headers` and - :attr:`~legacy.protocol.WebSocketCommonProtocol.response_headers` attributes of - :class:`~legacy.protocol.WebSocketCommonProtocol` are :class:`~http.Headers` - instead of :class:`~http.client.HTTPMessage`. + * The ``request_headers`` and ``response_headers`` attributes of + :class:`~legacy.protocol.WebSocketCommonProtocol` are + :class:`~datastructures.Headers` instead of ``http.client.HTTPMessage``. 
- * The :attr:`~legacy.protocol.WebSocketCommonProtocol.raw_request_headers` and - :attr:`~legacy.protocol.WebSocketCommonProtocol.raw_response_headers` - attributes of :class:`~legacy.protocol.WebSocketCommonProtocol` are removed. - Use :meth:`~http.Headers.raw_items` instead. + * The ``raw_request_headers`` and ``raw_response_headers`` attributes of + :class:`~legacy.protocol.WebSocketCommonProtocol` are removed. Use + :meth:`~datastructures.Headers.raw_items` instead. - * Functions defined in the :mod:`~handshake` module now receive - :class:`~http.Headers` in argument instead of ``get_header`` or - ``set_header`` functions. This affects libraries that rely on + * Functions defined in the ``handshake`` module now receive + :class:`~datastructures.Headers` in argument instead of ``get_header`` + or ``set_header`` functions. This affects libraries that rely on low-level APIs. - * Functions defined in the :mod:`~http` module now return HTTP headers as - :class:`~http.Headers` instead of lists of ``(name, value)`` pairs. + * Functions defined in the ``http`` module now return HTTP headers as + :class:`~datastructures.Headers` instead of lists of ``(name, value)`` + pairs. - Since :class:`~http.Headers` and :class:`~http.client.HTTPMessage` provide - similar APIs, this change won't affect most of the code dealing with HTTP - headers. + Since :class:`~datastructures.Headers` and ``http.client.HTTPMessage`` + provide similar APIs, this change won't affect most of the code dealing + with HTTP headers. Also: @@ -287,12 +324,16 @@ Also: 5.0.1 ..... -* Fixed a regression in the 5.0 release that broke some invocations of - :func:`~legacy.server.serve()` and :func:`~legacy.client.connect`. +*May 24, 2018* + +* Fixed a regression in 5.0 that broke some invocations of + :func:`~legacy.server.serve` and :func:`~legacy.client.connect`. 5.0 ... +*May 22, 2018* + .. note:: **Version 5.0 fixes a security issue introduced in version 4.0.** @@ -308,8 +349,8 @@ Also: **Version 5.0 adds a** ``user_info`` **field to the return value of** :func:`~uri.parse_uri` **and** :class:`~uri.WebSocketURI` **.** - If you're unpacking :class:`~exceptions.WebSocketURI` into four variables, - adjust your code to account for that fifth field. + If you're unpacking :class:`~uri.WebSocketURI` into four variables, adjust + your code to account for that fifth field. Also: @@ -322,14 +363,14 @@ Also: * A plain HTTP request now receives a 426 Upgrade Required response and doesn't log a stack trace. -* :func:`~legacy.server.unix_serve` can be used as an asynchronous context manager on - Python ≥ 3.5.1. +* :func:`~legacy.server.unix_serve` can be used as an asynchronous context + manager on Python ≥ 3.5.1. -* Added the :attr:`~legacy.protocol.WebSocketCommonProtocol.closed` property to - protocols. +* Added the :attr:`~legacy.protocol.WebSocketCommonProtocol.closed` property + to protocols. -* If a :meth:`~legacy.protocol.WebSocketCommonProtocol.ping` doesn't receive a pong, - it's canceled when the connection is closed. +* If a :meth:`~legacy.protocol.WebSocketCommonProtocol.ping` doesn't receive a + pong, it's canceled when the connection is closed. * Reported the cause of :exc:`~exceptions.ConnectionClosed` exceptions. @@ -355,13 +396,21 @@ Also: 4.0.1 ..... +*November 2, 2017* + * Fixed issues with the packaging of the 4.0 release. 4.0 ... +*November 2, 2017* + .. warning:: + **Version 4.0 drops compatibility with Python 3.3.** + +.. 
note:: + **Version 4.0 enables compression with the permessage-deflate extension.** In August 2017, Firefox and Chrome support it, but not Safari and IE. @@ -369,11 +418,7 @@ Also: Compression should improve performance but it increases RAM and CPU use. If you want to disable compression, add ``compression=None`` when calling - :func:`~legacy.server.serve()` or :func:`~legacy.client.connect`. - -.. warning:: - - **Version 4.0 drops compatibility with Python 3.3.** + :func:`~legacy.server.serve` or :func:`~legacy.client.connect`. .. note:: @@ -388,8 +433,8 @@ Also: * Added :func:`~legacy.server.unix_serve` for listening on Unix sockets. -* Added the :attr:`~legacy.server.WebSocketServer.sockets` attribute to the return - value of :func:`~legacy.server.serve`. +* Added the :attr:`~legacy.server.WebSocketServer.sockets` attribute to the + return value of :func:`~legacy.server.serve`. * Reorganized and extended documentation. @@ -407,12 +452,14 @@ Also: 3.4 ... -* Renamed :func:`~legacy.server.serve()` and :func:`~legacy.client.connect`'s ``klass`` - argument to ``create_protocol`` to reflect that it can also be a callable. - For backwards compatibility, ``klass`` is still supported. +*August 20, 2017* + +* Renamed :func:`~legacy.server.serve` and :func:`~legacy.client.connect`'s + ``klass`` argument to ``create_protocol`` to reflect that it can also be a + callable. For backwards compatibility, ``klass`` is still supported. -* :func:`~legacy.server.serve` can be used as an asynchronous context manager on - Python ≥ 3.5.1. +* :func:`~legacy.server.serve` can be used as an asynchronous context manager + on Python ≥ 3.5.1. * Added support for customizing handling of incoming connections with :meth:`~legacy.server.WebSocketServerProtocol.process_request`. @@ -423,8 +470,8 @@ Also: * Added an optional C extension to speed up low-level operations. -* An invalid response status code during :func:`~legacy.client.connect` now raises - :class:`~exceptions.InvalidStatusCode` with a ``code`` attribute. +* An invalid response status code during :func:`~legacy.client.connect` now + raises :class:`~exceptions.InvalidStatusCode` with a ``code`` attribute. * Providing a ``sock`` argument to :func:`~legacy.client.connect` no longer crashes. @@ -432,6 +479,8 @@ Also: 3.3 ... +*March 29, 2017* + * Ensured compatibility with Python 3.6. * Reduced noise in logs caused by connection resets. @@ -441,14 +490,18 @@ Also: 3.2 ... +*August 17, 2016* + * Added ``timeout``, ``max_size``, and ``max_queue`` arguments to - :func:`~legacy.client.connect()` and :func:`~legacy.server.serve`. + :func:`~legacy.client.connect` and :func:`~legacy.server.serve`. * Made server shutdown more robust. 3.1 ... +*April 21, 2016* + * Avoided a warning when closing a connection before the opening handshake. * Added flow control for incoming data. @@ -456,6 +509,8 @@ Also: 3.0 ... +*December 25, 2015* + .. warning:: **Version 3.0 introduces a backwards-incompatible change in the** @@ -463,9 +518,9 @@ Also: **If you're upgrading from 2.x or earlier, please read this carefully.** - :meth:`~legacy.protocol.WebSocketCommonProtocol.recv` used to return ``None`` - when the connection was closed. This required checking the return value of - every call:: + :meth:`~legacy.protocol.WebSocketCommonProtocol.recv` used to return + ``None`` when the connection was closed. 
This required checking the return + value of every call:: message = await websocket.recv() if message is None: @@ -484,13 +539,13 @@ Also: previous behavior can be restored by passing ``legacy_recv=True`` to :func:`~legacy.server.serve`, :func:`~legacy.client.connect`, :class:`~legacy.server.WebSocketServerProtocol`, or - :class:`~legacy.client.WebSocketClientProtocol`. ``legacy_recv`` isn't documented - in their signatures but isn't scheduled for deprecation either. + :class:`~legacy.client.WebSocketClientProtocol`. ``legacy_recv`` isn't + documented in their signatures but isn't scheduled for deprecation either. Also: -* :func:`~legacy.client.connect` can be used as an asynchronous context manager on - Python ≥ 3.5.1. +* :func:`~legacy.client.connect` can be used as an asynchronous context + manager on Python ≥ 3.5.1. * Updated documentation with ``await`` and ``async`` syntax from Python 3.5. @@ -498,7 +553,8 @@ Also: :meth:`~legacy.protocol.WebSocketCommonProtocol.pong` support data passed as :class:`str` in addition to :class:`bytes`. -* Worked around an asyncio bug affecting connection termination under load. +* Worked around an :mod:`asyncio` bug affecting connection termination under + load. * Made ``state_name`` attribute on protocols a public API. @@ -507,6 +563,8 @@ Also: 2.7 ... +*November 18, 2015* + * Added compatibility with Python 3.5. * Refreshed documentation. @@ -514,6 +572,8 @@ Also: 2.6 ... +*August 18, 2015* + * Added ``local_address`` and ``remote_address`` attributes on protocols. * Closed open connections with code 1001 when a server shuts down. @@ -523,19 +583,21 @@ Also: 2.5 ... +*July 28, 2015* + * Improved documentation. * Provided access to handshake request and response HTTP headers. * Allowed customizing handshake request and response HTTP headers. -* Supported running on a non-default event loop. +* Added support for running on a non-default event loop. * Returned a 403 status code instead of 400 when the request Origin isn't allowed. -* Canceling :meth:`~legacy.protocol.WebSocketCommonProtocol.recv` no longer drops - the next message. +* Canceling :meth:`~legacy.protocol.WebSocketCommonProtocol.recv` no longer + drops the next message. * Clarified that the closing handshake can be initiated by the client. @@ -548,9 +610,9 @@ Also: 2.4 ... -* Added support for subprotocols. +*January 31, 2015* -* Supported non-default event loop. +* Added support for subprotocols. * Added ``loop`` argument to :func:`~legacy.client.connect` and :func:`~legacy.server.serve`. @@ -558,16 +620,22 @@ Also: 2.3 ... +*November 3, 2014* + * Improved compliance of close codes. 2.2 ... +*July 28, 2014* + * Added support for limiting message size. 2.1 ... +*April 26, 2014* + * Added ``host``, ``port`` and ``secure`` attributes on protocols. * Added support for providing and checking Origin_. @@ -577,6 +645,8 @@ Also: 2.0 ... +*February 16, 2014* + .. warning:: **Version 2.0 introduces a backwards-incompatible change in the** @@ -603,4 +673,6 @@ Also: 1.0 ... +*November 14, 2013* + * Initial public release. From 94256f4f41ef024f7f511a573763bd755f5f1b46 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 29 Nov 2020 18:02:27 +0100 Subject: [PATCH 241/281] Update word list for spell check. 
--- docs/spelling_wordlist.txt | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 1eacc491d..dd3500b73 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -5,14 +5,21 @@ awaitable aymeric backpressure Backpressure +balancer +balancers Bitcoin +bottlenecked bufferbloat Bufferbloat bugfix bytestring bytestrings changelog +coroutine +coroutines +cryptocurrencies cryptocurrency +Ctrl daemonize fractalideas iterable @@ -20,18 +27,25 @@ keepalive KiB lifecycle Lifecycle +lookups MiB nginx +parsers permessage pong pongs Pythonic serializers +Subclasses +subclasses subclassing subprotocol subprotocols +Tidelift TLS +tox Unparse +unregister uple username websocket From 42f0e2c0b8e994c33b792208adff32bea1cdff4f Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 29 Nov 2020 22:01:42 +0100 Subject: [PATCH 242/281] Add helper to manage aliases and deprecations. This may save a little bit of CPU and memory by avoiding unnecessary imports too, especially as the library grows. --- src/websockets/__init__.py | 72 +++++++++++++++++++++++----- src/websockets/auth.py | 4 -- src/websockets/client.py | 11 ++++- src/websockets/framing.py | 6 --- src/websockets/handshake.py | 45 ------------------ src/websockets/http.py | 45 +++++++----------- src/websockets/imports.py | 95 +++++++++++++++++++++++++++++++++++++ src/websockets/protocol.py | 1 - src/websockets/server.py | 18 ++++--- tests/test_auth.py | 2 - tests/test_exports.py | 9 ++++ tests/test_framing.py | 9 ---- tests/test_handshake.py | 2 - tests/test_imports.py | 53 +++++++++++++++++++++ tests/test_protocol.py | 2 - 15 files changed, 256 insertions(+), 118 deletions(-) delete mode 100644 src/websockets/auth.py delete mode 100644 src/websockets/framing.py delete mode 100644 src/websockets/handshake.py create mode 100644 src/websockets/imports.py delete mode 100644 src/websockets/protocol.py delete mode 100644 tests/test_auth.py delete mode 100644 tests/test_framing.py delete mode 100644 tests/test_handshake.py create mode 100644 tests/test_imports.py delete mode 100644 tests/test_protocol.py diff --git a/src/websockets/__init__.py b/src/websockets/__init__.py index 0242e7942..580a3960f 100644 --- a/src/websockets/__init__.py +++ b/src/websockets/__init__.py @@ -1,19 +1,8 @@ -# This relies on each of the submodules having an __all__ variable. 
- -from .client import * -from .datastructures import * # noqa -from .exceptions import * # noqa -from .legacy.auth import * # noqa -from .legacy.client import * # noqa -from .legacy.protocol import * # noqa -from .legacy.server import * # noqa -from .server import * -from .typing import * # noqa -from .uri import * # noqa +from .imports import lazy_import from .version import version as __version__ # noqa -__all__ = [ +__all__ = [ # noqa "AbortHandshake", "basic_auth_protocol_factory", "BasicAuthWebSocketServerProtocol", @@ -58,3 +47,60 @@ "WebSocketServerProtocol", "WebSocketURI", ] + +lazy_import( + globals(), + aliases={ + "auth": ".legacy", + "basic_auth_protocol_factory": ".legacy.auth", + "BasicAuthWebSocketServerProtocol": ".legacy.auth", + "ClientConnection": ".client", + "connect": ".legacy.client", + "unix_connect": ".legacy.client", + "WebSocketClientProtocol": ".legacy.client", + "Headers": ".datastructures", + "MultipleValuesError": ".datastructures", + "WebSocketException": ".exceptions", + "ConnectionClosed": ".exceptions", + "ConnectionClosedError": ".exceptions", + "ConnectionClosedOK": ".exceptions", + "InvalidHandshake": ".exceptions", + "SecurityError": ".exceptions", + "InvalidMessage": ".exceptions", + "InvalidHeader": ".exceptions", + "InvalidHeaderFormat": ".exceptions", + "InvalidHeaderValue": ".exceptions", + "InvalidOrigin": ".exceptions", + "InvalidUpgrade": ".exceptions", + "InvalidStatusCode": ".exceptions", + "NegotiationError": ".exceptions", + "DuplicateParameter": ".exceptions", + "InvalidParameterName": ".exceptions", + "InvalidParameterValue": ".exceptions", + "AbortHandshake": ".exceptions", + "RedirectHandshake": ".exceptions", + "InvalidState": ".exceptions", + "InvalidURI": ".exceptions", + "PayloadTooBig": ".exceptions", + "ProtocolError": ".exceptions", + "WebSocketProtocolError": ".exceptions", + "protocol": ".legacy", + "WebSocketCommonProtocol": ".legacy.protocol", + "ServerConnection": ".server", + "serve": ".legacy.server", + "unix_serve": ".legacy.server", + "WebSocketServerProtocol": ".legacy.server", + "WebSocketServer": ".legacy.server", + "Data": ".typing", + "Origin": ".typing", + "ExtensionHeader": ".typing", + "ExtensionParameter": ".typing", + "Subprotocol": ".typing", + "parse_uri": ".uri", + "WebSocketURI": ".uri", + }, + deprecated_aliases={ + "framing": ".legacy", + "handshake": ".legacy", + }, +) diff --git a/src/websockets/auth.py b/src/websockets/auth.py deleted file mode 100644 index c8839c401..000000000 --- a/src/websockets/auth.py +++ /dev/null @@ -1,4 +0,0 @@ -from .legacy.auth import BasicAuthWebSocketServerProtocol, basic_auth_protocol_factory - - -__all__ = ["BasicAuthWebSocketServerProtocol", "basic_auth_protocol_factory"] diff --git a/src/websockets/client.py b/src/websockets/client.py index 8cababed5..91dd1662e 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -24,7 +24,7 @@ ) from .http import USER_AGENT, build_host from .http11 import Request, Response -from .legacy.client import WebSocketClientProtocol, connect, unix_connect # noqa +from .imports import lazy_import from .typing import ( ConnectionOption, ExtensionHeader, @@ -36,6 +36,15 @@ from .utils import accept_key, generate_key +lazy_import( + globals(), + aliases={ + "connect": ".legacy.client", + "unix_connect": ".legacy.client", + "WebSocketClientProtocol": ".legacy.client", + }, +) + __all__ = ["ClientConnection"] logger = logging.getLogger(__name__) diff --git a/src/websockets/framing.py b/src/websockets/framing.py deleted file mode 
100644 index 2dadb5610..000000000 --- a/src/websockets/framing.py +++ /dev/null @@ -1,6 +0,0 @@ -import warnings - -from .legacy.framing import * # noqa - - -warnings.warn("websockets.framing is deprecated", DeprecationWarning) diff --git a/src/websockets/handshake.py b/src/websockets/handshake.py deleted file mode 100644 index cc4010d41..000000000 --- a/src/websockets/handshake.py +++ /dev/null @@ -1,45 +0,0 @@ -import warnings - -from .datastructures import Headers - - -__all__ = ["build_request", "check_request", "build_response", "check_response"] - - -# Backwards compatibility with previously documented public APIs - - -def build_request(headers: Headers) -> str: # pragma: no cover - warnings.warn( - "websockets.handshake.build_request is deprecated", DeprecationWarning - ) - from .legacy.handshake import build_request - - return build_request(headers) - - -def check_request(headers: Headers) -> str: # pragma: no cover - warnings.warn( - "websockets.handshake.check_request is deprecated", DeprecationWarning - ) - from .legacy.handshake import check_request - - return check_request(headers) - - -def build_response(headers: Headers, key: str) -> None: # pragma: no cover - warnings.warn( - "websockets.handshake.build_response is deprecated", DeprecationWarning - ) - from .legacy.handshake import build_response - - return build_response(headers, key) - - -def check_response(headers: Headers, key: str) -> None: # pragma: no cover - warnings.warn( - "websockets.handshake.check_response is deprecated", DeprecationWarning - ) - from .legacy.handshake import check_response - - return check_response(headers, key) diff --git a/src/websockets/http.py b/src/websockets/http.py index b05b78455..9092836c2 100644 --- a/src/websockets/http.py +++ b/src/websockets/http.py @@ -1,15 +1,27 @@ -import asyncio import ipaddress import sys -import warnings -from typing import Tuple -# For backwards compatibility: -# Headers and MultipleValuesError used to be defined in this module -from .datastructures import Headers, MultipleValuesError # noqa +from .imports import lazy_import from .version import version as websockets_version +# For backwards compatibility: + + +lazy_import( + globals(), + # Headers and MultipleValuesError used to be defined in this module. 
+ aliases={ + "Headers": ".datastructures", + "MultipleValuesError": ".datastructures", + }, + deprecated_aliases={ + "read_request": ".legacy.http", + "read_response": ".legacy.http", + }, +) + + __all__ = ["USER_AGENT", "build_host"] @@ -38,24 +50,3 @@ def build_host(host: str, port: int, secure: bool) -> str: host = f"{host}:{port}" return host - - -# Backwards compatibility with previously documented public APIs - - -async def read_request( - stream: asyncio.StreamReader, -) -> Tuple[str, Headers]: # pragma: no cover - warnings.warn("websockets.http.read_request is deprecated", DeprecationWarning) - from .legacy.http import read_request - - return await read_request(stream) - - -async def read_response( - stream: asyncio.StreamReader, -) -> Tuple[int, str, Headers]: # pragma: no cover - warnings.warn("websockets.http.read_response is deprecated", DeprecationWarning) - from .legacy.http import read_response - - return await read_response(stream) diff --git a/src/websockets/imports.py b/src/websockets/imports.py new file mode 100644 index 000000000..9a4cfd98a --- /dev/null +++ b/src/websockets/imports.py @@ -0,0 +1,95 @@ +import importlib +import sys +import warnings +from typing import Any, Dict, Iterable, Optional + + +__all__ = ["lazy_import"] + + +def lazy_import( + namespace: Dict[str, Any], + aliases: Optional[Dict[str, str]] = None, + deprecated_aliases: Optional[Dict[str, str]] = None, +) -> None: + """ + Provide lazy, module-level imports. + + Typical use:: + + __getattr__, __dir__ = lazy_import( + globals(), + aliases={ + "": "", + ... + }, + deprecated_aliases={ + ..., + } + ) + + This function defines __getattr__ and __dir__ per PEP 562. + + On Python 3.6 and earlier, it falls back to non-lazy imports and doesn't + raise deprecation warnings. 
+ + """ + if aliases is None: + aliases = {} + if deprecated_aliases is None: + deprecated_aliases = {} + + namespace_set = set(namespace) + aliases_set = set(aliases) + deprecated_aliases_set = set(deprecated_aliases) + + assert not namespace_set & aliases_set, "namespace conflict" + assert not namespace_set & deprecated_aliases_set, "namespace conflict" + assert not aliases_set & deprecated_aliases_set, "namespace conflict" + + package = namespace["__name__"] + + if sys.version_info[:2] >= (3, 7): + + def __getattr__(name: str) -> Any: + assert aliases is not None # mypy cannot figure this out + try: + source = aliases[name] + except KeyError: + pass + else: + module = importlib.import_module(source, package) + return getattr(module, name) + + assert deprecated_aliases is not None # mypy cannot figure this out + try: + source = deprecated_aliases[name] + except KeyError: + pass + else: + warnings.warn( + f"{package}.{name} is deprecated", + DeprecationWarning, + stacklevel=2, + ) + module = importlib.import_module(source, package) + return getattr(module, name) + + raise AttributeError(f"module {package!r} has no attribute {name!r}") + + namespace["__getattr__"] = __getattr__ + + def __dir__() -> Iterable[str]: + return sorted(namespace_set | aliases_set | deprecated_aliases_set) + + namespace["__dir__"] = __dir__ + + else: # pragma: no cover + + for name, source in aliases.items(): + module = importlib.import_module(source, package) + namespace[name] = getattr(module, name) + + for name, source in deprecated_aliases.items(): + module = importlib.import_module(source, package) + namespace[name] = getattr(module, name) diff --git a/src/websockets/protocol.py b/src/websockets/protocol.py deleted file mode 100644 index 287f92a57..000000000 --- a/src/websockets/protocol.py +++ /dev/null @@ -1 +0,0 @@ -from .legacy.protocol import * # noqa diff --git a/src/websockets/server.py b/src/websockets/server.py index bd527be74..67ab83031 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -26,12 +26,7 @@ ) from .http import USER_AGENT from .http11 import Request, Response -from .legacy.server import ( # noqa - WebSocketServer, - WebSocketServerProtocol, - serve, - unix_serve, -) +from .imports import lazy_import from .typing import ( ConnectionOption, ExtensionHeader, @@ -42,6 +37,17 @@ from .utils import accept_key +lazy_import( + globals(), + aliases={ + "serve": ".legacy.server", + "unix_serve": ".legacy.server", + "WebSocketServerProtocol": ".legacy.server", + "WebSocketServer": ".legacy.server", + }, +) + + __all__ = ["ServerConnection"] logger = logging.getLogger(__name__) diff --git a/tests/test_auth.py b/tests/test_auth.py deleted file mode 100644 index 01ca207c7..000000000 --- a/tests/test_auth.py +++ /dev/null @@ -1,2 +0,0 @@ -# Check that the legacy auth module imports without an exception. 
-from websockets.auth import * # noqa diff --git a/tests/test_exports.py b/tests/test_exports.py index 8e4330304..568c50c54 100644 --- a/tests/test_exports.py +++ b/tests/test_exports.py @@ -1,6 +1,15 @@ import unittest import websockets +import websockets.client +import websockets.exceptions +import websockets.legacy.auth +import websockets.legacy.client +import websockets.legacy.protocol +import websockets.legacy.server +import websockets.server +import websockets.typing +import websockets.uri combined_exports = ( diff --git a/tests/test_framing.py b/tests/test_framing.py deleted file mode 100644 index d6fa6352a..000000000 --- a/tests/test_framing.py +++ /dev/null @@ -1,9 +0,0 @@ -import warnings - - -with warnings.catch_warnings(): - warnings.filterwarnings( - "ignore", "websockets.framing is deprecated", DeprecationWarning - ) - # Check that the legacy framing module imports without an exception. - from websockets.framing import * # noqa diff --git a/tests/test_handshake.py b/tests/test_handshake.py deleted file mode 100644 index 8c35c9714..000000000 --- a/tests/test_handshake.py +++ /dev/null @@ -1,2 +0,0 @@ -# Check that the legacy handshake module imports without an exception. -from websockets.handshake import * # noqa diff --git a/tests/test_imports.py b/tests/test_imports.py new file mode 100644 index 000000000..113564e9f --- /dev/null +++ b/tests/test_imports.py @@ -0,0 +1,53 @@ +import types +import unittest +import warnings + +from websockets.imports import * + + +foo = object() + +bar = object() + + +class ImportsTests(unittest.TestCase): + def test_get_alias(self): + mod = types.ModuleType("tests.test_imports.test_alias") + lazy_import(vars(mod), aliases={"foo": ".."}) + + self.assertEqual(mod.foo, foo) + + def test_get_deprecated_alias(self): + mod = types.ModuleType("tests.test_imports.test_alias") + lazy_import(vars(mod), deprecated_aliases={"bar": ".."}) + + with warnings.catch_warnings(record=True) as recorded_warnings: + self.assertEqual(mod.bar, bar) + + self.assertEqual(len(recorded_warnings), 1) + warning = recorded_warnings[0].message + self.assertEqual( + str(warning), "tests.test_imports.test_alias.bar is deprecated" + ) + self.assertEqual(type(warning), DeprecationWarning) + + def test_dir(self): + mod = types.ModuleType("tests.test_imports.test_alias") + lazy_import(vars(mod), aliases={"foo": ".."}, deprecated_aliases={"bar": ".."}) + + self.assertEqual( + [item for item in dir(mod) if not item[:2] == item[-2:] == "__"], + ["bar", "foo"], + ) + + def test_attribute_error(self): + mod = types.ModuleType("tests.test_imports.test_alias") + lazy_import(vars(mod)) + + with self.assertRaises(AttributeError) as raised: + mod.foo + + self.assertEqual( + str(raised.exception), + "module 'tests.test_imports.test_alias' has no attribute 'foo'", + ) diff --git a/tests/test_protocol.py b/tests/test_protocol.py deleted file mode 100644 index f896fcae4..000000000 --- a/tests/test_protocol.py +++ /dev/null @@ -1,2 +0,0 @@ -# Check that the legacy protocol module imports without an exception. -from websockets.protocol import * # noqa From 965f8ec77347adaaf23c82eef693c9882269b46c Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Mon, 30 Nov 2020 21:46:25 +0100 Subject: [PATCH 243/281] Fix lazy imports of objects on Python 3.6. 
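
An alias can point either to an object defined in the source module or to a
submodule of it (for instance "framing" maps to ".legacy"). The
importlib.import_module() + getattr() combination used so far doesn't cover
the submodule case, and anchoring relative imports on namespace["__name__"]
is only right when the calling module is a package. import_name() below is
built on __import__, which takes an explicit level and a fromlist and comes
closest to the intended behavior.

A rough, standard-library-only illustration of the fromlist behavior this
relies on (not code from this repository):

    import email
    import importlib

    # Importing the parent package doesn't necessarily load its submodules,
    # so a plain attribute lookup can fail at this point.
    try:
        email.mime
    except AttributeError:
        print("email.mime isn't loaded yet")

    # __import__ with a non-empty fromlist imports the listed submodules and
    # returns the module named by its first argument, so a getattr() call
    # works afterwards.
    pkg = __import__("email", fromlist=["mime"])
    print(pkg.mime)

    # importlib.import_module() only imports a module given its full dotted
    # name; fetching an attribute or submodule is up to the caller.
    print(importlib.import_module("email.mime") is pkg.mime)  # True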
--- src/websockets/imports.py | 34 +++++++++++++++++++++++++--------- tests/test_imports.py | 39 +++++++++++++++++++++++++++------------ 2 files changed, 52 insertions(+), 21 deletions(-) diff --git a/src/websockets/imports.py b/src/websockets/imports.py index 9a4cfd98a..efd3eabf3 100644 --- a/src/websockets/imports.py +++ b/src/websockets/imports.py @@ -1,4 +1,3 @@ -import importlib import sys import warnings from typing import Any, Dict, Iterable, Optional @@ -7,6 +6,27 @@ __all__ = ["lazy_import"] +def import_name(name: str, source: str, namespace: Dict[str, Any]) -> Any: + """ + Import from in . + + There are two cases: + + - is an object defined in + - is a submodule of source + + Neither __import__ nor importlib.import_module does exactly this. + __import__ is closer to the intended behavior. + + """ + level = 0 + while source[level] == ".": + level += 1 + assert level < len(source), "importing from parent isn't supported" + module = __import__(source[level:], namespace, None, [name], level) + return getattr(module, name) + + def lazy_import( namespace: Dict[str, Any], aliases: Optional[Dict[str, str]] = None, @@ -58,8 +78,7 @@ def __getattr__(name: str) -> Any: except KeyError: pass else: - module = importlib.import_module(source, package) - return getattr(module, name) + return import_name(name, source, namespace) assert deprecated_aliases is not None # mypy cannot figure this out try: @@ -72,8 +91,7 @@ def __getattr__(name: str) -> Any: DeprecationWarning, stacklevel=2, ) - module = importlib.import_module(source, package) - return getattr(module, name) + return import_name(name, source, namespace) raise AttributeError(f"module {package!r} has no attribute {name!r}") @@ -87,9 +105,7 @@ def __dir__() -> Iterable[str]: else: # pragma: no cover for name, source in aliases.items(): - module = importlib.import_module(source, package) - namespace[name] = getattr(module, name) + namespace[name] = import_name(name, source, namespace) for name, source in deprecated_aliases.items(): - module = importlib.import_module(source, package) - namespace[name] = getattr(module, name) + namespace[name] = import_name(name, source, namespace) diff --git a/tests/test_imports.py b/tests/test_imports.py index 113564e9f..d84808902 100644 --- a/tests/test_imports.py +++ b/tests/test_imports.py @@ -1,3 +1,4 @@ +import sys import types import unittest import warnings @@ -11,18 +12,30 @@ class ImportsTests(unittest.TestCase): + def setUp(self): + self.mod = types.ModuleType("tests.test_imports.test_alias") + self.mod.__package__ = self.mod.__name__ + def test_get_alias(self): - mod = types.ModuleType("tests.test_imports.test_alias") - lazy_import(vars(mod), aliases={"foo": ".."}) + lazy_import( + vars(self.mod), + aliases={"foo": "...test_imports"}, + ) - self.assertEqual(mod.foo, foo) + self.assertEqual(self.mod.foo, foo) def test_get_deprecated_alias(self): - mod = types.ModuleType("tests.test_imports.test_alias") - lazy_import(vars(mod), deprecated_aliases={"bar": ".."}) + lazy_import( + vars(self.mod), + deprecated_aliases={"bar": "...test_imports"}, + ) with warnings.catch_warnings(record=True) as recorded_warnings: - self.assertEqual(mod.bar, bar) + self.assertEqual(self.mod.bar, bar) + + # No warnings raised on pre-PEP 526 Python. 
+ if sys.version_info[:2] < (3, 7): # pragma: no cover + return self.assertEqual(len(recorded_warnings), 1) warning = recorded_warnings[0].message @@ -32,20 +45,22 @@ def test_get_deprecated_alias(self): self.assertEqual(type(warning), DeprecationWarning) def test_dir(self): - mod = types.ModuleType("tests.test_imports.test_alias") - lazy_import(vars(mod), aliases={"foo": ".."}, deprecated_aliases={"bar": ".."}) + lazy_import( + vars(self.mod), + aliases={"foo": "...test_imports"}, + deprecated_aliases={"bar": "...test_imports"}, + ) self.assertEqual( - [item for item in dir(mod) if not item[:2] == item[-2:] == "__"], + [item for item in dir(self.mod) if not item[:2] == item[-2:] == "__"], ["bar", "foo"], ) def test_attribute_error(self): - mod = types.ModuleType("tests.test_imports.test_alias") - lazy_import(vars(mod)) + lazy_import(vars(self.mod)) with self.assertRaises(AttributeError) as raised: - mod.foo + self.mod.foo self.assertEqual( str(raised.exception), From ecf64e7a56ee85e10a812139a4aee09e736aa241 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Mon, 30 Nov 2020 22:36:21 +0100 Subject: [PATCH 244/281] Handle non-contiguous memoryviews in C extension. This avoids the special-case in Python code. --- src/websockets/frames.py | 11 ++------ src/websockets/speedups.c | 51 ++++++++++++++++++----------------- tests/legacy/test_protocol.py | 30 --------------------- tests/test_frames.py | 9 ------- tests/test_utils.py | 24 +++-------------- 5 files changed, 32 insertions(+), 93 deletions(-) diff --git a/src/websockets/frames.py b/src/websockets/frames.py index 74223c0e8..71783e176 100644 --- a/src/websockets/frames.py +++ b/src/websockets/frames.py @@ -263,13 +263,8 @@ def prepare_data(data: Data) -> Tuple[int, bytes]: """ if isinstance(data, str): return OP_TEXT, data.encode("utf-8") - elif isinstance(data, (bytes, bytearray)): + elif isinstance(data, (bytes, bytearray, memoryview)): return OP_BINARY, data - elif isinstance(data, memoryview): - if data.c_contiguous: - return OP_BINARY, data - else: - return OP_BINARY, data.tobytes() else: raise TypeError("data must be bytes-like or str") @@ -290,10 +285,8 @@ def prepare_ctrl(data: Data) -> bytes: """ if isinstance(data, str): return data.encode("utf-8") - elif isinstance(data, (bytes, bytearray)): + elif isinstance(data, (bytes, bytearray, memoryview)): return bytes(data) - elif isinstance(data, memoryview): - return data.tobytes() else: raise TypeError("data must be bytes-like or str") diff --git a/src/websockets/speedups.c b/src/websockets/speedups.c index ede181e5d..fc328e528 100644 --- a/src/websockets/speedups.c +++ b/src/websockets/speedups.c @@ -13,39 +13,35 @@ static const Py_ssize_t MASK_LEN = 4; /* Similar to PyBytes_AsStringAndSize, but accepts more types */ static int -_PyBytesLike_AsStringAndSize(PyObject *obj, char **buffer, Py_ssize_t *length) +_PyBytesLike_AsStringAndSize(PyObject *obj, PyObject **tmp, char **buffer, Py_ssize_t *length) { - // This supports bytes, bytearrays, and C-contiguous memoryview objects, - // which are the most useful data structures for handling byte streams. - // websockets.framing.prepare_data() returns only values of these types. - // Any object implementing the buffer protocol could be supported, however - // that would require allocation or copying memory, which is expensive. + // This supports bytes, bytearrays, and memoryview objects, + // which are common data structures for handling byte streams. + // websockets.framing.prepare_data() returns only these types. 
+ // If *tmp isn't NULL, the caller gets a new reference. if (PyBytes_Check(obj)) { + *tmp = NULL; *buffer = PyBytes_AS_STRING(obj); *length = PyBytes_GET_SIZE(obj); } else if (PyByteArray_Check(obj)) { + *tmp = NULL; *buffer = PyByteArray_AS_STRING(obj); *length = PyByteArray_GET_SIZE(obj); } else if (PyMemoryView_Check(obj)) { - Py_buffer *mv_buf; - mv_buf = PyMemoryView_GET_BUFFER(obj); - if (PyBuffer_IsContiguous(mv_buf, 'C')) - { - *buffer = mv_buf->buf; - *length = mv_buf->len; - } - else + *tmp = PyMemoryView_GetContiguous(obj, PyBUF_READ, 'C'); + if (*tmp == NULL) { - PyErr_Format( - PyExc_TypeError, - "expected a contiguous memoryview"); return -1; } + Py_buffer *mv_buf; + mv_buf = PyMemoryView_GET_BUFFER(*tmp); + *buffer = mv_buf->buf; + *length = mv_buf->len; } else { @@ -74,15 +70,17 @@ apply_mask(PyObject *self, PyObject *args, PyObject *kwds) // A pointer to a char * + length will be extracted from the data and mask // arguments, possibly via a Py_buffer. + PyObject *input_tmp = NULL; char *input; Py_ssize_t input_len; + PyObject *mask_tmp = NULL; char *mask; Py_ssize_t mask_len; // Initialize a PyBytesObject then get a pointer to the underlying char * // in order to avoid an extra memory copy in PyBytes_FromStringAndSize. - PyObject *result; + PyObject *result = NULL; char *output; // Other variables. @@ -94,23 +92,23 @@ apply_mask(PyObject *self, PyObject *args, PyObject *kwds) if (!PyArg_ParseTupleAndKeywords( args, kwds, "OO", kwlist, &input_obj, &mask_obj)) { - return NULL; + goto exit; } - if (_PyBytesLike_AsStringAndSize(input_obj, &input, &input_len) == -1) + if (_PyBytesLike_AsStringAndSize(input_obj, &input_tmp, &input, &input_len) == -1) { - return NULL; + goto exit; } - if (_PyBytesLike_AsStringAndSize(mask_obj, &mask, &mask_len) == -1) + if (_PyBytesLike_AsStringAndSize(mask_obj, &mask_tmp, &mask, &mask_len) == -1) { - return NULL; + goto exit; } if (mask_len != MASK_LEN) { PyErr_SetString(PyExc_ValueError, "mask must contain 4 bytes"); - return NULL; + goto exit; } // Create output. @@ -118,7 +116,7 @@ apply_mask(PyObject *self, PyObject *args, PyObject *kwds) result = PyBytes_FromStringAndSize(NULL, input_len); if (result == NULL) { - return NULL; + goto exit; } // Since we juste created result, we don't need error checks. 
@@ -172,6 +170,9 @@ apply_mask(PyObject *self, PyObject *args, PyObject *kwds) output[i] = input[i] ^ mask[i & (MASK_LEN - 1)]; } +exit: + Py_XDECREF(input_tmp); + Py_XDECREF(mask_tmp); return result; } diff --git a/tests/legacy/test_protocol.py b/tests/legacy/test_protocol.py index 218d05376..a89bcc88b 100644 --- a/tests/legacy/test_protocol.py +++ b/tests/legacy/test_protocol.py @@ -580,10 +580,6 @@ def test_send_binary_from_memoryview(self): self.loop.run_until_complete(self.protocol.send(memoryview(b"tea"))) self.assertOneFrameSent(True, OP_BINARY, b"tea") - def test_send_binary_from_non_contiguous_memoryview(self): - self.loop.run_until_complete(self.protocol.send(memoryview(b"tteeaa")[::2])) - self.assertOneFrameSent(True, OP_BINARY, b"tea") - def test_send_dict(self): with self.assertRaises(TypeError): self.loop.run_until_complete(self.protocol.send({"not": "encoded"})) @@ -624,14 +620,6 @@ def test_send_iterable_binary_from_memoryview(self): (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") ) - def test_send_iterable_binary_from_non_contiguous_memoryview(self): - self.loop.run_until_complete( - self.protocol.send([memoryview(b"ttee")[::2], memoryview(b"aa")[::2]]) - ) - self.assertFramesSent( - (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") - ) - def test_send_empty_iterable(self): self.loop.run_until_complete(self.protocol.send([])) self.assertNoFrameSent() @@ -697,16 +685,6 @@ def test_send_async_iterable_binary_from_memoryview(self): (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") ) - def test_send_async_iterable_binary_from_non_contiguous_memoryview(self): - self.loop.run_until_complete( - self.protocol.send( - async_iterable([memoryview(b"ttee")[::2], memoryview(b"aa")[::2]]) - ) - ) - self.assertFramesSent( - (False, OP_BINARY, b"te"), (False, OP_CONT, b"a"), (True, OP_CONT, b"") - ) - def test_send_empty_async_iterable(self): self.loop.run_until_complete(self.protocol.send(async_iterable([]))) self.assertNoFrameSent() @@ -799,10 +777,6 @@ def test_ping_binary_from_memoryview(self): self.loop.run_until_complete(self.protocol.ping(memoryview(b"tea"))) self.assertOneFrameSent(True, OP_PING, b"tea") - def test_ping_binary_from_non_contiguous_memoryview(self): - self.loop.run_until_complete(self.protocol.ping(memoryview(b"tteeaa")[::2])) - self.assertOneFrameSent(True, OP_PING, b"tea") - def test_ping_type_error(self): with self.assertRaises(TypeError): self.loop.run_until_complete(self.protocol.ping(42)) @@ -856,10 +830,6 @@ def test_pong_binary_from_memoryview(self): self.loop.run_until_complete(self.protocol.pong(memoryview(b"tea"))) self.assertOneFrameSent(True, OP_PONG, b"tea") - def test_pong_binary_from_non_contiguous_memoryview(self): - self.loop.run_until_complete(self.protocol.pong(memoryview(b"tteeaa")[::2])) - self.assertOneFrameSent(True, OP_PONG, b"tea") - def test_pong_type_error(self): with self.assertRaises(TypeError): self.loop.run_until_complete(self.protocol.pong(42)) diff --git a/tests/test_frames.py b/tests/test_frames.py index 4d10c6ef2..13a712322 100644 --- a/tests/test_frames.py +++ b/tests/test_frames.py @@ -218,12 +218,6 @@ def test_prepare_data_memoryview(self): (OP_BINARY, memoryview(b"tea")), ) - def test_prepare_data_non_contiguous_memoryview(self): - self.assertEqual( - prepare_data(memoryview(b"tteeaa")[::2]), - (OP_BINARY, b"tea"), - ) - def test_prepare_data_list(self): with self.assertRaises(TypeError): prepare_data([]) @@ -246,9 +240,6 @@ def 
test_prepare_ctrl_bytearray(self): def test_prepare_ctrl_memoryview(self): self.assertEqual(prepare_ctrl(memoryview(b"tea")), b"tea") - def test_prepare_ctrl_non_contiguous_memoryview(self): - self.assertEqual(prepare_ctrl(memoryview(b"tteeaa")[::2]), b"tea") - def test_prepare_ctrl_list(self): with self.assertRaises(TypeError): prepare_ctrl([]) diff --git a/tests/test_utils.py b/tests/test_utils.py index b490c2409..a9ea8dcbd 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -43,21 +43,18 @@ def test_apply_mask(self): self.assertEqual(result, data_out) def test_apply_mask_memoryview(self): - for data_type, mask_type in self.apply_mask_type_combos: + for mask_type in [bytes, bytearray]: for data_in, mask, data_out in self.apply_mask_test_values: - data_in, mask = data_type(data_in), mask_type(mask) - data_in, mask = memoryview(data_in), memoryview(mask) + data_in, mask = memoryview(data_in), mask_type(mask) with self.subTest(data_in=data_in, mask=mask): result = self.apply_mask(data_in, mask) self.assertEqual(result, data_out) def test_apply_mask_non_contiguous_memoryview(self): - for data_type, mask_type in self.apply_mask_type_combos: + for mask_type in [bytes, bytearray]: for data_in, mask, data_out in self.apply_mask_test_values: - data_in, mask = data_type(data_in), mask_type(mask) - data_in, mask = memoryview(data_in), memoryview(mask) - data_in, mask = data_in[::-1], mask[::-1] + data_in, mask = memoryview(data_in)[::-1], mask_type(mask)[::-1] data_out = data_out[::-1] with self.subTest(data_in=data_in, mask=mask): @@ -92,16 +89,3 @@ class SpeedupsTests(ApplyMaskTests): @staticmethod def apply_mask(*args, **kwargs): return c_apply_mask(*args, **kwargs) - - def test_apply_mask_non_contiguous_memoryview(self): - for data_type, mask_type in self.apply_mask_type_combos: - for data_in, mask, data_out in self.apply_mask_test_values: - data_in, mask = data_type(data_in), mask_type(mask) - data_in, mask = memoryview(data_in), memoryview(mask) - data_in, mask = data_in[::-1], mask[::-1] - data_out = data_out[::-1] - - with self.subTest(data_in=data_in, mask=mask): - # The C extension only supports contiguous memoryviews. - with self.assertRaises(TypeError): - self.apply_mask(data_in, mask) From 6167b5d8d8f7ec7d96f925089813503ee53b2983 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Fri, 11 Dec 2020 22:02:12 +0100 Subject: [PATCH 245/281] Clarify there's no guarantee to yield control. Fix #865. --- src/websockets/legacy/protocol.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/websockets/legacy/protocol.py b/src/websockets/legacy/protocol.py index e4592b8a0..aa1b156c6 100644 --- a/src/websockets/legacy/protocol.py +++ b/src/websockets/legacy/protocol.py @@ -555,15 +555,15 @@ async def send( its :meth:`~dict.keys` method and pass the result to :meth:`send`. Canceling :meth:`send` is discouraged. Instead, you should close the - connection with :meth:`close`. Indeed, there only two situations where - :meth:`send` yields control to the event loop: + connection with :meth:`close`. Indeed, there are only two situations + where :meth:`send` may yield control to the event loop: 1. The write buffer is full. If you don't want to wait until enough data is sent, your only alternative is to close the connection. :meth:`close` will likely time out then abort the TCP connection. - 2. ``message`` is an asynchronous iterator. Stopping in the middle of - a fragmented message will cause a protocol error. Closing the - connection has the same effect. + 2. 
``message`` is an asynchronous iterator that yields control. + Stopping in the middle of a fragmented message will cause a + protocol error. Closing the connection has the same effect. :raises TypeError: for unsupported inputs From dccba0efb3bcb554fad85d72b4f6aa392626caac Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 13 Dec 2020 10:57:15 +0100 Subject: [PATCH 246/281] Fix sending fragmented, compressed messages. Fix #866. --- docs/changelog.rst | 2 ++ .../extensions/permessage_deflate.py | 26 +++++++++++-------- tests/extensions/test_permessage_deflate.py | 6 ++--- 3 files changed, 20 insertions(+), 14 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 8d255fdfd..e8a41b53c 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -55,6 +55,8 @@ fixing regressions shortly after a release. * Raised an error when passing a :class:`dict` to :meth:`~legacy.protocol.WebSocketCommonProtocol.send`. +* Fixed sending fragmented, compressed messages. + * Fixed ``Host`` header sent when connecting to an IPv6 address. * Aligned maximum cookie size with popular web browsers. diff --git a/src/websockets/extensions/permessage_deflate.py b/src/websockets/extensions/permessage_deflate.py index 9a3fc4ba5..4f520af38 100644 --- a/src/websockets/extensions/permessage_deflate.py +++ b/src/websockets/extensions/permessage_deflate.py @@ -100,7 +100,7 @@ def decode(self, frame: Frame, *, max_size: Optional[int] = None) -> Frame: return frame # Handle continuation data frames: - # - skip if the initial data frame wasn't encoded + # - skip if the message isn't encoded # - reset "decode continuation data" flag if it's a final frame if frame.opcode == OP_CONT: if not self.decode_cont_data: @@ -109,21 +109,23 @@ def decode(self, frame: Frame, *, max_size: Optional[int] = None) -> Frame: self.decode_cont_data = False # Handle text and binary data frames: - # - skip if the frame isn't encoded + # - skip if the message isn't encoded + # - unset the rsv1 flag on the first frame of a compressed message # - set "decode continuation data" flag if it's a non-final frame else: if not frame.rsv1: return frame - if not frame.fin: # frame.rsv1 is True at this point + frame = frame._replace(rsv1=False) + if not frame.fin: self.decode_cont_data = True # Re-initialize per-message decoder. if self.remote_no_context_takeover: self.decoder = zlib.decompressobj(wbits=-self.remote_max_window_bits) - # Uncompress compressed frames. Protect against zip bombs by - # preventing zlib from decompressing more than max_length bytes - # (except when the limit is disabled with max_size = None). + # Uncompress data. Protect against zip bombs by preventing zlib from + # decompressing more than max_length bytes (except when the limit is + # disabled with max_size = None). data = frame.data if frame.fin: data += _EMPTY_UNCOMPRESSED_BLOCK @@ -136,7 +138,7 @@ def decode(self, frame: Frame, *, max_size: Optional[int] = None) -> Frame: if frame.fin and self.remote_no_context_takeover: del self.decoder - return frame._replace(data=data, rsv1=False) + return frame._replace(data=data) def encode(self, frame: Frame) -> Frame: """ @@ -147,17 +149,19 @@ def encode(self, frame: Frame) -> Frame: if frame.opcode in CTRL_OPCODES: return frame - # Since we always encode and never fragment messages, there's no logic - # similar to decode() here at this time. + # Since we always encode messages, there's no "encode continuation + # data" flag similar to "decode continuation data" at this time. 
if frame.opcode != OP_CONT: + # Set the rsv1 flag on the first frame of a compressed message. + frame = frame._replace(rsv1=True) # Re-initialize per-message decoder. if self.local_no_context_takeover: self.encoder = zlib.compressobj( wbits=-self.local_max_window_bits, **self.compress_settings ) - # Compress data frames. + # Compress data. data = self.encoder.compress(frame.data) + self.encoder.flush(zlib.Z_SYNC_FLUSH) if frame.fin and data.endswith(_EMPTY_UNCOMPRESSED_BLOCK): data = data[:-4] @@ -166,7 +170,7 @@ def encode(self, frame: Frame) -> Frame: if frame.fin and self.local_no_context_takeover: del self.encoder - return frame._replace(data=data, rsv1=True) + return frame._replace(data=data) def _build_parameters( diff --git a/tests/extensions/test_permessage_deflate.py b/tests/extensions/test_permessage_deflate.py index 328861e58..7fc4c1c3a 100644 --- a/tests/extensions/test_permessage_deflate.py +++ b/tests/extensions/test_permessage_deflate.py @@ -113,10 +113,10 @@ def test_encode_decode_fragmented_text_frame(self): frame1._replace(rsv1=True, data=b"JNL;\xbc\x12\x00\x00\x00\xff\xff"), ) self.assertEqual( - enc_frame2, frame2._replace(rsv1=True, data=b"RPS\x00\x00\x00\x00\xff\xff") + enc_frame2, frame2._replace(data=b"RPS\x00\x00\x00\x00\xff\xff") ) self.assertEqual( - enc_frame3, frame3._replace(rsv1=True, data=b"J.\xca\xcf,.N\xcc+)\x06\x00") + enc_frame3, frame3._replace(data=b"J.\xca\xcf,.N\xcc+)\x06\x00") ) dec_frame1 = self.extension.decode(enc_frame1) @@ -138,7 +138,7 @@ def test_encode_decode_fragmented_binary_frame(self): enc_frame1, frame1._replace(rsv1=True, data=b"*IMT\x00\x00\x00\x00\xff\xff") ) self.assertEqual( - enc_frame2, frame2._replace(rsv1=True, data=b"*\xc9\xccM\x05\x00") + enc_frame2, frame2._replace(data=b"*\xc9\xccM\x05\x00") ) dec_frame1 = self.extension.decode(enc_frame1) From 97a601454e193d1f30d3069d8015d086a5b83aa2 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Fri, 1 Jan 2021 18:21:24 +0100 Subject: [PATCH 247/281] Support serve() with existing Unix socket. Fix #878. --- docs/changelog.rst | 2 ++ src/websockets/legacy/server.py | 15 ++++++++------- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index e8a41b53c..de4483b17 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -59,6 +59,8 @@ fixing regressions shortly after a release. * Fixed ``Host`` header sent when connecting to an IPv6 address. +* Fixed starting a Unix server listening on an existing socket. + * Aligned maximum cookie size with popular web browsers. * Ensured cancellation always propagates, even on Python versions where diff --git a/src/websockets/legacy/server.py b/src/websockets/legacy/server.py index 4dea9459d..42e0d6cf0 100644 --- a/src/websockets/legacy/server.py +++ b/src/websockets/legacy/server.py @@ -875,6 +875,7 @@ def __init__( select_subprotocol: Optional[ Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol] ] = None, + unix: bool = False, **kwargs: Any, ) -> None: # Backwards compatibility: close_timeout used to be called timeout. @@ -931,16 +932,16 @@ def __init__( select_subprotocol=select_subprotocol, ) - if path is None: - create_server = functools.partial( - loop.create_server, factory, host, port, **kwargs - ) - else: + if unix: # unix_serve(path) must not specify host and port parameters. 
assert host is None and port is None create_server = functools.partial( loop.create_unix_server, factory, path, **kwargs ) + else: + create_server = functools.partial( + loop.create_server, factory, host, port, **kwargs + ) # This is a coroutine function. self._create_server = create_server @@ -981,7 +982,7 @@ async def __await_impl__(self) -> WebSocketServer: def unix_serve( ws_handler: Callable[[WebSocketServerProtocol, str], Awaitable[Any]], - path: str, + path: Optional[str] = None, **kwargs: Any, ) -> Serve: """ @@ -997,4 +998,4 @@ def unix_serve( :param path: file system path to the Unix socket """ - return serve(ws_handler, path=path, **kwargs) + return serve(ws_handler, path=path, unix=True, **kwargs) From aa93c4ceca90a1798f86b2fc2b110a42f308d721 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Fri, 1 Jan 2021 18:34:06 +0100 Subject: [PATCH 248/281] Make black happy. --- tests/extensions/test_permessage_deflate.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/extensions/test_permessage_deflate.py b/tests/extensions/test_permessage_deflate.py index 7fc4c1c3a..908cd91a4 100644 --- a/tests/extensions/test_permessage_deflate.py +++ b/tests/extensions/test_permessage_deflate.py @@ -135,10 +135,12 @@ def test_encode_decode_fragmented_binary_frame(self): enc_frame2 = self.extension.encode(frame2) self.assertEqual( - enc_frame1, frame1._replace(rsv1=True, data=b"*IMT\x00\x00\x00\x00\xff\xff") + enc_frame1, + frame1._replace(rsv1=True, data=b"*IMT\x00\x00\x00\x00\xff\xff"), ) self.assertEqual( - enc_frame2, frame2._replace(data=b"*\xc9\xccM\x05\x00") + enc_frame2, + frame2._replace(data=b"*\xc9\xccM\x05\x00"), ) dec_frame1 = self.extension.decode(enc_frame1) From dda3dfa992ddf6045be48c34143e4c1656dff9d4 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Tue, 20 Apr 2021 19:31:39 +0200 Subject: [PATCH 249/281] Document how to run on Heroku. Fix #929. --- docs/heroku.rst | 153 +++++++++++++++++++++++++++++++++++++ docs/index.rst | 1 + docs/spelling_wordlist.txt | 2 + 3 files changed, 156 insertions(+) create mode 100644 docs/heroku.rst diff --git a/docs/heroku.rst b/docs/heroku.rst new file mode 100644 index 000000000..31c4b3f19 --- /dev/null +++ b/docs/heroku.rst @@ -0,0 +1,153 @@ +Deploying to Heroku +=================== + +This guide describes how to deploy a websockets server to Heroku_. We're going +to deploy a very simple app. The process would be identical for a more +realistic app. + +.. _Heroku: https://www.heroku.com/ + +Create application +------------------ + +Deploying to Heroku requires a git repository. Let's initialize one: + +.. code:: console + + $ mkdir websockets-echo + $ cd websockets-echo + $ git init . + Initialized empty Git repository in websockets-echo/.git/ + $ git commit --allow-empty -m "Initial commit." + [master (root-commit) 1e7947d] Initial commit. + +Follow the `set-up instructions`_ to install the Heroku CLI and to log in, if +you haven't done that yet. + +.. _set-up instructions: https://devcenter.heroku.com/articles/getting-started-with-python#set-up + +Then, create a Heroku app — if you follow these instructions step-by-step, +you'll have to pick a different name because I'm already using +``websockets-echo`` on Heroku: + +.. code:: console + + $ $ heroku create websockets-echo + Creating ⬢ websockets-echo... done + https://websockets-echo.herokuapp.com/ | https://git.heroku.com/websockets-echo.git + +Here's the implementation of the app, an echo server. Save it in a file called +``app.py``: + +.. 
code:: python + + #!/usr/bin/env python + + import asyncio + import os + + import websockets + + async def echo(websocket, path): + async for message in websocket: + await websocket.send(message) + + start_server = websockets.serve(echo, "", int(os.environ["PORT"])) + + asyncio.get_event_loop().run_until_complete(start_server) + asyncio.get_event_loop().run_forever() + +The server relies on the ``$PORT`` environment variable to tell on which port +it will listen, according to Heroku's conventions. + +Configure deployment +-------------------- + +In order to build the app, Heroku needs to know that it depends on websockets. +Create a ``requirements.txt`` file containing this line: + +.. code:: + + websockets + +Heroku also needs to know how to run the app. Create a ``Procfile`` with this +content: + +.. code:: + + web: python app.py + +Confirm that you created the correct files and commit them to git: + +.. code:: console + + $ ls + Procfile app.py requirements.txt + $ git add . + $ git commit -m "Deploy echo server to Heroku." + [master 8418c62] Deploy echo server to Heroku. +  3 files changed, 19 insertions(+) +  create mode 100644 Procfile +  create mode 100644 app.py +  create mode 100644 requirements.txt + +Deploy +------ + +Our app is ready. Let's deploy it! + +.. code:: console + + $ git push heroku master + + ... lots of output... + + remote: -----> Launching... + remote: Released v3 + remote: https://websockets-echo.herokuapp.com/ deployed to Heroku + remote: + remote: Verifying deploy... done. + To https://git.heroku.com/websockets-echo.git +  * [new branch] master -> master + +Validate deployment +------------------- + +Of course we'd like to confirm that our application is running as expected! + +Since it's a WebSocket server, we need a WebSocket client, such as the +interactive client that comes with websockets. + +If you're currently building a websockets server, perhaps you're already in a +virtualenv where websockets is installed. If not, you can install it in a new +virtualenv as follows: + +.. code:: console + + $ python -m venv websockets-client + $ . websockets-client/bin/activate + $ pip install websockets + +Connect the interactive client — using the name of your Heroku app instead of +``websockets-echo``: + +.. code:: console + + $ python -m websockets wss://websockets-echo.herokuapp.com/ + Connected to wss://websockets-echo.herokuapp.com/. + > + +Great! Our app is running! + +In this example, I used a secure connection (``wss://``). It worked because +Heroku served a valid TLS certificate for ``websockets-echo.herokuapp.com``. +An insecure connection (``ws://``) would also work. + +Once you're connected, you can send any message and the server will echo it, +then press Ctrl-D to terminate the connection: + +.. code:: console + + > Hello! + < Hello! + Connection closed: code = 1000 (OK), no reason. diff --git a/docs/index.rst b/docs/index.rst index 1b2f85f0a..90262ba9a 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -62,6 +62,7 @@ These guides will help you build and deploy a ``websockets`` application. 
cheatsheet deployment extensions + heroku Reference --------- diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index dd3500b73..5e0a254c7 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -22,6 +22,7 @@ cryptocurrency Ctrl daemonize fractalideas +IPv iterable keepalive KiB @@ -48,6 +49,7 @@ Unparse unregister uple username +virtualenv websocket WebSocket websockets From 93f78884ffcaf71a60d4ad20eabb603224453fa2 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Tue, 20 Apr 2021 19:37:01 +0200 Subject: [PATCH 250/281] Bump year. --- LICENSE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LICENSE b/LICENSE index b2962adba..119b29ef3 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2013-2019 Aymeric Augustin and contributors. +Copyright (c) 2013-2021 Aymeric Augustin and contributors. All rights reserved. Redistribution and use in source and binary forms, with or without From 6b9e821183f8b42984e49313a7a3f5ccdd6fa8fc Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Thu, 22 Apr 2021 09:02:14 +0200 Subject: [PATCH 251/281] Clarify backwards-compatibility policy. --- docs/changelog.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index de4483b17..f3bc3a297 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -11,7 +11,7 @@ Backwards-compatibility policy ``websockets`` also aims at providing the best API for WebSocket in Python. While we value stability, we value progress more. When an improvement requires -changing the API, we make the change and document it below. +changing a public API, we make the change and document it in this changelog. When possible with reasonable effort, we preserve backwards-compatibility for five years after the release that introduced the change. @@ -20,6 +20,9 @@ When a release contains backwards-incompatible API changes, the major version is increased, else the minor version is increased. Patch versions are only for fixing regressions shortly after a release. +Only documented APIs are public. Undocumented APIs are considered private. +They may change at any time. + 9.0 ... From c2c8bffcf5e8cae8a648c06e4cf64943550be216 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Thu, 22 Apr 2021 09:10:26 +0200 Subject: [PATCH 252/281] Improve explanation of ongoing refactoring. --- docs/changelog.rst | 32 ++++++++++++++++++-------------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index f3bc3a297..4b1843713 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -3,6 +3,8 @@ Changelog .. currentmodule:: websockets +.. _backwards-compatibility policy: + Backwards-compatibility policy .............................. @@ -32,22 +34,24 @@ They may change at any time. **Version 9.0 moves or deprecates several APIs.** + Aliases provide backwards compatibility for all previously public APIs. + * :class:`~datastructures.Headers` and :exc:`~datastructures.MultipleValuesError` were moved from - ``websockets.http`` to :mod:`websockets.datastructures`. - - * ``websockets.client``, ``websockets.server``, ``websockets.protocol``, - and ``websockets.auth`` were moved to :mod:`websockets.legacy.client`, - :mod:`websockets.legacy.server`, :mod:`websockets.legacy.protocol`, and - :mod:`websockets.legacy.auth` respectively. - - * ``websockets.handshake`` is deprecated. - - * ``websockets.http`` is deprecated. - - * ``websockets.framing`` is deprecated. 
- - Aliases provide backwards compatibility for all previously public APIs. + ``websockets.http`` to :mod:`websockets.datastructures`. If you're using + them, you should adjust the import path. + + * The ``client``, ``server``, ``protocol``, and ``auth`` modules were + moved from the ``websockets`` package to ``websockets.legacy`` + sub-package, as part of an upcoming refactoring. Despite the name, + they're still fully supported. The refactoring should be a transparent + upgrade for most uses when it's available. The legacy implementation + will be preserved according to the `backwards-compatibility policy`_. + + * The ``handshake``, ``http``, and ``framing`` modules in the + ``websockets`` package are deprecated. These modules provided low-level + APIs for reuse by other WebSocket implementations, but that never + happened and keeping these APIs public prevents improvements. * Added compatibility with Python 3.9. From ce1f4a071cc6651ff8bcf89f0919721aa9ca4574 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Thu, 22 Apr 2021 09:37:17 +0200 Subject: [PATCH 253/281] Deprecate headers and uri as well. They aren't involved in any public API any more. --- docs/changelog.rst | 9 +++++---- src/websockets/__init__.py | 4 ++-- src/websockets/headers.py | 3 --- 3 files changed, 7 insertions(+), 9 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 4b1843713..9b2fa4441 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -48,10 +48,11 @@ They may change at any time. upgrade for most uses when it's available. The legacy implementation will be preserved according to the `backwards-compatibility policy`_. - * The ``handshake``, ``http``, and ``framing`` modules in the - ``websockets`` package are deprecated. These modules provided low-level - APIs for reuse by other WebSocket implementations, but that never - happened and keeping these APIs public prevents improvements. + * The ``framing``, ``handshake``, ``headers``, ``http``, and ``uri`` + modules in the ``websockets`` package are deprecated. These modules + provided low-level APIs for reuse by other WebSocket implementations, + but that never happened. Keeping these APIs public makes it more + difficult to improve websockets for no actual benefit. * Added compatibility with Python 3.9. diff --git a/src/websockets/__init__.py b/src/websockets/__init__.py index 580a3960f..65d9fb913 100644 --- a/src/websockets/__init__.py +++ b/src/websockets/__init__.py @@ -96,11 +96,11 @@ "ExtensionHeader": ".typing", "ExtensionParameter": ".typing", "Subprotocol": ".typing", - "parse_uri": ".uri", - "WebSocketURI": ".uri", }, deprecated_aliases={ "framing": ".legacy", "handshake": ".legacy", + "parse_uri": ".uri", + "WebSocketURI": ".uri", }, ) diff --git a/src/websockets/headers.py b/src/websockets/headers.py index 256c66bb1..6779c9c04 100644 --- a/src/websockets/headers.py +++ b/src/websockets/headers.py @@ -2,9 +2,6 @@ :mod:`websockets.headers` provides parsers and serializers for HTTP headers used in WebSocket handshake messages. -These APIs cannot be imported from :mod:`websockets`. They must be imported -from :mod:`websockets.headers`. - """ import base64 From bb40530d4051dd1dbc0522e4d9e3e72cc7e25436 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Thu, 22 Apr 2021 09:40:27 +0200 Subject: [PATCH 254/281] Remove deprecated modules from API documentation. 
--- docs/api.rst | 40 ++++++++-------------------------------- 1 file changed, 8 insertions(+), 32 deletions(-) diff --git a/docs/api.rst b/docs/api.rst index c73cf59d3..2adc0dde4 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -23,11 +23,8 @@ For convenience, public APIs can be imported directly from the :mod:`websockets` package, unless noted otherwise. Anything that isn't listed in this document is a private API. -High-level ----------- - Server -...... +------ .. automodule:: websockets.legacy.server @@ -51,7 +48,7 @@ Server .. automethod:: select_subprotocol Client -...... +------ .. automodule:: websockets.legacy.client @@ -66,7 +63,7 @@ Client .. automethod:: handshake Shared -...... +------ .. automodule:: websockets.legacy.protocol @@ -88,7 +85,7 @@ Shared .. autoattribute:: closed Types -..... +----- .. automodule:: websockets.typing @@ -96,7 +93,7 @@ Types Per-Message Deflate Extension -............................. +----------------------------- .. automodule:: websockets.extensions.permessage_deflate @@ -105,7 +102,7 @@ Per-Message Deflate Extension .. autoclass:: ClientPerMessageDeflateFactory HTTP Basic Auth -............... +--------------- .. automodule:: websockets.legacy.auth @@ -116,34 +113,13 @@ HTTP Basic Auth .. automethod:: process_request Data structures -............... +--------------- .. automodule:: websockets.datastructures :members: Exceptions -.......... +---------- .. automodule:: websockets.exceptions :members: - -Low-level ---------- - -Data transfer -............. - -.. automodule:: websockets.framing - :members: - -URI parser -.......... - -.. automodule:: websockets.uri - :members: - -Utilities -......... - -.. automodule:: websockets.headers - :members: From c0002603eb39a9a85f89a0c83337ce398aeea7de Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 25 Apr 2021 21:39:57 +0200 Subject: [PATCH 255/281] Make HeadersLike a public API. Refs #845, #854. --- src/websockets/datastructures.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/websockets/datastructures.py b/src/websockets/datastructures.py index f70d92ad7..c8e17fa98 100644 --- a/src/websockets/datastructures.py +++ b/src/websockets/datastructures.py @@ -1,5 +1,5 @@ """ -This module defines a data structure for manipulating HTTP headers. +:mod:`websockets.datastructures` defines a class for manipulating HTTP headers. """ @@ -16,7 +16,7 @@ ) -__all__ = ["Headers", "MultipleValuesError"] +__all__ = ["Headers", "HeadersLike", "MultipleValuesError"] class MultipleValuesError(LookupError): @@ -63,7 +63,7 @@ class Headers(MutableMapping[str, str]): As long as no header occurs multiple times, :class:`Headers` behaves like :class:`dict`, except keys are lower-cased to provide case-insensitivity. - Two methods support support manipulating multiple values explicitly: + Two methods support manipulating multiple values explicitly: - :meth:`get_all` returns a list of all values for a header; - :meth:`raw_items` returns an iterator of ``(name, values)`` pairs. 
@@ -157,3 +157,9 @@ def raw_items(self) -> Iterator[Tuple[str, str]]: HeadersLike = Union[Headers, Mapping[str, str], Iterable[Tuple[str, str]]] +HeadersLike__doc__ = """Types accepted wherever :class:`Headers` is expected""" +# Remove try / except when dropping support for Python < 3.7 +try: + HeadersLike.__doc__ = HeadersLike__doc__ +except AttributeError: # pragma: no cover + pass From fa295a75fd0fcf53906d7aa0fe4fdcc8c7d81cd2 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 25 Apr 2021 21:41:39 +0200 Subject: [PATCH 256/281] Rewrite extensions guide. --- docs/deployment.rst | 2 ++ docs/extensions.rst | 79 ++++++++++++++++++++++++++------------------- 2 files changed, 47 insertions(+), 34 deletions(-) diff --git a/docs/deployment.rst b/docs/deployment.rst index ed025094d..2331af936 100644 --- a/docs/deployment.rst +++ b/docs/deployment.rst @@ -66,6 +66,8 @@ Memory usage of a single connection is the sum of: Baseline ........ +.. _compression-settings: + Compression settings are the main factor affecting the baseline amount of memory used by each connection. diff --git a/docs/extensions.rst b/docs/extensions.rst index dea91219e..151a7e297 100644 --- a/docs/extensions.rst +++ b/docs/extensions.rst @@ -1,12 +1,12 @@ Extensions ========== -.. currentmodule:: websockets +.. currentmodule:: websockets.extensions The WebSocket protocol supports extensions_. -At the time of writing, there's only one `registered extension`_, WebSocket -Per-Message Deflate, specified in :rfc:`7692`. +At the time of writing, there's only one `registered extension`_ with a public +specification, WebSocket Per-Message Deflate, specified in :rfc:`7692`. .. _extensions: https://tools.ietf.org/html/rfc6455#section-9 .. _registered extension: https://www.iana.org/assignments/websocket/websocket.xhtml#extension-name @@ -14,24 +14,31 @@ Per-Message Deflate, specified in :rfc:`7692`. Per-Message Deflate ------------------- -:func:`~legacy.server.serve()` and :func:`~legacy.client.connect` enable the -Per-Message Deflate extension by default. You can disable this with -``compression=None``. +:func:`~websockets.legacy.client.connect` and +:func:`~websockets.legacy.server.serve` enable the Per-Message Deflate +extension by default. + +If you want to disable it, set ``compression=None``:: + + import websockets + + websockets.connect(..., compression=None) + + websockets.serve(..., compression=None) -You can also configure the Per-Message Deflate extension explicitly if you -want to customize its parameters. .. 
_per-message-deflate-configuration-example: -Here's an example on the server side:: +You can also configure the Per-Message Deflate extension explicitly if you +want to customize compression settings:: import websockets from websockets.extensions import permessage_deflate - websockets.serve( + websockets.connect( ..., extensions=[ - permessage_deflate.ServerPerMessageDeflateFactory( + permessage_deflate.ClientPerMessageDeflateFactory( server_max_window_bits=11, client_max_window_bits=11, compress_settings={'memLevel': 4}, @@ -39,15 +46,10 @@ Here's an example on the server side:: ], ) -Here's an example on the client side:: - - import websockets - from websockets.extensions import permessage_deflate - - websockets.connect( + websockets.serve( ..., extensions=[ - permessage_deflate.ClientPerMessageDeflateFactory( + permessage_deflate.ServerPerMessageDeflateFactory( server_max_window_bits=11, client_max_window_bits=11, compress_settings={'memLevel': 4}, @@ -55,34 +57,43 @@ Here's an example on the client side:: ], ) +The window bits and memory level values chosen in these examples reduce memory +usage. You can read more about :ref:`optimizing compression settings +`. + Refer to the API documentation of -:class:`~extensions.permessage_deflate.ServerPerMessageDeflateFactory` and -:class:`~extensions.permessage_deflate.ClientPerMessageDeflateFactory` for -details. +:class:`~permessage_deflate.ClientPerMessageDeflateFactory` and +:class:`~permessage_deflate.ServerPerMessageDeflateFactory` for details. Writing an extension -------------------- During the opening handshake, WebSocket clients and servers negotiate which extensions will be used with which parameters. Then each frame is processed by -extensions before it's sent and after it's received. +extensions before being sent or after being received. + +As a consequence, writing an extension requires implementing several classes: + +* Extension Factory: it negotiates parameters and instantiates the extension. -As a consequence writing an extension requires implementing several classes: + Clients and servers require separate extension factories with distinct APIs. -1. Extension Factory: it negotiates parameters and instantiates the extension. - Clients and servers require separate extension factories with distinct APIs. + Extension factories are the public API of an extension. -2. Extension: it decodes incoming frames and encodes outgoing frames. If the - extension is symmetrical, clients and servers can use the same class. +* Extension: it decodes incoming frames and encodes outgoing frames. + + If the extension is symmetrical, clients and servers can use the same + class. + + Extensions are initialized by extension factories, so they don't need to be + part of the public API of an extension. ``websockets`` provides abstract base classes for extension factories and -extensions. +extensions. See the API documentation for details on their methods: + +* :class:`~base.ClientExtensionFactory` and + :class:`~base.ServerExtensionFactory` for extension factories, -.. autoclass:: websockets.extensions.base.ServerExtensionFactory - :members: +* :class:`~base.Extension` for extensions. -.. autoclass:: websockets.extensions.base.ClientExtensionFactory - :members: -.. autoclass:: websockets.extensions.base.Extension - :members: From 835d16dfadd912766df99dac21e82c151eb1bda7 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 1 May 2021 10:47:38 +0200 Subject: [PATCH 257/281] Add example of client shutdown. Fix #933. 
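
Closing the connection is also how you stop a client that sits in an
"async for" loop over the connection when the trigger isn't a signal, for
instance when another task decides it's time to shut down. A rough sketch of
that variant, not part of the example/ directory (the URI and the sleep are
placeholders for a real application):

    import asyncio
    import websockets

    async def consumer(websocket):
        # The iterator ends cleanly once the connection is closed.
        async for message in websocket:
            print(message)

    async def main():
        async with websockets.connect("ws://localhost:8765") as websocket:
            consumer_task = asyncio.ensure_future(consumer(websocket))
            await asyncio.sleep(10)   # stand-in for a real stop condition
            await websocket.close()   # ends the loop in consumer()
            await consumer_task

    asyncio.get_event_loop().run_until_complete(main())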
--- docs/deployment.rst | 2 +- docs/faq.rst | 11 +++++++++++ example/shutdown_client.py | 19 +++++++++++++++++++ example/{shutdown.py => shutdown_server.py} | 0 4 files changed, 31 insertions(+), 1 deletion(-) create mode 100755 example/shutdown_client.py rename example/{shutdown.py => shutdown_server.py} (100%) diff --git a/docs/deployment.rst b/docs/deployment.rst index 2331af936..8baa8836c 100644 --- a/docs/deployment.rst +++ b/docs/deployment.rst @@ -34,7 +34,7 @@ On Unix systems, shutdown is usually triggered by sending a signal. Here's a full example for handling SIGTERM on Unix: -.. literalinclude:: ../example/shutdown.py +.. literalinclude:: ../example/shutdown_server.py :emphasize-lines: 13,17-19 This example is easily adapted to handle other signals. If you override the diff --git a/docs/faq.rst b/docs/faq.rst index eee14dda8..ff91105b4 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -142,6 +142,17 @@ See `issue 414`_. .. _issue 414: https://github.com/aaugustin/websockets/issues/414 +How do I stop a client that is continuously processing messages? +................................................................ + +You can close the connection. + +Here's an example that terminates cleanly when it receives SIGTERM on Unix: + +.. literalinclude:: ../example/shutdown_client.py + :emphasize-lines: 10-13 + + How do I disable TLS/SSL certificate verification? .................................................. diff --git a/example/shutdown_client.py b/example/shutdown_client.py new file mode 100755 index 000000000..f21c0f6fa --- /dev/null +++ b/example/shutdown_client.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python + +import asyncio +import signal +import websockets + +async def client(): + uri = "ws://localhost:8765" + async with websockets.connect(uri) as websocket: + # Close the connection when receiving SIGTERM. + loop = asyncio.get_event_loop() + loop.add_signal_handler( + signal.SIGTERM, loop.create_task, websocket.close()) + + # Process messages received on the connection. + async for message in websocket: + ... + +asyncio.get_event_loop().run_until_complete(client()) diff --git a/example/shutdown.py b/example/shutdown_server.py similarity index 100% rename from example/shutdown.py rename to example/shutdown_server.py From cf2453625a023868bfe760dc438a500e3ebcb931 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 1 May 2021 21:45:32 +0200 Subject: [PATCH 258/281] Clean up signature of Protocol classes. 
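
The keyword arguments that only exist for backwards compatibility (timeout,
klass, legacy_recv) and the unix / path plumbing used by unix_serve() move
out of the documented signatures of Connect and Serve; they are now popped
from **kwargs instead, so calls written against the old signatures keep
working. A quick sketch, assuming the echo server from the examples is
listening on ws://localhost:8765:

    import asyncio
    import websockets

    async def main():
        # Deprecated spelling: timeout was renamed to close_timeout and is
        # still accepted, with a DeprecationWarning.
        async with websockets.connect("ws://localhost:8765", timeout=5):
            pass
        # Current spelling.
        async with websockets.connect("ws://localhost:8765", close_timeout=5):
            pass

    asyncio.get_event_loop().run_until_complete(main())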
--- src/websockets/legacy/client.py | 10 ++++++---- src/websockets/legacy/server.py | 13 +++++++------ 2 files changed, 13 insertions(+), 10 deletions(-) diff --git a/src/websockets/legacy/client.py b/src/websockets/legacy/client.py index 27f6e8209..1c0ecf62f 100644 --- a/src/websockets/legacy/client.py +++ b/src/websockets/legacy/client.py @@ -374,7 +374,6 @@ def __init__( self, uri: str, *, - path: Optional[str] = None, create_protocol: Optional[Callable[[Any], WebSocketClientProtocol]] = None, ping_interval: Optional[float] = 20, ping_timeout: Optional[float] = 20, @@ -384,9 +383,6 @@ def __init__( read_limit: int = 2 ** 16, write_limit: int = 2 ** 16, loop: Optional[asyncio.AbstractEventLoop] = None, - legacy_recv: bool = False, - klass: Optional[Type[WebSocketClientProtocol]] = None, - timeout: Optional[float] = None, compression: Optional[str] = "deflate", origin: Optional[Origin] = None, extensions: Optional[Sequence[ClientExtensionFactory]] = None, @@ -395,6 +391,7 @@ def __init__( **kwargs: Any, ) -> None: # Backwards compatibility: close_timeout used to be called timeout. + timeout: Optional[float] = kwargs.pop("timeout", None) if timeout is None: timeout = 10 else: @@ -404,6 +401,7 @@ def __init__( close_timeout = timeout # Backwards compatibility: create_protocol used to be called klass. + klass: Optional[Type[WebSocketClientProtocol]] = kwargs.pop("klass", None) if klass is None: klass = WebSocketClientProtocol else: @@ -412,6 +410,9 @@ def __init__( if create_protocol is None: create_protocol = klass + # Backwards compatibility: recv() used to return None on closed connections + legacy_recv: bool = kwargs.pop("legacy_recv", False) + if loop is None: loop = asyncio.get_event_loop() @@ -449,6 +450,7 @@ def __init__( extra_headers=extra_headers, ) + path: Optional[str] = kwargs.pop("path", None) if path is None: host: Optional[str] port: Optional[int] diff --git a/src/websockets/legacy/server.py b/src/websockets/legacy/server.py index 42e0d6cf0..b7eed52b0 100644 --- a/src/websockets/legacy/server.py +++ b/src/websockets/legacy/server.py @@ -851,7 +851,6 @@ def __init__( host: Optional[Union[str, Sequence[str]]] = None, port: Optional[int] = None, *, - path: Optional[str] = None, create_protocol: Optional[Callable[[Any], WebSocketServerProtocol]] = None, ping_interval: Optional[float] = 20, ping_timeout: Optional[float] = 20, @@ -861,9 +860,6 @@ def __init__( read_limit: int = 2 ** 16, write_limit: int = 2 ** 16, loop: Optional[asyncio.AbstractEventLoop] = None, - legacy_recv: bool = False, - klass: Optional[Type[WebSocketServerProtocol]] = None, - timeout: Optional[float] = None, compression: Optional[str] = "deflate", origins: Optional[Sequence[Optional[Origin]]] = None, extensions: Optional[Sequence[ServerExtensionFactory]] = None, @@ -875,10 +871,10 @@ def __init__( select_subprotocol: Optional[ Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol] ] = None, - unix: bool = False, **kwargs: Any, ) -> None: # Backwards compatibility: close_timeout used to be called timeout. + timeout: Optional[float] = kwargs.pop("timeout", None) if timeout is None: timeout = 10 else: @@ -888,6 +884,7 @@ def __init__( close_timeout = timeout # Backwards compatibility: create_protocol used to be called klass. 
+ klass: Optional[Type[WebSocketServerProtocol]] = kwargs.pop("klass", None) if klass is None: klass = WebSocketServerProtocol else: @@ -896,6 +893,9 @@ def __init__( if create_protocol is None: create_protocol = klass + # Backwards compatibility: recv() used to return None on closed connections + legacy_recv: bool = kwargs.pop("legacy_recv", False) + if loop is None: loop = asyncio.get_event_loop() @@ -932,7 +932,8 @@ def __init__( select_subprotocol=select_subprotocol, ) - if unix: + if kwargs.pop("unix", False): + path: Optional[str] = kwargs.pop("path", None) # unix_serve(path) must not specify host and port parameters. assert host is None and port is None create_server = functools.partial( From 9c818367b2177aae6c90c3a5c4fad26e540c81bc Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 1 May 2021 21:51:07 +0200 Subject: [PATCH 259/281] Support existing Unix sockets in unix_connect. The same fix was made for the server side, but not the client side. --- docs/changelog.rst | 2 +- src/websockets/legacy/client.py | 18 ++++++++++-------- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 9b2fa4441..91ea23dc9 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -67,7 +67,7 @@ They may change at any time. * Fixed ``Host`` header sent when connecting to an IPv6 address. -* Fixed starting a Unix server listening on an existing socket. +* Fixed creating a client or a server with an existing Unix socket. * Aligned maximum cookie size with popular web browsers. diff --git a/src/websockets/legacy/client.py b/src/websockets/legacy/client.py index 1c0ecf62f..219c3c9bc 100644 --- a/src/websockets/legacy/client.py +++ b/src/websockets/legacy/client.py @@ -450,8 +450,12 @@ def __init__( extra_headers=extra_headers, ) - path: Optional[str] = kwargs.pop("path", None) - if path is None: + if kwargs.pop("unix", False): + path: Optional[str] = kwargs.pop("path", None) + create_connection = functools.partial( + loop.create_unix_connection, factory, path, **kwargs + ) + else: host: Optional[str] port: Optional[int] if kwargs.get("sock") is None: @@ -465,10 +469,6 @@ def __init__( create_connection = functools.partial( loop.create_connection, factory, host, port, **kwargs ) - else: - create_connection = functools.partial( - loop.create_unix_connection, factory, path, **kwargs - ) # This is a coroutine function. self._create_connection = create_connection @@ -563,7 +563,9 @@ async def __await_impl__(self) -> WebSocketClientProtocol: connect = Connect -def unix_connect(path: str, uri: str = "ws://localhost/", **kwargs: Any) -> Connect: +def unix_connect( + path: Optional[str], uri: str = "ws://localhost/", **kwargs: Any +) -> Connect: """ Similar to :func:`connect`, but for connecting to a Unix socket. @@ -578,4 +580,4 @@ def unix_connect(path: str, uri: str = "ws://localhost/", **kwargs: Any) -> Conn :param uri: WebSocket URI """ - return connect(uri=uri, path=path, **kwargs) + return connect(uri=uri, path=path, unix=True, **kwargs) From 9223d7d72ab11824988442847d4c02d7524a61c1 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 1 May 2021 22:15:58 +0200 Subject: [PATCH 260/281] Restore backwards-compatibility for logger names. 
--- src/websockets/legacy/client.py | 2 +- src/websockets/legacy/protocol.py | 2 +- src/websockets/legacy/server.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/websockets/legacy/client.py b/src/websockets/legacy/client.py index 219c3c9bc..4000375fb 100644 --- a/src/websockets/legacy/client.py +++ b/src/websockets/legacy/client.py @@ -40,7 +40,7 @@ __all__ = ["connect", "unix_connect", "WebSocketClientProtocol"] -logger = logging.getLogger(__name__) +logger = logging.getLogger("websockets.client") class WebSocketClientProtocol(WebSocketCommonProtocol): diff --git a/src/websockets/legacy/protocol.py b/src/websockets/legacy/protocol.py index aa1b156c6..84af7b626 100644 --- a/src/websockets/legacy/protocol.py +++ b/src/websockets/legacy/protocol.py @@ -60,7 +60,7 @@ __all__ = ["WebSocketCommonProtocol"] -logger = logging.getLogger(__name__) +logger = logging.getLogger("websockets.protocol") # A WebSocket connection goes through the following four states, in order: diff --git a/src/websockets/legacy/server.py b/src/websockets/legacy/server.py index b7eed52b0..8e5f97a66 100644 --- a/src/websockets/legacy/server.py +++ b/src/websockets/legacy/server.py @@ -50,7 +50,7 @@ __all__ = ["serve", "unix_serve", "WebSocketServerProtocol", "WebSocketServer"] -logger = logging.getLogger(__name__) +logger = logging.getLogger("websockets.server") HeadersLikeOrCallable = Union[HeadersLike, Callable[[str, Headers], HeadersLike]] From fcb3a4c31838b797ff609d2fdb89db7f37c527ff Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 1 May 2021 22:32:20 +0200 Subject: [PATCH 261/281] Remove backwards-compatibility from docs after 5 years. --- src/websockets/legacy/protocol.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/src/websockets/legacy/protocol.py b/src/websockets/legacy/protocol.py index 84af7b626..56c4d5f6a 100644 --- a/src/websockets/legacy/protocol.py +++ b/src/websockets/legacy/protocol.py @@ -466,11 +466,6 @@ async def recv(self) -> Data: :exc:`~websockets.exceptions.ConnectionClosedError` after a protocol error or a network failure. - .. versionchanged:: 3.0 - - :meth:`recv` used to return ``None`` instead. Refer to the - changelog for details. - Canceling :meth:`recv` is safe. There's no risk of losing the next message. The next invocation of :meth:`recv` will return it. This makes it possible to enforce a timeout by wrapping :meth:`recv` in From d82a7a9de7cebec22bdcdf763ba4cb7ea75bdb76 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 1 May 2021 21:45:51 +0200 Subject: [PATCH 262/281] Revamp API documentation.
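This splits the single ``api.rst`` page into per-topic pages (client, server, extensions, utilities) and documents the connection attributes and methods on both the client and server pages. As an illustration of the patterns these pages describe, a minimal client sketch (the URI and the processing step are placeholders):

    import asyncio
    import websockets

    async def consume(uri):
        # connect() used as an asynchronous context manager closes the
        # connection on exit; iterating the connection yields messages.
        async with websockets.connect(uri) as websocket:
            async for message in websocket:
                print(message)

    asyncio.get_event_loop().run_until_complete(consume("ws://localhost:8765"))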
--- docs/api.rst | 125 ----------------------- docs/api/client.rst | 74 ++++++++++++++ docs/api/extensions.rst | 26 +++++ docs/api/index.rst | 50 ++++++++++ docs/api/server.rst | 105 +++++++++++++++++++ docs/api/utilities.rst | 20 ++++ docs/design.rst | 6 +- docs/index.rst | 2 +- docs/spelling_wordlist.txt | 1 + src/websockets/legacy/auth.py | 3 - src/websockets/legacy/client.py | 122 +++++++++++++++++++--- src/websockets/legacy/protocol.py | 97 +----------------- src/websockets/legacy/server.py | 161 ++++++++++++++++++++++++------ 13 files changed, 517 insertions(+), 275 deletions(-) delete mode 100644 docs/api.rst create mode 100644 docs/api/client.rst create mode 100644 docs/api/extensions.rst create mode 100644 docs/api/index.rst create mode 100644 docs/api/server.rst create mode 100644 docs/api/utilities.rst diff --git a/docs/api.rst b/docs/api.rst deleted file mode 100644 index 2adc0dde4..000000000 --- a/docs/api.rst +++ /dev/null @@ -1,125 +0,0 @@ -API -=== - -Design ------- - -``websockets`` provides complete client and server implementations, as shown -in the :doc:`getting started guide `. These functions are built on top -of low-level APIs reflecting the two phases of the WebSocket protocol: - -1. An opening handshake, in the form of an HTTP Upgrade request; - -2. Data transfer, as framed messages, ending with a closing handshake. - -The first phase is designed to integrate with existing HTTP software. -``websockets`` provides a minimal implementation to build, parse and validate -HTTP requests and responses. - -The second phase is the core of the WebSocket protocol. ``websockets`` -provides a complete implementation on top of ``asyncio`` with a simple API. - -For convenience, public APIs can be imported directly from the -:mod:`websockets` package, unless noted otherwise. Anything that isn't listed -in this document is a private API. - -Server ------- - -.. automodule:: websockets.legacy.server - - .. autofunction:: serve(ws_handler, host=None, port=None, *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origins=None, extensions=None, subprotocols=None, extra_headers=None, process_request=None, select_subprotocol=None, **kwds) - :async: - - .. autofunction:: unix_serve(ws_handler, path, *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origins=None, extensions=None, subprotocols=None, extra_headers=None, process_request=None, select_subprotocol=None, **kwds) - :async: - - - .. autoclass:: WebSocketServer - - .. automethod:: close - .. automethod:: wait_closed - .. autoattribute:: sockets - - .. autoclass:: WebSocketServerProtocol(ws_handler, ws_server, *, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, origins=None, extensions=None, subprotocols=None, extra_headers=None, process_request=None, select_subprotocol=None) - - .. automethod:: handshake - .. automethod:: process_request - .. automethod:: select_subprotocol - -Client ------- - -.. automodule:: websockets.legacy.client - - .. 
autofunction:: connect(uri, *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origin=None, extensions=None, subprotocols=None, extra_headers=None, **kwds) - :async: - - .. autofunction:: unix_connect(path, uri="ws://localhost/", *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origin=None, extensions=None, subprotocols=None, extra_headers=None, **kwds) - :async: - - .. autoclass:: WebSocketClientProtocol(*, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, origin=None, extensions=None, subprotocols=None, extra_headers=None) - - .. automethod:: handshake - -Shared ------- - -.. automodule:: websockets.legacy.protocol - - .. autoclass:: WebSocketCommonProtocol(*, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None) - - .. automethod:: close - .. automethod:: wait_closed - - .. automethod:: recv - .. automethod:: send - - .. automethod:: ping - .. automethod:: pong - - .. autoattribute:: local_address - .. autoattribute:: remote_address - - .. autoattribute:: open - .. autoattribute:: closed - -Types ------ - -.. automodule:: websockets.typing - - .. autodata:: Data - - -Per-Message Deflate Extension ------------------------------ - -.. automodule:: websockets.extensions.permessage_deflate - - .. autoclass:: ServerPerMessageDeflateFactory - - .. autoclass:: ClientPerMessageDeflateFactory - -HTTP Basic Auth ---------------- - -.. automodule:: websockets.legacy.auth - - .. autofunction:: basic_auth_protocol_factory - - .. autoclass:: BasicAuthWebSocketServerProtocol - - .. automethod:: process_request - -Data structures ---------------- - -.. automodule:: websockets.datastructures - :members: - -Exceptions ----------- - -.. automodule:: websockets.exceptions - :members: diff --git a/docs/api/client.rst b/docs/api/client.rst new file mode 100644 index 000000000..f969227a9 --- /dev/null +++ b/docs/api/client.rst @@ -0,0 +1,74 @@ +Client +====== + +.. automodule:: websockets.legacy.client + + Opening a connection + -------------------- + + .. autofunction:: connect(uri, *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origin=None, extensions=None, subprotocols=None, extra_headers=None, **kwds) + :async: + + .. autofunction:: unix_connect(path, uri="ws://localhost/", *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origin=None, extensions=None, subprotocols=None, extra_headers=None, **kwds) + :async: + + Using a connection + ------------------ + + .. autoclass:: WebSocketClientProtocol(*, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, origin=None, extensions=None, subprotocols=None, extra_headers=None) + + .. autoattribute:: local_address + + .. autoattribute:: remote_address + + .. autoattribute:: open + + .. autoattribute:: closed + + .. 
attribute:: path + + Path of the HTTP request. + + Available once the connection is open. + + .. attribute:: request_headers + + HTTP request headers as a :class:`~websockets.http.Headers` instance. + + Available once the connection is open. + + .. attribute:: response_headers + + HTTP response headers as a :class:`~websockets.http.Headers` instance. + + Available once the connection is open. + + .. attribute:: subprotocol + + Subprotocol, if one was negotiated. + + Available once the connection is open. + + .. attribute:: close_code + + WebSocket close code. + + Available once the connection is closed. + + .. attribute:: close_reason + + WebSocket close reason. + + Available once the connection is closed. + + .. automethod:: recv + + .. automethod:: send + + .. automethod:: ping + + .. automethod:: pong + + .. automethod:: close + + .. automethod:: wait_closed diff --git a/docs/api/extensions.rst b/docs/api/extensions.rst new file mode 100644 index 000000000..635c5c426 --- /dev/null +++ b/docs/api/extensions.rst @@ -0,0 +1,26 @@ +Extensions +========== + +Per-Message Deflate +------------------- + +.. automodule:: websockets.extensions.permessage_deflate + + .. autoclass:: ClientPerMessageDeflateFactory + + .. autoclass:: ServerPerMessageDeflateFactory + +Abstract classes +---------------- + +.. automodule:: websockets.extensions.base + + .. autoclass:: Extension + :members: + + .. autoclass:: ClientExtensionFactory + :members: + + .. autoclass:: ServerExtensionFactory + :members: + diff --git a/docs/api/index.rst b/docs/api/index.rst new file mode 100644 index 000000000..20bb740b3 --- /dev/null +++ b/docs/api/index.rst @@ -0,0 +1,50 @@ +API +=== + +``websockets`` provides complete client and server implementations, as shown +in the :doc:`getting started guide <../intro>`. + +The process for opening and closing a WebSocket connection depends on which +side you're implementing. + +* On the client side, connecting to a server with :class:`~websockets.connect` + yields a connection object that provides methods for interacting with the + connection. Your code can open a connection, then send or receive messages. + + If you use :class:`~websockets.connect` as an asynchronous context manager, + then websockets closes the connection on exit. If not, then your code is + responsible for closing the connection. + +* On the server side, :class:`~websockets.serve` starts listening for client + connections and yields an server object that supports closing the server. + + Then, when clients connects, the server initializes a connection object and + passes it to a handler coroutine, which is where your code can send or + receive messages. This pattern is called `inversion of control`_. It's + common in frameworks implementing servers. + + When the handler coroutine terminates, websockets closes the connection. You + may also close it in the handler coroutine if you'd like. + +.. _inversion of control: https://en.wikipedia.org/wiki/Inversion_of_control + +Once the connection is open, the WebSocket protocol is symmetrical, except for +low-level details that websockets manages under the hood. The same methods are +available on client connections created with :class:`~websockets.connect` and +on server connections passed to the connection handler in the arguments. + +At this point, websockets provides the same API — and uses the same code — for +client and server connections. For convenience, common methods are documented +both in the client API and server API. + +.. 
toctree:: + :maxdepth: 2 + + client + server + extensions + utilities + +All public APIs can be imported from the :mod:`websockets` package, unless +noted otherwise. Anything that isn't listed in this API documentation is a +private API, with no guarantees of behavior or backwards-compatibility. diff --git a/docs/api/server.rst b/docs/api/server.rst new file mode 100644 index 000000000..16c8f6359 --- /dev/null +++ b/docs/api/server.rst @@ -0,0 +1,105 @@ +Server +====== + +.. automodule:: websockets.legacy.server + + Starting a server + ----------------- + + .. autofunction:: serve(ws_handler, host=None, port=None, *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origins=None, extensions=None, subprotocols=None, extra_headers=None, process_request=None, select_subprotocol=None, **kwds) + :async: + + .. autofunction:: unix_serve(ws_handler, path, *, create_protocol=None, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, compression='deflate', origins=None, extensions=None, subprotocols=None, extra_headers=None, process_request=None, select_subprotocol=None, **kwds) + :async: + + Stopping a server + ----------------- + + .. autoclass:: WebSocketServer + + .. autoattribute:: sockets + + .. automethod:: close + .. automethod:: wait_closed + + Using a connection + ------------------ + + .. autoclass:: WebSocketServerProtocol(ws_handler, ws_server, *, ping_interval=20, ping_timeout=20, close_timeout=10, max_size=2 ** 20, max_queue=2 ** 5, read_limit=2 ** 16, write_limit=2 ** 16, loop=None, origins=None, extensions=None, subprotocols=None, extra_headers=None, process_request=None, select_subprotocol=None) + + .. autoattribute:: local_address + + .. autoattribute:: remote_address + + .. autoattribute:: open + + .. autoattribute:: closed + + .. attribute:: path + + Path of the HTTP request. + + Available once the connection is open. + + .. attribute:: request_headers + + HTTP request headers as a :class:`~websockets.http.Headers` instance. + + Available once the connection is open. + + .. attribute:: response_headers + + HTTP response headers as a :class:`~websockets.http.Headers` instance. + + Available once the connection is open. + + .. attribute:: subprotocol + + Subprotocol, if one was negotiated. + + Available once the connection is open. + + .. attribute:: close_code + + WebSocket close code. + + Available once the connection is closed. + + .. attribute:: close_reason + + WebSocket close reason. + + Available once the connection is closed. + + .. automethod:: process_request + + .. automethod:: select_subprotocol + + .. automethod:: recv + + .. automethod:: send + + .. automethod:: ping + + .. automethod:: pong + + .. automethod:: close + + .. automethod:: wait_closed + +Basic authentication +-------------------- + +.. automodule:: websockets.legacy.auth + + .. autofunction:: basic_auth_protocol_factory + + .. autoclass:: BasicAuthWebSocketServerProtocol + + .. automethod:: process_request + + .. attribute:: username + + Username of the authenticated user. + + diff --git a/docs/api/utilities.rst b/docs/api/utilities.rst new file mode 100644 index 000000000..198e928b0 --- /dev/null +++ b/docs/api/utilities.rst @@ -0,0 +1,20 @@ +Utilities +========= + +Data structures +--------------- + +.. 
automodule:: websockets.datastructures + :members: + +Exceptions +---------- + +.. automodule:: websockets.exceptions + :members: + +Types +----- + +.. automodule:: websockets.typing + :members: diff --git a/docs/design.rst b/docs/design.rst index f2718370d..0cabc2e5d 100644 --- a/docs/design.rst +++ b/docs/design.rst @@ -13,7 +13,7 @@ wish to understand what happens under the hood. Internals described in this document may change at any time. - Backwards compatibility is only guaranteed for `public APIs `_. + Backwards compatibility is only guaranteed for `public APIs `_. Lifecycle @@ -404,8 +404,8 @@ don't involve inversion of control. Library ....... -Most :doc:`public APIs ` of ``websockets`` are coroutines. They may be -canceled, for example if the user starts a task that calls these coroutines +Most :doc:`public APIs ` of ``websockets`` are coroutines. They may +be canceled, for example if the user starts a task that calls these coroutines and cancels the task later. ``websockets`` must handle this situation. Cancellation during the opening handshake is handled like any other exception: diff --git a/docs/index.rst b/docs/index.rst index 90262ba9a..e121fd930 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -72,7 +72,7 @@ Find all the details you could ask for, and then some. .. toctree:: :maxdepth: 2 - api + api/index Discussions ----------- diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 5e0a254c7..d7c744147 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -21,6 +21,7 @@ cryptocurrencies cryptocurrency Ctrl daemonize +datastructures fractalideas IPv iterable diff --git a/src/websockets/legacy/auth.py b/src/websockets/legacy/auth.py index 8cb60429a..e0beede57 100644 --- a/src/websockets/legacy/auth.py +++ b/src/websockets/legacy/auth.py @@ -52,9 +52,6 @@ async def process_request( """ Check HTTP Basic Auth and return a HTTP 401 or 403 response if needed. - If authentication succeeds, the username of the authenticated user is - stored in the ``username`` attribute. - """ try: authorization = request_headers["Authorization"] diff --git a/src/websockets/legacy/client.py b/src/websockets/legacy/client.py index 4000375fb..1b5bd303f 100644 --- a/src/websockets/legacy/client.py +++ b/src/websockets/legacy/client.py @@ -47,8 +47,97 @@ class WebSocketClientProtocol(WebSocketCommonProtocol): """ :class:`~asyncio.Protocol` subclass implementing a WebSocket client. - This class inherits most of its methods from - :class:`~websockets.protocol.WebSocketCommonProtocol`. + :class:`WebSocketClientProtocol`: + + * performs the opening handshake to establish the connection; + * provides :meth:`recv` and :meth:`send` coroutines for receiving and + sending messages; + * deals with control frames automatically; + * performs the closing handshake to terminate the connection. + + :class:`WebSocketClientProtocol` supports asynchronous iteration:: + + async for message in websocket: + await process(message) + + The iterator yields incoming messages. It exits normally when the + connection is closed with the close code 1000 (OK) or 1001 (going away). + It raises a :exc:`~websockets.exceptions.ConnectionClosedError` exception + when the connection is closed with any other code. + + Once the connection is open, a `Ping frame`_ is sent every + ``ping_interval`` seconds. This serves as a keepalive. It helps keeping + the connection open, especially in the presence of proxies with short + timeouts on inactive connections. 
Set ``ping_interval`` to ``None`` to + disable this behavior. + + .. _Ping frame: https://tools.ietf.org/html/rfc6455#section-5.5.2 + + If the corresponding `Pong frame`_ isn't received within ``ping_timeout`` + seconds, the connection is considered unusable and is closed with + code 1011. This ensures that the remote endpoint remains responsive. Set + ``ping_timeout`` to ``None`` to disable this behavior. + + .. _Pong frame: https://tools.ietf.org/html/rfc6455#section-5.5.3 + + The ``close_timeout`` parameter defines a maximum wait time for completing + the closing handshake and terminating the TCP connection. For legacy + reasons, :meth:`close` completes in at most ``5 * close_timeout`` seconds. + + ``close_timeout`` needs to be a parameter of the protocol because + websockets usually calls :meth:`close` implicitly upon exit when + :func:`connect` is used as a context manager. + + To apply a timeout to any other API, wrap it in :func:`~asyncio.wait_for`. + + The ``max_size`` parameter enforces the maximum size for incoming messages + in bytes. The default value is 1 MiB. ``None`` disables the limit. If a + message larger than the maximum size is received, :meth:`recv` will + raise :exc:`~websockets.exceptions.ConnectionClosedError` and the + connection will be closed with code 1009. + + The ``max_queue`` parameter sets the maximum length of the queue that + holds incoming messages. The default value is ``32``. ``None`` disables + the limit. Messages are added to an in-memory queue when they're received; + then :meth:`recv` pops from that queue. In order to prevent excessive + memory consumption when messages are received faster than they can be + processed, the queue must be bounded. If the queue fills up, the protocol + stops processing incoming data until :meth:`recv` is called. In this + situation, various receive buffers (at least in :mod:`asyncio` and in the + OS) will fill up, then the TCP receive window will shrink, slowing down + transmission to avoid packet loss. + + Since Python can use up to 4 bytes of memory to represent a single + character, each connection may use up to ``4 * max_size * max_queue`` + bytes of memory to store incoming messages. By default, this is 128 MiB. + You may want to lower the limits, depending on your application's + requirements. + + The ``read_limit`` argument sets the high-water limit of the buffer for + incoming bytes. The low-water limit is half the high-water limit. The + default value is 64 KiB, half of asyncio's default (based on the current + implementation of :class:`~asyncio.StreamReader`). + + The ``write_limit`` argument sets the high-water limit of the buffer for + outgoing bytes. The low-water limit is a quarter of the high-water limit. + The default value is 64 KiB, equal to asyncio's default (based on the + current implementation of ``FlowControlMixin``). + + As soon as the HTTP request and response in the opening handshake are + processed: + + * the request path is available in the :attr:`path` attribute; + * the request and response HTTP headers are available in the + :attr:`request_headers` and :attr:`response_headers` attributes, + which are :class:`~websockets.http.Headers` instances. + + If a subprotocol was negotiated, it's available in the :attr:`subprotocol` + attribute. + + Once the connection is closed, the code is available in the + :attr:`close_code` attribute and the reason in :attr:`close_reason`. + + All attributes must be treated as read-only. 
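+
+    For example, to disable keepalive pings and apply an explicit timeout
+    to the next message (a usage sketch; ``uri`` is a placeholder)::
+
+        websocket = await connect(uri, ping_interval=None)
+        try:
+            message = await asyncio.wait_for(websocket.recv(), timeout=10)
+        finally:
+            await websocket.close()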
""" @@ -318,8 +407,12 @@ class Connect: Awaiting :func:`connect` yields a :class:`WebSocketClientProtocol` which can then be used to send and receive messages. - :func:`connect` can also be used as a asynchronous context manager. In - that case, the connection is closed when exiting the context. + :func:`connect` can also be used as a asynchronous context manager:: + + async with connect(...) as websocket: + ... + + In that case, the connection is closed when exiting the context. :func:`connect` is a wrapper around the event loop's :meth:`~asyncio.loop.create_connection` method. Unknown keyword arguments @@ -336,31 +429,28 @@ class Connect: used in the TLS handshake for secure connections and in the ``Host`` HTTP header. - The ``create_protocol`` parameter allows customizing the - :class:`~asyncio.Protocol` that manages the connection. It should be a - callable or class accepting the same arguments as - :class:`WebSocketClientProtocol` and returning an instance of - :class:`WebSocketClientProtocol` or a subclass. It defaults to - :class:`WebSocketClientProtocol`. + ``create_protocol`` defaults to :class:`WebSocketClientProtocol`. It may + be replaced by a wrapper or a subclass to customize the protocol that + manages the connection. The behavior of ``ping_interval``, ``ping_timeout``, ``close_timeout``, ``max_size``, ``max_queue``, ``read_limit``, and ``write_limit`` is - described in :class:`~websockets.protocol.WebSocketCommonProtocol`. + described in :class:`WebSocketClientProtocol`. :func:`connect` also accepts the following optional arguments: * ``compression`` is a shortcut to configure compression extensions; by default it enables the "permessage-deflate" extension; set it to - ``None`` to disable compression - * ``origin`` sets the Origin HTTP header + ``None`` to disable compression. + * ``origin`` sets the Origin HTTP header. * ``extensions`` is a list of supported extensions in order of - decreasing preference + decreasing preference. * ``subprotocols`` is a list of supported subprotocols in order of - decreasing preference + decreasing preference. * ``extra_headers`` sets additional HTTP request headers; it can be a :class:`~websockets.http.Headers` instance, a :class:`~collections.abc.Mapping`, or an iterable of ``(name, value)`` - pairs + pairs. :raises ~websockets.uri.InvalidURI: if ``uri`` is invalid :raises ~websockets.handshake.InvalidHandshake: if the opening handshake diff --git a/src/websockets/legacy/protocol.py b/src/websockets/legacy/protocol.py index 56c4d5f6a..a46e3dc4e 100644 --- a/src/websockets/legacy/protocol.py +++ b/src/websockets/legacy/protocol.py @@ -82,108 +82,13 @@ class WebSocketCommonProtocol(asyncio.Protocol): Once the WebSocket connection is established, during the data transfer phase, the protocol is almost symmetrical between the server side and the client side. :class:`WebSocketCommonProtocol` implements logic that's - shared between servers and clients.. + shared between servers and clients. Subclasses such as :class:`~websockets.legacy.server.WebSocketServerProtocol` and :class:`~websockets.legacy.client.WebSocketClientProtocol` implement the opening handshake, which is different between servers and clients. - :class:`WebSocketCommonProtocol` performs four functions: - - * It runs a task that stores incoming data frames in a queue and makes - them available with the :meth:`recv` coroutine. - * It sends outgoing data frames with the :meth:`send` coroutine. - * It deals with control frames automatically. - * It performs the closing handshake. 
- - :class:`WebSocketCommonProtocol` supports asynchronous iteration:: - - async for message in websocket: - await process(message) - - The iterator yields incoming messages. It exits normally when the - connection is closed with the close code 1000 (OK) or 1001 (going away). - It raises a :exc:`~websockets.exceptions.ConnectionClosedError` exception - when the connection is closed with any other code. - - Once the connection is open, a `Ping frame`_ is sent every - ``ping_interval`` seconds. This serves as a keepalive. It helps keeping - the connection open, especially in the presence of proxies with short - timeouts on inactive connections. Set ``ping_interval`` to ``None`` to - disable this behavior. - - .. _Ping frame: https://tools.ietf.org/html/rfc6455#section-5.5.2 - - If the corresponding `Pong frame`_ isn't received within ``ping_timeout`` - seconds, the connection is considered unusable and is closed with - code 1011. This ensures that the remote endpoint remains responsive. Set - ``ping_timeout`` to ``None`` to disable this behavior. - - .. _Pong frame: https://tools.ietf.org/html/rfc6455#section-5.5.3 - - The ``close_timeout`` parameter defines a maximum wait time in seconds for - completing the closing handshake and terminating the TCP connection. - :meth:`close` completes in at most ``4 * close_timeout`` on the server - side and ``5 * close_timeout`` on the client side. - - ``close_timeout`` needs to be a parameter of the protocol because - ``websockets`` usually calls :meth:`close` implicitly: - - - on the server side, when the connection handler terminates, - - on the client side, when exiting the context manager for the connection. - - To apply a timeout to any other API, wrap it in :func:`~asyncio.wait_for`. - - The ``max_size`` parameter enforces the maximum size for incoming messages - in bytes. The default value is 1 MiB. ``None`` disables the limit. If a - message larger than the maximum size is received, :meth:`recv` will - raise :exc:`~websockets.exceptions.ConnectionClosedError` and the - connection will be closed with code 1009. - - The ``max_queue`` parameter sets the maximum length of the queue that - holds incoming messages. The default value is ``32``. ``None`` disables - the limit. Messages are added to an in-memory queue when they're received; - then :meth:`recv` pops from that queue. In order to prevent excessive - memory consumption when messages are received faster than they can be - processed, the queue must be bounded. If the queue fills up, the protocol - stops processing incoming data until :meth:`recv` is called. In this - situation, various receive buffers (at least in ``asyncio`` and in the OS) - will fill up, then the TCP receive window will shrink, slowing down - transmission to avoid packet loss. - - Since Python can use up to 4 bytes of memory to represent a single - character, each connection may use up to ``4 * max_size * max_queue`` - bytes of memory to store incoming messages. By default, this is 128 MiB. - You may want to lower the limits, depending on your application's - requirements. - - The ``read_limit`` argument sets the high-water limit of the buffer for - incoming bytes. The low-water limit is half the high-water limit. The - default value is 64 KiB, half of asyncio's default (based on the current - implementation of :class:`~asyncio.StreamReader`). - - The ``write_limit`` argument sets the high-water limit of the buffer for - outgoing bytes. The low-water limit is a quarter of the high-water limit. 
- The default value is 64 KiB, equal to asyncio's default (based on the - current implementation of ``FlowControlMixin``). - - As soon as the HTTP request and response in the opening handshake are - processed: - - * the request path is available in the :attr:`path` attribute; - * the request and response HTTP headers are available in the - :attr:`request_headers` and :attr:`response_headers` attributes, - which are :class:`~websockets.http.Headers` instances. - - If a subprotocol was negotiated, it's available in the :attr:`subprotocol` - attribute. - - Once the connection is closed, the code is available in the - :attr:`close_code` attribute and the reason in :attr:`close_reason`. - - All these attributes must be treated as read-only. - """ # There are only two differences between the client-side and server-side diff --git a/src/websockets/legacy/server.py b/src/websockets/legacy/server.py index 8e5f97a66..e693bbd2f 100644 --- a/src/websockets/legacy/server.py +++ b/src/websockets/legacy/server.py @@ -62,11 +62,107 @@ class WebSocketServerProtocol(WebSocketCommonProtocol): """ :class:`~asyncio.Protocol` subclass implementing a WebSocket server. - This class inherits most of its methods from - :class:`~websockets.protocol.WebSocketCommonProtocol`. - - For the sake of simplicity, it doesn't rely on a full HTTP implementation. - Its support for HTTP responses is very limited. + :class:`WebSocketServerProtocol`: + + * performs the opening handshake to establish the connection; + * provides :meth:`recv` and :meth:`send` coroutines for receiving and + sending messages; + * deals with control frames automatically; + * performs the closing handshake to terminate the connection. + + You may customize the opening handshake by subclassing + :class:`WebSocketServer` and overriding: + + * :meth:`process_request` to intercept the client request before any + processing and, if appropriate, to abort the WebSocket request and + return a HTTP response instead; + * :meth:`select_subprotocol` to select a subprotocol, if the client and + the server have multiple subprotocols in common and the default logic + for choosing one isn't suitable (this is rarely needed). + + :class:`WebSocketServerProtocol` supports asynchronous iteration:: + + async for message in websocket: + await process(message) + + The iterator yields incoming messages. It exits normally when the + connection is closed with the close code 1000 (OK) or 1001 (going away). + It raises a :exc:`~websockets.exceptions.ConnectionClosedError` exception + when the connection is closed with any other code. + + Once the connection is open, a `Ping frame`_ is sent every + ``ping_interval`` seconds. This serves as a keepalive. It helps keeping + the connection open, especially in the presence of proxies with short + timeouts on inactive connections. Set ``ping_interval`` to ``None`` to + disable this behavior. + + .. _Ping frame: https://tools.ietf.org/html/rfc6455#section-5.5.2 + + If the corresponding `Pong frame`_ isn't received within ``ping_timeout`` + seconds, the connection is considered unusable and is closed with + code 1011. This ensures that the remote endpoint remains responsive. Set + ``ping_timeout`` to ``None`` to disable this behavior. + + .. _Pong frame: https://tools.ietf.org/html/rfc6455#section-5.5.3 + + The ``close_timeout`` parameter defines a maximum wait time for completing + the closing handshake and terminating the TCP connection. For legacy + reasons, :meth:`close` completes in at most ``4 * close_timeout`` seconds. 
+ + ``close_timeout`` needs to be a parameter of the protocol because + websockets usually calls :meth:`close` implicitly when the connection + handler terminates. + + To apply a timeout to any other API, wrap it in :func:`~asyncio.wait_for`. + + The ``max_size`` parameter enforces the maximum size for incoming messages + in bytes. The default value is 1 MiB. ``None`` disables the limit. If a + message larger than the maximum size is received, :meth:`recv` will + raise :exc:`~websockets.exceptions.ConnectionClosedError` and the + connection will be closed with code 1009. + + The ``max_queue`` parameter sets the maximum length of the queue that + holds incoming messages. The default value is ``32``. ``None`` disables + the limit. Messages are added to an in-memory queue when they're received; + then :meth:`recv` pops from that queue. In order to prevent excessive + memory consumption when messages are received faster than they can be + processed, the queue must be bounded. If the queue fills up, the protocol + stops processing incoming data until :meth:`recv` is called. In this + situation, various receive buffers (at least in :mod:`asyncio` and in the + OS) will fill up, then the TCP receive window will shrink, slowing down + transmission to avoid packet loss. + + Since Python can use up to 4 bytes of memory to represent a single + character, each connection may use up to ``4 * max_size * max_queue`` + bytes of memory to store incoming messages. By default, this is 128 MiB. + You may want to lower the limits, depending on your application's + requirements. + + The ``read_limit`` argument sets the high-water limit of the buffer for + incoming bytes. The low-water limit is half the high-water limit. The + default value is 64 KiB, half of asyncio's default (based on the current + implementation of :class:`~asyncio.StreamReader`). + + The ``write_limit`` argument sets the high-water limit of the buffer for + outgoing bytes. The low-water limit is a quarter of the high-water limit. + The default value is 64 KiB, equal to asyncio's default (based on the + current implementation of ``FlowControlMixin``). + + As soon as the HTTP request and response in the opening handshake are + processed: + + * the request path is available in the :attr:`path` attribute; + * the request and response HTTP headers are available in the + :attr:`request_headers` and :attr:`response_headers` attributes, + which are :class:`~websockets.http.Headers` instances. + + If a subprotocol was negotiated, it's available in the :attr:`subprotocol` + attribute. + + Once the connection is closed, the code is available in the + :attr:`close_code` attribute and the reason in :attr:`close_reason`. + + All attributes must be treated as read-only. """ @@ -487,7 +583,7 @@ def select_subprotocol( Instead of subclassing, it is possible to override this method by passing a ``select_subprotocol`` argument to the :func:`serve` - function or the :class:`WebSocketServerProtocol` constructor + function or the :class:`WebSocketServerProtocol` constructor. :param client_subprotocols: list of subprotocols offered by the client :param server_subprotocols: list of subprotocols available on the server @@ -780,66 +876,69 @@ class Serve: :exc:`~websockets.exceptions.ConnectionClosedOK` exception on their current or next interaction with the WebSocket connection. - :func:`serve` can also be used as an asynchronous context manager. In - this case, the server is shut down when exiting the context. 
+ :func:`serve` can also be used as an asynchronous context manager:: + + stop = asyncio.Future() # set this future to exit the server + + async with serve(...): + await stop + + In this case, the server is shut down when exiting the context. :func:`serve` is a wrapper around the event loop's :meth:`~asyncio.loop.create_server` method. It creates and starts a - :class:`~asyncio.Server` with :meth:`~asyncio.loop.create_server`. Then it - wraps the :class:`~asyncio.Server` in a :class:`WebSocketServer` and + :class:`asyncio.Server` with :meth:`~asyncio.loop.create_server`. Then it + wraps the :class:`asyncio.Server` in a :class:`WebSocketServer` and returns the :class:`WebSocketServer`. - The ``ws_handler`` argument is the WebSocket handler. It must be a - coroutine accepting two arguments: a :class:`WebSocketServerProtocol` and - the request URI. + ``ws_handler`` is the WebSocket handler. It must be a coroutine accepting + two arguments: the WebSocket connection, which is an instance of + :class:`WebSocketServerProtocol`, and the path of the request. The ``host`` and ``port`` arguments, as well as unrecognized keyword - arguments, are passed along to :meth:`~asyncio.loop.create_server`. + arguments, are passed to :meth:`~asyncio.loop.create_server`. For example, you can set the ``ssl`` keyword argument to a :class:`~ssl.SSLContext` to enable TLS. - The ``create_protocol`` parameter allows customizing the - :class:`~asyncio.Protocol` that manages the connection. It should be a - callable or class accepting the same arguments as - :class:`WebSocketServerProtocol` and returning an instance of - :class:`WebSocketServerProtocol` or a subclass. It defaults to - :class:`WebSocketServerProtocol`. + ``create_protocol`` defaults to :class:`WebSocketServerProtocol`. It may + be replaced by a wrapper or a subclass to customize the protocol that + manages the connection. The behavior of ``ping_interval``, ``ping_timeout``, ``close_timeout``, ``max_size``, ``max_queue``, ``read_limit``, and ``write_limit`` is - described in :class:`~websockets.protocol.WebSocketCommonProtocol`. + described in :class:`WebSocketServerProtocol`. :func:`serve` also accepts the following optional arguments: * ``compression`` is a shortcut to configure compression extensions; by default it enables the "permessage-deflate" extension; set it to - ``None`` to disable compression - * ``origins`` defines acceptable Origin HTTP headers; include ``None`` if - the lack of an origin is acceptable + ``None`` to disable compression. + * ``origins`` defines acceptable Origin HTTP headers; include ``None`` in + the list if the lack of an origin is acceptable. * ``extensions`` is a list of supported extensions in order of - decreasing preference + decreasing preference. * ``subprotocols`` is a list of supported subprotocols in order of - decreasing preference + decreasing preference. * ``extra_headers`` sets additional HTTP response headers when the handshake succeeds; it can be a :class:`~websockets.http.Headers` instance, a :class:`~collections.abc.Mapping`, an iterable of ``(name, value)`` pairs, or a callable taking the request path and headers in - arguments and returning one of the above + arguments and returning one of the above. * ``process_request`` allows intercepting the HTTP request; it must be a coroutine taking the request path and headers in argument; see - :meth:`~WebSocketServerProtocol.process_request` for details + :meth:`~WebSocketServerProtocol.process_request` for details. 
* ``select_subprotocol`` allows customizing the logic for selecting a subprotocol; it must be a callable taking the subprotocols offered by the client and available on the server in argument; see - :meth:`~WebSocketServerProtocol.select_subprotocol` for details + :meth:`~WebSocketServerProtocol.select_subprotocol` for details. Since there's no useful way to propagate exceptions triggered in handlers, - they're sent to the ``'websockets.legacy.server'`` logger instead. + they're sent to the ``"websockets.server"`` logger instead. Debugging is much easier if you configure logging to print them:: import logging - logger = logging.getLogger("websockets.legacy.server") + logger = logging.getLogger("websockets.server") logger.setLevel(logging.ERROR) logger.addHandler(logging.StreamHandler()) From 927287380011e4388c11c24d286beef2b877284d Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 1 May 2021 22:49:58 +0200 Subject: [PATCH 263/281] Work around coverage bug. --- src/websockets/legacy/protocol.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/websockets/legacy/protocol.py b/src/websockets/legacy/protocol.py index a46e3dc4e..e4c6d63c5 100644 --- a/src/websockets/legacy/protocol.py +++ b/src/websockets/legacy/protocol.py @@ -549,7 +549,9 @@ async def send( # Other fragments. # https://github.com/python/mypy/issues/5738 - async for message_chunk in aiter_message: # type: ignore + # coverage reports this code as not covered, but it is + # exercised by tests - changing it breaks the tests! + async for message_chunk in aiter_message: # type: ignore # pragma: no cover # noqa confirm_opcode, data = prepare_data(message_chunk) if confirm_opcode != opcode: raise TypeError("data contains inconsistent types") From 5ab214b00f38cae3976fce5a315fbfa30762b60d Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 1 May 2021 22:39:12 +0200 Subject: [PATCH 264/281] Bump version number --- docs/changelog.rst | 7 ++++++- docs/conf.py | 4 ++-- src/websockets/version.py | 2 +- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 91ea23dc9..2644d3735 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -25,11 +25,16 @@ fixing regressions shortly after a release. Only documented APIs are public. Undocumented APIs are considered private. They may change at any time. -9.0 +9.1 ... *In development* +9.0 +... + +*May 1, 2021* + .. note:: **Version 9.0 moves or deprecates several APIs.** diff --git a/docs/conf.py b/docs/conf.py index 0c00b96fb..dad7475f7 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -59,9 +59,9 @@ # built documents. # # The short X.Y version. -version = '8.1' +version = '9.0' # The full version, including alpha/beta/rc tags. -release = '8.1' +release = '9.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/src/websockets/version.py b/src/websockets/version.py index 7377332e1..94d9f2ead 100644 --- a/src/websockets/version.py +++ b/src/websockets/version.py @@ -1 +1 @@ -version = "8.1" +version = "9.0" From 5d6fcf96cd81680e35cba00ed52cb12bf2c8f544 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 1 May 2021 22:59:03 +0200 Subject: [PATCH 265/281] Python 3.9 is now released. 
--- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 7be85d7f9..8a1441209 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -40,7 +40,7 @@ jobs: - run: tox -e py38 py39: docker: - - image: circleci/python:3.9.0b1 + - image: circleci/python:3.9 steps: # Remove IPv6 entry for localhost in Circle CI containers because it doesn't work anyway. - run: sudo cp /etc/hosts /tmp; sudo sed -i '/::1/d' /tmp/hosts; sudo cp /tmp/hosts /etc From 56be5f71e273fee7a2ef86166838f574b58e3c59 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 2 May 2021 15:36:56 +0200 Subject: [PATCH 266/281] Build wheels on Python 3.9. --- .appveyor.yml | 2 +- .travis.yml | 2 +- setup.cfg | 2 +- setup.py | 1 + 4 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.appveyor.yml b/.appveyor.yml index d34b15aed..ef17ebba5 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -6,7 +6,7 @@ skip_branch_with_pr: true environment: # websockets only works on Python >= 3.6. - CIBW_BUILD: cp36-* cp37-* cp38-* + CIBW_BUILD: cp36-* cp37-* cp38-* cp39-* CIBW_TEST_COMMAND: python -W default -m unittest WEBSOCKETS_TESTS_TIMEOUT_FACTOR: 100 diff --git a/.travis.yml b/.travis.yml index e31c9ea0b..f2bfc724e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,7 @@ env: global: # websockets only works on Python >= 3.6. - - CIBW_BUILD="cp36-* cp37-* cp38-*" + - CIBW_BUILD="cp36-* cp37-* cp38-* cp39-*"" - CIBW_TEST_COMMAND="python3 -W default -m unittest" - WEBSOCKETS_TESTS_TIMEOUT_FACTOR=100 diff --git a/setup.cfg b/setup.cfg index 5448b0f9b..04b792989 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,5 +1,5 @@ [bdist_wheel] -python-tag = py36.py37 +python-tag = py36.py37.py38.py39 [metadata] license_file = LICENSE diff --git a/setup.py b/setup.py index 85d899cb4..5adb8e835 100644 --- a/setup.py +++ b/setup.py @@ -54,6 +54,7 @@ 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', ], package_dir = {'': 'src'}, package_data = {'websockets': ['py.typed']}, From cbae1fb00e07a880bc7e9b566249afa474469c0d Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 2 May 2021 15:36:56 +0200 Subject: [PATCH 267/281] Setup GitHub actions. 
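This adds a ``tests`` workflow with two jobs: ``main`` runs the test suite with coverage plus the black, flake8, isort, and mypy checks through tox, and ``matrix`` runs the tests on Python 3.6 through 3.9. The same checks can still be run locally, for example with ``tox -e coverage,black,flake8,isort,mypy``.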
--- .github/workflows/tests.yml | 52 +++++++++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) create mode 100644 .github/workflows/tests.yml diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 000000000..eb06ebfea --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,52 @@ +name: Run tests + +on: + push: + branches: + - main + pull_request: + branches: + - main + +jobs: + main: + name: Run code quality checks + runs-on: ubuntu-latest + steps: + - name: Check out repository + uses: actions/checkout@v2 + - name: Install Python 3.x + uses: actions/setup-python@v2 + with: + python-version: 3.x + - name: Install tox + run: pip install tox + - name: Run tests with coverage + run: tox -e coverage + - name: Check code formatting + run: tox -e black + - name: Check code style + run: tox -e flake8 + - name: Check imports ordering + run: tox -e isort + - name: Check types statically + run: tox -e mypy + + matrix: + name: Run tests on Python ${{ matrix.python }} + needs: main + runs-on: ubuntu-latest + strategy: + matrix: + python: [3.6, 3.7, 3.8, 3.9] + steps: + - name: Check out repository + uses: actions/checkout@v2 + - name: Install Python ${{ matrix.python }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + - name: Install tox + run: pip install tox + - name: Run tests + run: tox -e py From 3d55449d5df642d6be401c21afee450edb8c4422 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 2 May 2021 17:57:53 +0200 Subject: [PATCH 268/281] Drop CircleCI setup. --- .circleci/config.yml | 67 -------------------------------------------- 1 file changed, 67 deletions(-) delete mode 100644 .circleci/config.yml diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index 8a1441209..000000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,67 +0,0 @@ -version: 2 - -jobs: - main: - docker: - - image: circleci/python:3.7 - steps: - # Remove IPv6 entry for localhost in Circle CI containers because it doesn't work anyway. - - run: sudo cp /etc/hosts /tmp; sudo sed -i '/::1/d' /tmp/hosts; sudo cp /tmp/hosts /etc - - checkout - - run: sudo pip install tox codecov - - run: tox -e coverage,black,flake8,isort,mypy - - run: codecov - py36: - docker: - - image: circleci/python:3.6 - steps: - # Remove IPv6 entry for localhost in Circle CI containers because it doesn't work anyway. - - run: sudo cp /etc/hosts /tmp; sudo sed -i '/::1/d' /tmp/hosts; sudo cp /tmp/hosts /etc - - checkout - - run: sudo pip install tox - - run: tox -e py36 - py37: - docker: - - image: circleci/python:3.7 - steps: - # Remove IPv6 entry for localhost in Circle CI containers because it doesn't work anyway. - - run: sudo cp /etc/hosts /tmp; sudo sed -i '/::1/d' /tmp/hosts; sudo cp /tmp/hosts /etc - - checkout - - run: sudo pip install tox - - run: tox -e py37 - py38: - docker: - - image: circleci/python:3.8 - steps: - # Remove IPv6 entry for localhost in Circle CI containers because it doesn't work anyway. - - run: sudo cp /etc/hosts /tmp; sudo sed -i '/::1/d' /tmp/hosts; sudo cp /tmp/hosts /etc - - checkout - - run: sudo pip install tox - - run: tox -e py38 - py39: - docker: - - image: circleci/python:3.9 - steps: - # Remove IPv6 entry for localhost in Circle CI containers because it doesn't work anyway. 
- - run: sudo cp /etc/hosts /tmp; sudo sed -i '/::1/d' /tmp/hosts; sudo cp /tmp/hosts /etc - - checkout - - run: sudo pip install tox - - run: tox -e py39 - -workflows: - version: 2 - build: - jobs: - - main - - py36: - requires: - - main - - py37: - requires: - - main - - py38: - requires: - - main - - py39: - requires: - - main From c3d7b7f6565bd2a40aa5cdd5d0e44642148d41e2 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 2 May 2021 17:57:59 +0200 Subject: [PATCH 269/281] Change badge in README. --- README.rst | 9 +++------ docs/index.rst | 9 +++------ 2 files changed, 6 insertions(+), 12 deletions(-) diff --git a/README.rst b/README.rst index 1e15ba198..bda73c640 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ :width: 480px :alt: websockets -|rtd| |pypi-v| |pypi-pyversions| |pypi-l| |pypi-wheel| |circleci| |codecov| +|rtd| |pypi-v| |pypi-pyversions| |pypi-l| |pypi-wheel| |tests| .. |rtd| image:: https://readthedocs.org/projects/websockets/badge/?version=latest :target: https://websockets.readthedocs.io/ @@ -19,11 +19,8 @@ .. |pypi-wheel| image:: https://img.shields.io/pypi/wheel/websockets.svg :target: https://pypi.python.org/pypi/websockets -.. |circleci| image:: https://img.shields.io/circleci/project/github/aaugustin/websockets.svg - :target: https://circleci.com/gh/aaugustin/websockets - -.. |codecov| image:: https://codecov.io/gh/aaugustin/websockets/branch/master/graph/badge.svg - :target: https://codecov.io/gh/aaugustin/websockets +.. |tests| image:: https://github.com/aaugustin/websockets/workflows/tests/badge.svg?branch=master + :target: https://github.com/aaugustin/websockets/actions?workflow=tests What is ``websockets``? ----------------------- diff --git a/docs/index.rst b/docs/index.rst index e121fd930..5914d7289 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,7 +1,7 @@ websockets ========== -|pypi-v| |pypi-pyversions| |pypi-l| |pypi-wheel| |circleci| |codecov| +|pypi-v| |pypi-pyversions| |pypi-l| |pypi-wheel| |tests| .. |pypi-v| image:: https://img.shields.io/pypi/v/websockets.svg :target: https://pypi.python.org/pypi/websockets @@ -15,11 +15,8 @@ websockets .. |pypi-wheel| image:: https://img.shields.io/pypi/wheel/websockets.svg :target: https://pypi.python.org/pypi/websockets -.. |circleci| image:: https://img.shields.io/circleci/project/github/aaugustin/websockets.svg - :target: https://circleci.com/gh/aaugustin/websockets - -.. |codecov| image:: https://codecov.io/gh/aaugustin/websockets/branch/master/graph/badge.svg - :target: https://codecov.io/gh/aaugustin/websockets +.. |tests| image:: https://github.com/aaugustin/websockets/workflows/tests/badge.svg?branch=master + :target: https://github.com/aaugustin/websockets/actions?workflow=tests ``websockets`` is a library for building WebSocket servers_ and clients_ in Python with a focus on correctness and simplicity. From a45cc5afe067925759a2644bd9ef9b5346adefa1 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 2 May 2021 20:16:16 +0200 Subject: [PATCH 270/281] Build distributions on GitHub actions. 
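This adds a ``wheels`` workflow that builds the source distribution, builds binary wheels with cibuildwheel for CPython 3.6 through 3.9 on Linux, Windows, and macOS (including aarch64 wheels via QEMU on Linux), and uploads the artifacts to PyPI when a tag is pushed.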
--- .github/workflows/wheels.yml | 71 ++++++++++++++++++++++++++++++++++++ 1 file changed, 71 insertions(+) create mode 100644 .github/workflows/wheels.yml diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml new file mode 100644 index 000000000..7ea97c61f --- /dev/null +++ b/.github/workflows/wheels.yml @@ -0,0 +1,71 @@ +name: Build wheels + +on: + push: + branches: + - main + tags: + - '*' + +jobs: + sdist: + name: Build source distribution + runs-on: ubuntu-latest + steps: + - name: Check out repository + uses: actions/checkout@v2 + - name: Install Python 3.x + uses: actions/setup-python@v2 + with: + python-version: 3.x + - name: Build sdist + run: python setup.py sdist + - name: Save sdist + uses: actions/upload-artifact@v2 + with: + path: dist/*.tar.gz + + wheels: + name: Build wheels on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-20.04, windows-2019, macOS-10.15] + + steps: + - name: Check out repository + uses: actions/checkout@v2 + - name: Make extension build mandatory + run: touch .cibuildwheel + - name: Install Python 3.x + uses: actions/setup-python@v2 + with: + python-version: 3.x + - name: Set up QEMU + if: runner.os == 'Linux' + uses: docker/setup-qemu-action@v1 + with: + platforms: all + - name: Build wheels + uses: joerick/cibuildwheel@v1.11.0 + env: + CIBW_ARCHS_LINUX: auto aarch64 + CIBW_BUILD: cp36-* cp37-* cp38-* cp39-* + - name: Save wheels + uses: actions/upload-artifact@v2 + with: + path: wheelhouse/*.whl + + upload_pypi: + name: Upload to PyPI + needs: [sdist, wheels] + runs-on: ubuntu-latest + if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') + steps: + - uses: actions/download-artifact@v2 + with: + name: artifact + path: dist + - uses: pypa/gh-action-pypi-publish@master + with: + password: ${{ secrets.PYPI_API_TOKEN }} From b0d211d0f32633977e73f51a1573e6a07319a0b0 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 2 May 2021 20:23:38 +0200 Subject: [PATCH 271/281] Drop Travis CI and Appveyor setup. --- .appveyor.yml | 27 --------------------------- .travis.yml | 43 ------------------------------------------- 2 files changed, 70 deletions(-) delete mode 100644 .appveyor.yml delete mode 100644 .travis.yml diff --git a/.appveyor.yml b/.appveyor.yml deleted file mode 100644 index ef17ebba5..000000000 --- a/.appveyor.yml +++ /dev/null @@ -1,27 +0,0 @@ -branches: - only: - - master - -skip_branch_with_pr: true - -environment: -# websockets only works on Python >= 3.6. - CIBW_BUILD: cp36-* cp37-* cp38-* cp39-* - CIBW_TEST_COMMAND: python -W default -m unittest - WEBSOCKETS_TESTS_TIMEOUT_FACTOR: 100 - -install: -# Ensure python is Python 3. - - set PATH=C:\Python37;%PATH% - - cmd: python -m pip install --upgrade cibuildwheel -# Create file '.cibuildwheel' so that extension build is not optional (c.f. setup.py). - - cmd: touch .cibuildwheel - -build_script: - - cmd: python -m cibuildwheel --output-dir wheelhouse -# Upload to PyPI on tags - - ps: >- - if ($env:APPVEYOR_REPO_TAG -eq "true") { - Invoke-Expression "python -m pip install twine" - Invoke-Expression "python -m twine upload --skip-existing wheelhouse/*.whl" - } diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index f2bfc724e..000000000 --- a/.travis.yml +++ /dev/null @@ -1,43 +0,0 @@ -env: - global: - # websockets only works on Python >= 3.6. 
- - CIBW_BUILD="cp36-* cp37-* cp38-* cp39-*"" - - CIBW_TEST_COMMAND="python3 -W default -m unittest" - - WEBSOCKETS_TESTS_TIMEOUT_FACTOR=100 - -matrix: - include: - - language: python - dist: xenial # required for Python 3.7 (travis-ci/travis-ci#9069) - sudo: required - python: "3.7" - services: - - docker - - language: python - dist: xenial - sudo: required - python: "3.7" - arch: arm64 - services: - - docker - - os: osx - osx_image: xcode8.3 - -install: -# Python 3 is needed to run cibuildwheel for websockets. - - if [ "${TRAVIS_OS_NAME:-}" == "osx" ]; then - brew update; - brew upgrade python; - fi -# Install cibuildwheel using pip3 to make sure Python 3 is used. - - pip3 install --upgrade cibuildwheel -# Create file '.cibuildwheel' so that extension build is not optional (c.f. setup.py). - - touch .cibuildwheel - -script: - - cibuildwheel --output-dir wheelhouse -# Upload to PyPI on tags - - if [ "${TRAVIS_TAG:-}" != "" ]; then - pip3 install twine; - python3 -m twine upload --skip-existing wheelhouse/*; - fi From fc176f462b6a5ef4f470df415780b09fed5da7c1 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 2 May 2021 20:49:05 +0200 Subject: [PATCH 272/281] Bump version number. --- docs/changelog.rst | 7 +++++++ src/websockets/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 2644d3735..1e5f92211 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -30,6 +30,13 @@ They may change at any time. *In development* +9.0.1 +..... + +*May 2, 2021* + +* Fixed issues with the packaging of the 9.0 release. + 9.0 ... diff --git a/src/websockets/version.py b/src/websockets/version.py index 94d9f2ead..23b7f329b 100644 --- a/src/websockets/version.py +++ b/src/websockets/version.py @@ -1 +1 @@ -version = "9.0" +version = "9.0.1" From 217ac2d19174c6f01d9524648eb4058985f72754 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Thu, 13 May 2021 22:37:31 +0200 Subject: [PATCH 273/281] Fix broken link. Fix #953. --- docs/design.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/design.rst b/docs/design.rst index 0cabc2e5d..61b42b528 100644 --- a/docs/design.rst +++ b/docs/design.rst @@ -13,7 +13,7 @@ wish to understand what happens under the hood. Internals described in this document may change at any time. - Backwards compatibility is only guaranteed for `public APIs `_. + Backwards compatibility is only guaranteed for :doc:`public APIs `. Lifecycle From 70fadbf97c5a117ca13f6c8f4f111ba5025f3c94 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Thu, 13 May 2021 22:41:46 +0200 Subject: [PATCH 274/281] Restore compatibility with Python < 3.9. Fix #951. --- docs/changelog.rst | 7 +++++++ src/websockets/__main__.py | 4 ++-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 1e5f92211..fb40aee2a 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -30,6 +30,13 @@ They may change at any time. *In development* +9.0.2 +..... + +*In development* + +* Restored compatibility of ``python -m websockets`` with Python < 3.9. + 9.0.1 ..... diff --git a/src/websockets/__main__.py b/src/websockets/__main__.py index d44e34e74..fb126997a 100644 --- a/src/websockets/__main__.py +++ b/src/websockets/__main__.py @@ -178,11 +178,11 @@ def main() -> None: # Due to zealous removal of the loop parameter in the Queue constructor, # we need a factory coroutine to run in the freshly created event loop. 
- async def queue_factory() -> asyncio.Queue[str]: + async def queue_factory() -> "asyncio.Queue[str]": return asyncio.Queue() # Create a queue of user inputs. There's no need to limit its size. - inputs: asyncio.Queue[str] = loop.run_until_complete(queue_factory()) + inputs: "asyncio.Queue[str]" = loop.run_until_complete(queue_factory()) # Create a stop condition when receiving SIGINT or SIGTERM. stop: asyncio.Future[None] = loop.create_future() From e44e085e030d186c7bb9822becfbb5423aefe971 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Fri, 7 May 2021 21:21:37 +0200 Subject: [PATCH 275/281] Use relative imports everywhere, for consistency. Fix #946. --- docs/api/extensions.rst | 2 +- docs/extensions.rst | 7 +++---- src/websockets/extensions/__init__.py | 4 ++++ src/websockets/frames.py | 6 +++--- src/websockets/legacy/framing.py | 6 +++--- 5 files changed, 14 insertions(+), 11 deletions(-) diff --git a/docs/api/extensions.rst b/docs/api/extensions.rst index 635c5c426..71f015bb2 100644 --- a/docs/api/extensions.rst +++ b/docs/api/extensions.rst @@ -13,7 +13,7 @@ Per-Message Deflate Abstract classes ---------------- -.. automodule:: websockets.extensions.base +.. automodule:: websockets.extensions .. autoclass:: Extension :members: diff --git a/docs/extensions.rst b/docs/extensions.rst index 151a7e297..042ed3d9a 100644 --- a/docs/extensions.rst +++ b/docs/extensions.rst @@ -91,9 +91,8 @@ As a consequence, writing an extension requires implementing several classes: ``websockets`` provides abstract base classes for extension factories and extensions. See the API documentation for details on their methods: -* :class:`~base.ClientExtensionFactory` and - :class:`~base.ServerExtensionFactory` for extension factories, - -* :class:`~base.Extension` for extensions. +* :class:`ClientExtensionFactory` and class:`ServerExtensionFactory` for + :extension factories, +* :class:`Extension` for extensions. diff --git a/src/websockets/extensions/__init__.py b/src/websockets/extensions/__init__.py index e69de29bb..02838b98a 100644 --- a/src/websockets/extensions/__init__.py +++ b/src/websockets/extensions/__init__.py @@ -0,0 +1,4 @@ +from .base import * + + +__all__ = ["Extension", "ClientExtensionFactory", "ServerExtensionFactory"] diff --git a/src/websockets/frames.py b/src/websockets/frames.py index 71783e176..6e5ef1b73 100644 --- a/src/websockets/frames.py +++ b/src/websockets/frames.py @@ -103,7 +103,7 @@ def parse( *, mask: bool, max_size: Optional[int] = None, - extensions: Optional[Sequence["websockets.extensions.base.Extension"]] = None, + extensions: Optional[Sequence["extensions.Extension"]] = None, ) -> Generator[None, None, "Frame"]: """ Read a WebSocket frame. @@ -172,7 +172,7 @@ def serialize( self, *, mask: bool, - extensions: Optional[Sequence["websockets.extensions.base.Extension"]] = None, + extensions: Optional[Sequence["extensions.Extension"]] = None, ) -> bytes: """ Write a WebSocket frame. @@ -338,4 +338,4 @@ def check_close(code: int) -> None: # at the bottom to allow circular import, because Extension depends on Frame -import websockets.extensions.base # isort:skip # noqa +from . 
import extensions # isort:skip # noqa diff --git a/src/websockets/legacy/framing.py b/src/websockets/legacy/framing.py index e41c295dd..627e6922c 100644 --- a/src/websockets/legacy/framing.py +++ b/src/websockets/legacy/framing.py @@ -31,7 +31,7 @@ async def read( *, mask: bool, max_size: Optional[int] = None, - extensions: Optional[Sequence["websockets.extensions.base.Extension"]] = None, + extensions: Optional[Sequence["extensions.Extension"]] = None, ) -> "Frame": """ Read a WebSocket frame. @@ -102,7 +102,7 @@ def write( write: Callable[[bytes], Any], *, mask: bool, - extensions: Optional[Sequence["websockets.extensions.base.Extension"]] = None, + extensions: Optional[Sequence["extensions.Extension"]] = None, ) -> None: """ Write a WebSocket frame. @@ -132,4 +132,4 @@ def write( # at the bottom to allow circular import, because Extension depends on Frame -import websockets.extensions.base # isort:skip # noqa +from .. import extensions # isort:skip # noqa From b99c4fe390a22cc846ce550a29f2c9841e99660d Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Fri, 7 May 2021 21:44:51 +0200 Subject: [PATCH 276/281] Restore real imports for compatibility with mypy. Fix #940. --- docs/api/client.rst | 2 +- docs/api/index.rst | 9 +++++++-- docs/api/server.rst | 4 ++-- docs/changelog.rst | 16 ++++++++++++++++ docs/extensions.rst | 5 ++--- src/websockets/auth.py | 2 ++ src/websockets/client.py | 12 +++--------- src/websockets/server.py | 12 ++---------- 8 files changed, 35 insertions(+), 27 deletions(-) create mode 100644 src/websockets/auth.py diff --git a/docs/api/client.rst b/docs/api/client.rst index f969227a9..db8cbc914 100644 --- a/docs/api/client.rst +++ b/docs/api/client.rst @@ -1,7 +1,7 @@ Client ====== -.. automodule:: websockets.legacy.client +.. automodule:: websockets.client Opening a connection -------------------- diff --git a/docs/api/index.rst b/docs/api/index.rst index 20bb740b3..0a616cbce 100644 --- a/docs/api/index.rst +++ b/docs/api/index.rst @@ -46,5 +46,10 @@ both in the client API and server API. utilities All public APIs can be imported from the :mod:`websockets` package, unless -noted otherwise. Anything that isn't listed in this API documentation is a -private API, with no guarantees of behavior or backwards-compatibility. +noted otherwise. This convenience feature is incompatible with static code +analysis tools such as mypy_, though. + +.. _mypy: https://github.com/python/mypy + +Anything that isn't listed in this API documentation is a private API. There's +no guarantees of behavior or backwards-compatibility for private APIs. diff --git a/docs/api/server.rst b/docs/api/server.rst index 16c8f6359..9e7b801a9 100644 --- a/docs/api/server.rst +++ b/docs/api/server.rst @@ -1,7 +1,7 @@ Server ====== -.. automodule:: websockets.legacy.server +.. automodule:: websockets.server Starting a server ----------------- @@ -90,7 +90,7 @@ Server Basic authentication -------------------- -.. automodule:: websockets.legacy.auth +.. automodule:: websockets.auth .. autofunction:: basic_auth_protocol_factory diff --git a/docs/changelog.rst b/docs/changelog.rst index fb40aee2a..218bbec3d 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -37,6 +37,8 @@ They may change at any time. * Restored compatibility of ``python -m websockets`` with Python < 3.9. +* Restored compatibility with mypy. + 9.0.1 ..... @@ -73,6 +75,20 @@ They may change at any time. but that never happened. Keeping these APIs public makes it more difficult to improve websockets for no actual benefit. +.. 
note:: + + **Version 9.0 may require changes if you use static code analysis tools.** + + Convenience imports from the ``websockets`` module are performed lazily. + While this is supported by Python, static code analysis tools such as mypy + are unable to understand the behavior. + + If you depend on such tools, use the real import path, which can be found + in the API documentation:: + + from websockets.client import connect + from websockets.server import serve + * Added compatibility with Python 3.9. * Added support for IRIs in addition to URIs. diff --git a/docs/extensions.rst b/docs/extensions.rst index 042ed3d9a..f5e2f497f 100644 --- a/docs/extensions.rst +++ b/docs/extensions.rst @@ -14,9 +14,8 @@ specification, WebSocket Per-Message Deflate, specified in :rfc:`7692`. Per-Message Deflate ------------------- -:func:`~websockets.legacy.client.connect` and -:func:`~websockets.legacy.server.serve` enable the Per-Message Deflate -extension by default. +:func:`~websockets.client.connect` and :func:`~websockets.server.serve` enable +the Per-Message Deflate extension by default. If you want to disable it, set ``compression=None``:: diff --git a/src/websockets/auth.py b/src/websockets/auth.py new file mode 100644 index 000000000..f97c1feb0 --- /dev/null +++ b/src/websockets/auth.py @@ -0,0 +1,2 @@ +# See #940 for why lazy_import isn't used here for backwards compatibility. +from .legacy.auth import * # noqa diff --git a/src/websockets/client.py b/src/websockets/client.py index 91dd1662e..0ddf19f00 100644 --- a/src/websockets/client.py +++ b/src/websockets/client.py @@ -24,7 +24,6 @@ ) from .http import USER_AGENT, build_host from .http11 import Request, Response -from .imports import lazy_import from .typing import ( ConnectionOption, ExtensionHeader, @@ -36,14 +35,9 @@ from .utils import accept_key, generate_key -lazy_import( - globals(), - aliases={ - "connect": ".legacy.client", - "unix_connect": ".legacy.client", - "WebSocketClientProtocol": ".legacy.client", - }, -) +# See #940 for why lazy_import isn't used here for backwards compatibility. +from .legacy.client import * # isort:skip # noqa + __all__ = ["ClientConnection"] diff --git a/src/websockets/server.py b/src/websockets/server.py index 67ab83031..f57d36b70 100644 --- a/src/websockets/server.py +++ b/src/websockets/server.py @@ -26,7 +26,6 @@ ) from .http import USER_AGENT from .http11 import Request, Response -from .imports import lazy_import from .typing import ( ConnectionOption, ExtensionHeader, @@ -37,15 +36,8 @@ from .utils import accept_key -lazy_import( - globals(), - aliases={ - "serve": ".legacy.server", - "unix_serve": ".legacy.server", - "WebSocketServerProtocol": ".legacy.server", - "WebSocketServer": ".legacy.server", - }, -) +# See #940 for why lazy_import isn't used here for backwards compatibility. +from .legacy.server import * # isort:skip # noqa __all__ = ["ServerConnection"] From 0713dbf2d37a8c2c071d8479a6768dd3d3c7dacf Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Fri, 14 May 2021 07:54:58 +0200 Subject: [PATCH 277/281] Add test coverage. --- tests/test_auth.py | 1 + 1 file changed, 1 insertion(+) create mode 100644 tests/test_auth.py diff --git a/tests/test_auth.py b/tests/test_auth.py new file mode 100644 index 000000000..d5a8bd9ad --- /dev/null +++ b/tests/test_auth.py @@ -0,0 +1 @@ +from websockets.auth import * # noqa From 8900c13d3234c8ae87b0d852e849eaf6bf7cf8b7 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 15 May 2021 14:19:47 +0200 Subject: [PATCH 278/281] Add mypy to dictionary. 
--- docs/spelling_wordlist.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index d7c744147..4d8fc1e2d 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -31,6 +31,7 @@ lifecycle Lifecycle lookups MiB +mypy nginx parsers permessage From a14226afb77b524c2ced7d649ac7420a14992716 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sat, 15 May 2021 18:01:23 +0200 Subject: [PATCH 279/281] Bump version number. --- docs/changelog.rst | 2 +- src/websockets/version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 218bbec3d..1064af736 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -33,7 +33,7 @@ They may change at any time. 9.0.2 ..... -*In development* +*May 15, 2021* * Restored compatibility of ``python -m websockets`` with Python < 3.9. diff --git a/src/websockets/version.py b/src/websockets/version.py index 23b7f329b..02dbe9d3c 100644 --- a/src/websockets/version.py +++ b/src/websockets/version.py @@ -1 +1 @@ -version = "9.0.1" +version = "9.0.2" From 547a26b685d08cac0aa64e5e65f7867ac0ea9bc0 Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Sun, 23 May 2021 18:51:27 +0200 Subject: [PATCH 280/281] Use constant-time comparison for passwords. Backport of c91b4c2a and dfecbd03. --- docs/changelog.rst | 6 ++++++ src/websockets/legacy/auth.py | 28 +++++++++++++++------------- tests/legacy/test_auth.py | 11 +++++++++-- 3 files changed, 30 insertions(+), 15 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index 1064af736..f3e1acf08 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -30,6 +30,12 @@ They may change at any time. *In development* +.. note:: + + **Version 9.1 fixes a security issue introduced in version 8.0.** + + Version 8.0 was vulnerable to timing attacks on HTTP Basic Auth passwords. + 9.0.2 ..... diff --git a/src/websockets/legacy/auth.py b/src/websockets/legacy/auth.py index e0beede57..80ceff28d 100644 --- a/src/websockets/legacy/auth.py +++ b/src/websockets/legacy/auth.py @@ -6,6 +6,7 @@ import functools +import hmac import http from typing import Any, Awaitable, Callable, Iterable, Optional, Tuple, Union, cast @@ -132,24 +133,23 @@ def basic_auth_protocol_factory( if credentials is not None: if is_credentials(credentials): - - async def check_credentials(username: str, password: str) -> bool: - return (username, password) == credentials - + credentials_list = [cast(Credentials, credentials)] elif isinstance(credentials, Iterable): credentials_list = list(credentials) - if all(is_credentials(item) for item in credentials_list): - credentials_dict = dict(credentials_list) - - async def check_credentials(username: str, password: str) -> bool: - return credentials_dict.get(username) == password - - else: + if not all(is_credentials(item) for item in credentials_list): raise TypeError(f"invalid credentials argument: {credentials}") - else: raise TypeError(f"invalid credentials argument: {credentials}") + credentials_dict = dict(credentials_list) + + async def check_credentials(username: str, password: str) -> bool: + try: + expected_password = credentials_dict[username] + except KeyError: + return False + return hmac.compare_digest(expected_password, password) + if create_protocol is None: # Not sure why mypy cannot figure this out. 
create_protocol = cast( @@ -158,5 +158,7 @@ async def check_credentials(username: str, password: str) -> bool: ) return functools.partial( - create_protocol, realm=realm, check_credentials=check_credentials + create_protocol, + realm=realm, + check_credentials=check_credentials, ) diff --git a/tests/legacy/test_auth.py b/tests/legacy/test_auth.py index bb8c6a6eb..3d8eb90d7 100644 --- a/tests/legacy/test_auth.py +++ b/tests/legacy/test_auth.py @@ -1,3 +1,4 @@ +import hmac import unittest import urllib.error @@ -76,7 +77,7 @@ def test_basic_auth_bad_multiple_credentials(self): ) async def check_credentials(username, password): - return password == "iloveyou" + return hmac.compare_digest(password, "iloveyou") create_protocol_check_credentials = basic_auth_protocol_factory( realm="auth-tests", @@ -140,7 +141,13 @@ def test_basic_auth_unsupported_credentials_details(self): self.assertEqual(raised.exception.read().decode(), "Unsupported credentials\n") @with_server(create_protocol=create_protocol) - def test_basic_auth_invalid_credentials(self): + def test_basic_auth_invalid_username(self): + with self.assertRaises(InvalidStatusCode) as raised: + self.start_client(user_info=("goodbye", "iloveyou")) + self.assertEqual(raised.exception.status_code, 401) + + @with_server(create_protocol=create_protocol) + def test_basic_auth_invalid_password(self): with self.assertRaises(InvalidStatusCode) as raised: self.start_client(user_info=("hello", "ihateyou")) self.assertEqual(raised.exception.status_code, 401) From d0f328888f3e695aa64d78dcf48af4ece219221b Mon Sep 17 00:00:00 2001 From: Aymeric Augustin Date: Thu, 27 May 2021 13:32:46 +0200 Subject: [PATCH 281/281] Bump version number. --- docs/changelog.rst | 2 +- docs/conf.py | 4 ++-- src/websockets/version.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/changelog.rst b/docs/changelog.rst index f3e1acf08..a82008a49 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -28,7 +28,7 @@ They may change at any time. 9.1 ... -*In development* +*May 27, 2021* .. note:: diff --git a/docs/conf.py b/docs/conf.py index dad7475f7..2246c0287 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -59,9 +59,9 @@ # built documents. # # The short X.Y version. -version = '9.0' +version = '9.1' # The full version, including alpha/beta/rc tags. -release = '9.0' +release = '9.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/src/websockets/version.py b/src/websockets/version.py index 02dbe9d3c..a7901ef92 100644 --- a/src/websockets/version.py +++ b/src/websockets/version.py @@ -1 +1 @@ -version = "9.0.2" +version = "9.1"
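
A note on the security fix in patch 280: the same constant-time comparison matters when a caller supplies its own ``check_credentials`` coroutine to ``basic_auth_protocol_factory`` instead of static credentials. The sketch below mirrors the ``check_credentials`` logic that patch 280 adds; it is illustrative only — the in-memory ``CREDENTIALS`` dict and the ``"auth-example"`` realm are placeholders, not part of the patches above::

    import hmac

    from websockets.auth import basic_auth_protocol_factory

    # Placeholder credential store; a real deployment would load this from
    # configuration or a database.
    CREDENTIALS = {"hello": "iloveyou"}

    async def check_credentials(username: str, password: str) -> bool:
        try:
            expected_password = CREDENTIALS[username]
        except KeyError:
            return False
        # hmac.compare_digest runs in constant time, so the check does not
        # leak how long a matching prefix of the password was.
        return hmac.compare_digest(expected_password, password)

    create_protocol = basic_auth_protocol_factory(
        realm="auth-example",
        check_credentials=check_credentials,
    )

    # create_protocol is then passed to websockets.server.serve(), e.g.:
    # serve(handler, "localhost", 8765, create_protocol=create_protocol)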