Bump cheroot from 8.5.2 to 8.6.0 (#1603)

* Bump cheroot from 8.5.2 to 8.6.0

Bumps [cheroot](https://github.com/cherrypy/cheroot) from 8.5.2 to 8.6.0.
- [Release notes](https://github.com/cherrypy/cheroot/releases)
- [Changelog](https://github.com/cherrypy/cheroot/blob/master/CHANGES.rst)
- [Commits](https://github.com/cherrypy/cheroot/compare/v8.5.2...v8.6.0)

---
updated-dependencies:
- dependency-name: cheroot
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

* Update cheroot==8.6.0

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: JonnyWong16 <9099342+JonnyWong16@users.noreply.github.com>
This commit is contained in:
dependabot[bot] 2022-01-04 13:20:50 -08:00 committed by GitHub
parent bb5ebe0fa5
commit 3689834051
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
30 changed files with 708 additions and 113 deletions

1
lib/cheroot/__init__.pyi Normal file
View file

@ -0,0 +1 @@
__version__: str

32
lib/cheroot/cli.pyi Normal file
View file

@ -0,0 +1,32 @@
from typing import Any
class BindLocation: ...
class TCPSocket(BindLocation):
bind_addr: Any
def __init__(self, address, port) -> None: ...
class UnixSocket(BindLocation):
bind_addr: Any
def __init__(self, path) -> None: ...
class AbstractSocket(BindLocation):
bind_addr: Any
def __init__(self, abstract_socket) -> None: ...
class Application:
@classmethod
def resolve(cls, full_path): ...
wsgi_app: Any
def __init__(self, wsgi_app) -> None: ...
def server_args(self, parsed_args): ...
def server(self, parsed_args): ...
class GatewayYo:
gateway: Any
def __init__(self, gateway) -> None: ...
def server(self, parsed_args): ...
def parse_wsgi_bind_location(bind_addr_string: str): ...
def parse_wsgi_bind_addr(bind_addr_string: str): ...
def main() -> None: ...

View file

@ -12,6 +12,7 @@ import time
from . import errors
from ._compat import selectors
from ._compat import suppress
from ._compat import IS_WINDOWS
from .makefile import MakeFile
import six
@ -152,17 +153,18 @@ class ConnectionManager:
conn.socket.fileno(), selectors.EVENT_READ, data=conn,
)
def _expire(self):
"""Expire least recently used connections.
def _expire(self, threshold):
r"""Expire least recently used connections.
This happens if there are either too many open connections, or if the
connections have been timed out.
:param threshold: Connections that have not been used within this \
duration (in seconds), are considered expired and \
are closed and removed.
:type threshold: float
This should be called periodically.
"""
# find any connections still registered with the selector
# that have not been active recently enough.
threshold = time.time() - self.server.timeout
timed_out_connections = [
(sock_fd, conn)
for (sock_fd, conn) in self._selector.connections
@ -203,11 +205,37 @@ class ConnectionManager:
self._serving = False
def _run(self, expiration_interval):
r"""Run connection handler loop until stop was requested.
:param expiration_interval: Interval, in seconds, at which \
connections will be checked for \
expiration.
:type expiration_interval: float
Use ``expiration_interval`` as ``select()`` timeout
to assure expired connections are closed in time.
On Windows cap the timeout to 0.05 seconds
as ``select()`` does not return when a socket is ready.
"""
last_expiration_check = time.time()
if IS_WINDOWS:
# 0.05 seconds are used as an empirically obtained balance between
# max connection delay and idle system load. Benchmarks show a
# mean processing time per connection of ~0.03 seconds on Linux
# and with 0.01 seconds timeout on Windows:
# https://github.com/cherrypy/cheroot/pull/352
# While this highly depends on system and hardware, 0.05 seconds
# max delay should hence usually not significantly increase the
# mean time/delay per connection, but significantly reduce idle
# system load by reducing socket loops to 1/5 with 0.01 seconds.
select_timeout = min(expiration_interval, 0.05)
else:
select_timeout = expiration_interval
while not self._stop_requested:
try:
active_list = self._selector.select(timeout=0.01)
active_list = self._selector.select(timeout=select_timeout)
except OSError:
self._remove_invalid_sockets()
continue
@ -226,7 +254,7 @@ class ConnectionManager:
now = time.time()
if (now - last_expiration_check) > expiration_interval:
self._expire()
self._expire(threshold=now - self.server.timeout)
last_expiration_check = now
def _remove_invalid_sockets(self):

View file

@ -0,0 +1,23 @@
from typing import Any
def prevent_socket_inheritance(sock) -> None: ...
class _ThreadsafeSelector:
def __init__(self) -> None: ...
def __len__(self): ...
@property
def connections(self) -> None: ...
def register(self, fileobj, events, data: Any | None = ...): ...
def unregister(self, fileobj): ...
def select(self, timeout: Any | None = ...): ...
def close(self) -> None: ...
class ConnectionManager:
server: Any
def __init__(self, server) -> None: ...
def put(self, conn) -> None: ...
def stop(self) -> None: ...
def run(self, expiration_interval) -> None: ...
def close(self) -> None: ...
@property
def can_add_keepalive_connection(self): ...

13
lib/cheroot/errors.pyi Normal file
View file

@ -0,0 +1,13 @@
from typing import Any, List, Set, Tuple
class MaxSizeExceeded(Exception): ...
class NoSSLError(Exception): ...
class FatalSSLAlert(Exception): ...
def plat_specific_errors(*errnames: str) -> List[int]: ...
socket_error_eintr: List[int]
socket_errors_to_ignore: List[int]
socket_errors_nonblocking: List[int]
acceptable_sock_shutdown_error_codes: Set[int]
acceptable_sock_shutdown_exceptions: Tuple[Exception]

32
lib/cheroot/makefile.pyi Normal file
View file

@ -0,0 +1,32 @@
import io
SOCK_WRITE_BLOCKSIZE: int
class BufferedWriter(io.BufferedWriter):
def write(self, b): ...
class MakeFile_PY2:
bytes_read: int
bytes_written: int
def __init__(self, *args, **kwargs) -> None: ...
def write(self, data) -> None: ...
def send(self, data): ...
def flush(self) -> None: ...
def recv(self, size): ...
class FauxSocket: ...
def read(self, size: int = ...): ...
def readline(self, size: int = ...): ...
def has_data(self): ...
class StreamReader(io.BufferedReader):
bytes_read: int
def __init__(self, sock, mode: str = ..., bufsize=...) -> None: ...
def read(self, *args, **kwargs): ...
def has_data(self): ...
class StreamWriter(BufferedWriter):
bytes_written: int
def __init__(self, sock, mode: str = ..., bufsize=...) -> None: ...
def write(self, val, *args, **kwargs): ...
def MakeFile(sock, mode: str = ..., bufsize=...): ...

View file

@ -57,6 +57,10 @@ will run the server forever) or use invoking :func:`prepare()
And now for a trivial doctest to exercise the test suite
.. testsetup::
from cheroot.server import HTTPServer
>>> 'HTTPServer' in globals()
True
"""
@ -273,7 +277,8 @@ class SizeCheckWrapper:
def read(self, size=None):
"""Read a chunk from ``rfile`` buffer and return it.
:param int size: amount of data to read
:param size: amount of data to read
:type size: int
:returns: chunk from ``rfile``, limited by size if specified
:rtype: bytes
@ -286,7 +291,8 @@ class SizeCheckWrapper:
def readline(self, size=None):
"""Read a single line from ``rfile`` buffer and return it.
:param int size: minimum amount of data to read
:param size: minimum amount of data to read
:type size: int
:returns: one line from ``rfile``
:rtype: bytes
@ -312,7 +318,8 @@ class SizeCheckWrapper:
def readlines(self, sizehint=0):
"""Read all lines from ``rfile`` buffer and return them.
:param int sizehint: hint of minimum amount of data to read
:param sizehint: hint of minimum amount of data to read
:type sizehint: int
:returns: lines of bytes read from ``rfile``
:rtype: list[bytes]
@ -362,7 +369,8 @@ class KnownLengthRFile:
def read(self, size=None):
"""Read a chunk from ``rfile`` buffer and return it.
:param int size: amount of data to read
:param size: amount of data to read
:type size: int
:rtype: bytes
:returns: chunk from ``rfile``, limited by size if specified
@ -381,7 +389,8 @@ class KnownLengthRFile:
def readline(self, size=None):
"""Read a single line from ``rfile`` buffer and return it.
:param int size: minimum amount of data to read
:param size: minimum amount of data to read
:type size: int
:returns: one line from ``rfile``
:rtype: bytes
@ -400,7 +409,8 @@ class KnownLengthRFile:
def readlines(self, sizehint=0):
"""Read all lines from ``rfile`` buffer and return them.
:param int sizehint: hint of minimum amount of data to read
:param sizehint: hint of minimum amount of data to read
:type sizehint: int
:returns: lines of bytes read from ``rfile``
:rtype: list[bytes]
@ -501,7 +511,8 @@ class ChunkedRFile:
def read(self, size=None):
"""Read a chunk from ``rfile`` buffer and return it.
:param int size: amount of data to read
:param size: amount of data to read
:type size: int
:returns: chunk from ``rfile``, limited by size if specified
:rtype: bytes
@ -532,7 +543,8 @@ class ChunkedRFile:
def readline(self, size=None):
"""Read a single line from ``rfile`` buffer and return it.
:param int size: minimum amount of data to read
:param size: minimum amount of data to read
:type size: int
:returns: one line from ``rfile``
:rtype: bytes
@ -573,7 +585,8 @@ class ChunkedRFile:
def readlines(self, sizehint=0):
"""Read all lines from ``rfile`` buffer and return them.
:param int sizehint: hint of minimum amount of data to read
:param sizehint: hint of minimum amount of data to read
:type sizehint: int
:returns: lines of bytes read from ``rfile``
:rtype: list[bytes]
@ -1777,7 +1790,7 @@ class HTTPServer:
info = [(sock_type, socket.SOCK_STREAM, 0, '', bind_addr)]
for res in info:
af, socktype, proto, canonname, sa = res
af, socktype, proto, _canonname, sa = res
try:
self.bind(af, socktype, proto)
break
@ -1841,7 +1854,7 @@ class HTTPServer:
"""Context manager for running this server in a thread."""
self.prepare()
thread = threading.Thread(target=self.serve)
thread.setDaemon(True)
thread.daemon = True
thread.start()
try:
yield thread
@ -2118,7 +2131,7 @@ class HTTPServer:
host, port, socket.AF_UNSPEC,
socket.SOCK_STREAM,
):
af, socktype, proto, canonname, sa = res
af, socktype, proto, _canonname, _sa = res
s = None
try:
s = socket.socket(af, socktype, proto)

172
lib/cheroot/server.pyi Normal file
View file

@ -0,0 +1,172 @@
from typing import Any
class HeaderReader:
def __call__(self, rfile, hdict: Any | None = ...): ...
class DropUnderscoreHeaderReader(HeaderReader): ...
class SizeCheckWrapper:
rfile: Any
maxlen: Any
bytes_read: int
def __init__(self, rfile, maxlen) -> None: ...
def read(self, size: Any | None = ...): ...
def readline(self, size: Any | None = ...): ...
def readlines(self, sizehint: int = ...): ...
def close(self) -> None: ...
def __iter__(self): ...
def __next__(self): ...
next: Any
class KnownLengthRFile:
rfile: Any
remaining: Any
def __init__(self, rfile, content_length) -> None: ...
def read(self, size: Any | None = ...): ...
def readline(self, size: Any | None = ...): ...
def readlines(self, sizehint: int = ...): ...
def close(self) -> None: ...
def __iter__(self): ...
def __next__(self): ...
next: Any
class ChunkedRFile:
rfile: Any
maxlen: Any
bytes_read: int
buffer: Any
bufsize: Any
closed: bool
def __init__(self, rfile, maxlen, bufsize: int = ...) -> None: ...
def read(self, size: Any | None = ...): ...
def readline(self, size: Any | None = ...): ...
def readlines(self, sizehint: int = ...): ...
def read_trailer_lines(self) -> None: ...
def close(self) -> None: ...
class HTTPRequest:
server: Any
conn: Any
inheaders: Any
outheaders: Any
ready: bool
close_connection: bool
chunked_write: bool
header_reader: Any
started_request: bool
scheme: bytes
response_protocol: str
status: str
sent_headers: bool
chunked_read: bool
proxy_mode: Any
strict_mode: Any
def __init__(self, server, conn, proxy_mode: bool = ..., strict_mode: bool = ...) -> None: ...
rfile: Any
def parse_request(self) -> None: ...
uri: Any
method: Any
authority: Any
path: Any
qs: Any
request_protocol: Any
def read_request_line(self): ...
def read_request_headers(self): ...
def respond(self) -> None: ...
def simple_response(self, status, msg: str = ...) -> None: ...
def ensure_headers_sent(self) -> None: ...
def write(self, chunk) -> None: ...
def send_headers(self) -> None: ...
class HTTPConnection:
remote_addr: Any
remote_port: Any
ssl_env: Any
rbufsize: Any
wbufsize: Any
RequestHandlerClass: Any
peercreds_enabled: bool
peercreds_resolve_enabled: bool
last_used: Any
server: Any
socket: Any
rfile: Any
wfile: Any
requests_seen: int
def __init__(self, server, sock, makefile=...) -> None: ...
def communicate(self): ...
linger: bool
def close(self) -> None: ...
def get_peer_creds(self): ...
@property
def peer_pid(self): ...
@property
def peer_uid(self): ...
@property
def peer_gid(self): ...
def resolve_peer_creds(self): ...
@property
def peer_user(self): ...
@property
def peer_group(self): ...
class HTTPServer:
gateway: Any
minthreads: Any
maxthreads: Any
server_name: Any
protocol: str
request_queue_size: int
shutdown_timeout: int
timeout: int
expiration_interval: float
version: Any
software: Any
ready: bool
max_request_header_size: int
max_request_body_size: int
nodelay: bool
ConnectionClass: Any
ssl_adapter: Any
peercreds_enabled: bool
peercreds_resolve_enabled: bool
keep_alive_conn_limit: int
requests: Any
def __init__(self, bind_addr, gateway, minthreads: int = ..., maxthreads: int = ..., server_name: Any | None = ..., peercreds_enabled: bool = ..., peercreds_resolve_enabled: bool = ...) -> None: ...
stats: Any
def clear_stats(self): ...
def runtime(self): ...
@property
def bind_addr(self): ...
@bind_addr.setter
def bind_addr(self, value) -> None: ...
def safe_start(self) -> None: ...
socket: Any
def prepare(self) -> None: ...
def serve(self) -> None: ...
def start(self) -> None: ...
@property
def can_add_keepalive_connection(self): ...
def put_conn(self, conn) -> None: ...
def error_log(self, msg: str = ..., level: int = ..., traceback: bool = ...) -> None: ...
def bind(self, family, type, proto: int = ...): ...
def bind_unix_socket(self, bind_addr): ...
@staticmethod
def prepare_socket(bind_addr, family, type, proto, nodelay, ssl_adapter): ...
@staticmethod
def bind_socket(socket_, bind_addr): ...
@staticmethod
def resolve_real_bind_addr(socket_): ...
def process_conn(self, conn) -> None: ...
@property
def interrupt(self): ...
@interrupt.setter
def interrupt(self, interrupt) -> None: ...
def stop(self) -> None: ...
class Gateway:
req: Any
def __init__(self, req) -> None: ...
def respond(self) -> None: ...
def get_ssl_adapter_class(name: str = ...): ...

View file

@ -0,0 +1,19 @@
from abc import abstractmethod
from typing import Any
class Adapter():
certificate: Any
private_key: Any
certificate_chain: Any
ciphers: Any
context: Any
@abstractmethod
def __init__(self, certificate, private_key, certificate_chain: Any | None = ..., ciphers: Any | None = ...): ...
@abstractmethod
def bind(self, sock): ...
@abstractmethod
def wrap(self, sock): ...
@abstractmethod
def get_environ(self): ...
@abstractmethod
def makefile(self, sock, mode: str = ..., bufsize: int = ...): ...

View file

@ -0,0 +1,18 @@
from typing import Any
from . import Adapter
generic_socket_error: OSError
DEFAULT_BUFFER_SIZE: int
class BuiltinSSLAdapter(Adapter):
CERT_KEY_TO_ENV: Any
CERT_KEY_TO_LDAP_CODE: Any
def __init__(self, certificate, private_key, certificate_chain: Any | None = ..., ciphers: Any | None = ...) -> None: ...
@property
def context(self): ...
@context.setter
def context(self, context) -> None: ...
def bind(self, sock): ...
def wrap(self, sock): ...
def get_environ(self): ...
def makefile(self, sock, mode: str = ..., bufsize: int = ...): ...

View file

@ -0,0 +1,30 @@
from . import Adapter
from ..makefile import StreamReader, StreamWriter
from OpenSSL import SSL
from typing import Any
ssl_conn_type: SSL.Connection
class SSLFileobjectMixin:
ssl_timeout: int
ssl_retry: float
def recv(self, size): ...
def readline(self, size: int = ...): ...
def sendall(self, *args, **kwargs): ...
def send(self, *args, **kwargs): ...
class SSLFileobjectStreamReader(SSLFileobjectMixin, StreamReader): ... # type:ignore
class SSLFileobjectStreamWriter(SSLFileobjectMixin, StreamWriter): ... # type:ignore
class SSLConnectionProxyMeta:
def __new__(mcl, name, bases, nmspc): ...
class SSLConnection():
def __init__(self, *args) -> None: ...
class pyOpenSSLAdapter(Adapter):
def __init__(self, certificate, private_key, certificate_chain: Any | None = ..., ciphers: Any | None = ...) -> None: ...
def bind(self, sock): ...
def wrap(self, sock): ...
def get_environ(self): ...
def makefile(self, sock, mode: str = ..., bufsize: int = ...): ...

View file

@ -35,4 +35,16 @@ def pytest_load_initial_conftests(early_config, parser, args):
'<socket.socket fd=-1, family=AddressFamily.AF_INET6, '
'type=SocketKind.SOCK_STREAM, proto=.:'
'pytest.PytestUnraisableExceptionWarning:_pytest.unraisableexception',
'ignore:Exception ignored in. '
'<socket.socket fd=-1, family=AF_INET, '
'type=SocketKind.SOCK_STREAM, proto=.:'
'pytest.PytestUnraisableExceptionWarning:_pytest.unraisableexception',
'ignore:Exception ignored in. '
'<socket.socket fd=-1, family=AF_INET6, '
'type=SocketKind.SOCK_STREAM, proto=.:'
'pytest.PytestUnraisableExceptionWarning:_pytest.unraisableexception',
'ignore:Exception ignored in. '
'<ssl.SSLSocket fd=-1, family=AddressFamily.AF_UNIX, '
'type=SocketKind.SOCK_STREAM, proto=.:'
'pytest.PytestUnraisableExceptionWarning:_pytest.unraisableexception',
))

View file

@ -5,7 +5,7 @@ itself, useless for end-users' app testing.
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
__metaclass__ = type # pylint: disable=invalid-name
import threading
import time
@ -13,19 +13,21 @@ import time
import pytest
from ..server import Gateway, HTTPServer
from ..testing import ( # noqa: F401
from ..testing import ( # noqa: F401 # pylint: disable=unused-import
native_server, wsgi_server,
)
from ..testing import get_server_client
@pytest.fixture
# pylint: disable=redefined-outer-name
def wsgi_server_client(wsgi_server): # noqa: F811
"""Create a test client out of given WSGI server."""
return get_server_client(wsgi_server)
@pytest.fixture
# pylint: disable=redefined-outer-name
def native_server_client(native_server): # noqa: F811
"""Create a test client out of given HTTP server."""
return get_server_client(native_server)
@ -43,7 +45,7 @@ def http_server():
yield httpserver
srv_creator = iter(start_srv())
next(srv_creator)
next(srv_creator) # pylint: disable=stop-iteration-return
yield srv_creator
try:
while True:

View file

@ -87,6 +87,7 @@ def wsgi_app(monkeypatch):
('main', 'main'),
),
)
# pylint: disable=invalid-name
def test_Aplication_resolve(app_name, app_method, wsgi_app):
"""Check the wsgi application name conversion."""
if app_name is None:

View file

@ -17,10 +17,13 @@ import pytest
from jaraco.text import trim, unwrap
from cheroot.test import helper, webtest
from cheroot._compat import IS_CI, IS_PYPY, IS_WINDOWS
from cheroot._compat import IS_CI, IS_MACOS, IS_PYPY, IS_WINDOWS
import cheroot.server
IS_SLOW_ENV = IS_MACOS or IS_WINDOWS
timeout = 1
pov = 'pPeErRsSiIsStTeEnNcCeE oOfF vViIsSiIoOnN'
@ -169,6 +172,7 @@ def testing_server(raw_testing_server, monkeypatch):
# Teardown verification, in case that the server logged an
# error that wasn't notified to the client or we just made a mistake.
# pylint: disable=possibly-unused-variable
for c_msg, c_level, c_traceback in raw_testing_server.error_log.calls:
if c_level <= logging.WARNING:
continue
@ -651,9 +655,13 @@ def test_broken_connection_during_tcp_fin(
mocker.mock_module.Mock(side_effect=exc_instance),
)
_close_kernel_socket.fin_spy = mocker.spy(self.socket, 'shutdown')
_close_kernel_socket.exception_leaked = True
old_close_kernel_socket(self)
_close_kernel_socket.exception_leaked = False
try:
old_close_kernel_socket(self)
except simulated_exception:
_close_kernel_socket.exception_leaked = True
else:
_close_kernel_socket.exception_leaked = False
monkeypatch.setattr(
test_client.server_instance.ConnectionClass,
@ -668,7 +676,8 @@ def test_broken_connection_during_tcp_fin(
conn.send(('Host: %s' % conn.host).encode('ascii'))
conn.close()
for _ in range(10): # Let the server attempt TCP shutdown
# Let the server attempt TCP shutdown:
for _ in range(10 * (2 if IS_SLOW_ENV else 1)):
time.sleep(0.1)
if hasattr(_close_kernel_socket, 'exception_leaked'):
break
@ -867,7 +876,7 @@ def test_100_Continue(test_client):
conn.endheaders()
conn.send(b"d'oh")
response = conn.response_class(conn.sock, method='POST')
version, status, reason = response._read_status()
_version, status, _reason = response._read_status()
assert status != 100
conn.close()
@ -900,7 +909,7 @@ def test_100_Continue(test_client):
# ...get the final response
response.begin()
status_line, actual_headers, actual_resp_body = webtest.shb(response)
status_line, _actual_headers, actual_resp_body = webtest.shb(response)
actual_status = int(status_line[:3])
assert actual_status == 200
expected_resp_body = ("thanks for '%s'" % body).encode()
@ -933,7 +942,7 @@ def test_readall_or_close(test_client, max_request_body_size):
response = conn.response_class(conn.sock, method='POST')
# ...assert and then skip the 100 response
version, status, reason = response._read_status()
_version, status, _reason = response._read_status()
assert status == 100
skip = True
while skip:
@ -944,7 +953,7 @@ def test_readall_or_close(test_client, max_request_body_size):
# ...get the final response
response.begin()
status_line, actual_headers, actual_resp_body = webtest.shb(response)
status_line, _actual_headers, actual_resp_body = webtest.shb(response)
actual_status = int(status_line[:3])
assert actual_status == 500
@ -1049,7 +1058,7 @@ def test_Chunked_Encoding(test_client):
conn.endheaders()
conn.send(body)
response = conn.getresponse()
status_line, actual_headers, actual_resp_body = webtest.shb(response)
status_line, _actual_headers, actual_resp_body = webtest.shb(response)
actual_status = int(status_line[:3])
assert actual_status == 200
assert status_line[4:] == 'OK'
@ -1089,7 +1098,7 @@ def test_Content_Length_in(test_client):
conn.putheader('Content-Length', '9999')
conn.endheaders()
response = conn.getresponse()
status_line, actual_headers, actual_resp_body = webtest.shb(response)
status_line, _actual_headers, actual_resp_body = webtest.shb(response)
actual_status = int(status_line[:3])
assert actual_status == 413
expected_resp_body = (
@ -1102,7 +1111,7 @@ def test_Content_Length_in(test_client):
def test_Content_Length_not_int(test_client):
"""Test that malicious Content-Length header returns 400."""
status_line, actual_headers, actual_resp_body = test_client.post(
status_line, _actual_headers, actual_resp_body = test_client.post(
'/upload',
headers=[
('Content-Type', 'text/plain'),
@ -1142,7 +1151,7 @@ def test_Content_Length_out(
conn.endheaders()
response = conn.getresponse()
status_line, actual_headers, actual_resp_body = webtest.shb(response)
status_line, _actual_headers, actual_resp_body = webtest.shb(response)
actual_status = int(status_line[:3])
assert actual_status == expected_resp_status
@ -1281,7 +1290,7 @@ def test_invalid_selected_connection(test_client, monkeypatch):
# request a page with connection keep-alive to make sure
# we'll have a connection to be modified.
resp_status, resp_headers, resp_body = test_client.request(
resp_status, _resp_headers, _resp_body = test_client.request(
'/page1', headers=[('Connection', 'Keep-Alive')],
)

View file

@ -43,6 +43,7 @@ class HelloController(helper.Controller):
def asterisk(req, resp):
"""Render request method value."""
# pylint: disable=possibly-unused-variable
method = req.environ.get('REQUEST_METHOD', 'NO METHOD FOUND')
tmpl = 'Got asterisk URI path with {method} method'
return tmpl.format(**locals())

View file

@ -4,7 +4,7 @@ import pytest
from cheroot import errors
from .._compat import IS_LINUX, IS_MACOS, IS_WINDOWS
from .._compat import IS_LINUX, IS_MACOS, IS_WINDOWS # noqa: WPS130
@pytest.mark.parametrize(

View file

@ -5,12 +5,10 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from contextlib import closing
import os
import socket
import tempfile
import threading
import time
import uuid
import pytest
@ -18,6 +16,7 @@ import requests
import requests_unixsocket
import six
from pypytools.gc.custom import DefaultGc
from six.moves import queue, urllib
from .._compat import bton, ntob
@ -30,6 +29,9 @@ from ..testing import (
)
IS_SLOW_ENV = IS_MACOS or IS_WINDOWS
unix_only_sock_test = pytest.mark.skipif(
not hasattr(socket, 'AF_UNIX'),
reason='UNIX domain sockets are only available under UNIX-based OS',
@ -181,7 +183,9 @@ def test_serving_is_false_and_stop_returns_after_ctrlc():
serve_thread.start()
# The thread should exit right away due to the interrupt.
serve_thread.join(httpserver.expiration_interval * 2)
serve_thread.join(
httpserver.expiration_interval * (4 if IS_SLOW_ENV else 2),
)
assert not serve_thread.is_alive()
assert not httpserver._connections._serving
@ -263,6 +267,7 @@ def test_peercreds_unix_sock(peercreds_enabled_server):
if isinstance(bind_addr, six.binary_type):
bind_addr = bind_addr.decode()
# pylint: disable=possibly-unused-variable
quoted = urllib.parse.quote(bind_addr, safe='')
unix_base_uri = 'http+unix://{quoted}'.format(**locals())
@ -295,6 +300,7 @@ def test_peercreds_unix_sock_with_lookup(peercreds_enabled_server):
if isinstance(bind_addr, six.binary_type):
bind_addr = bind_addr.decode()
# pylint: disable=possibly-unused-variable
quoted = urllib.parse.quote(bind_addr, safe='')
unix_base_uri = 'http+unix://{quoted}'.format(**locals())
@ -325,7 +331,7 @@ def test_peercreds_unix_sock_with_lookup(peercreds_enabled_server):
indirect=('resource_limit',),
)
@pytest.mark.usefixtures('many_open_sockets')
def test_high_number_of_file_descriptors(resource_limit):
def test_high_number_of_file_descriptors(native_server_client, resource_limit):
"""Test the server does not crash with a high file-descriptor value.
This test shouldn't cause a server crash when trying to access
@ -337,26 +343,24 @@ def test_high_number_of_file_descriptors(resource_limit):
# We want to force the server to use a file-descriptor with
# a number above resource_limit
# Create our server
httpserver = HTTPServer(
bind_addr=(ANY_INTERFACE_IPV4, EPHEMERAL_PORT), gateway=Gateway,
)
# Patch the method that processes
_old_process_conn = native_server_client.server_instance.process_conn
try:
# This will trigger a crash if select() is used in the implementation
with httpserver._run_in_thread():
# allow server to run long enough to invoke select()
time.sleep(1.0)
except: # noqa: E722
raise # only needed for `else` to work
else:
# We use closing here for py2-compat
with closing(socket.socket()) as sock:
# Check new sockets created are still above our target number
assert sock.fileno() >= resource_limit
finally:
# Stop our server
httpserver.stop()
def native_process_conn(conn):
native_process_conn.filenos.add(conn.socket.fileno())
return _old_process_conn(conn)
native_process_conn.filenos = set()
native_server_client.server_instance.process_conn = native_process_conn
# Trigger a crash if select() is used in the implementation
native_server_client.connect('/')
# Ensure that at least one connection got accepted, otherwise the
# follow-up check wouldn't make sense
assert len(native_process_conn.filenos) > 0
# Check at least one of the sockets created are above the target number
assert any(fn >= resource_limit for fn in native_process_conn.filenos)
if not IS_WINDOWS:
@ -365,6 +369,13 @@ if not IS_WINDOWS:
)
@pytest.fixture
def _garbage_bin():
"""Disable garbage collection when this fixture is in use."""
with DefaultGc().nogc():
yield
@pytest.fixture
def resource_limit(request):
"""Set the resource limit two times bigger then requested."""
@ -392,25 +403,26 @@ def resource_limit(request):
@pytest.fixture
def many_open_sockets(resource_limit):
def many_open_sockets(request, resource_limit):
"""Allocate a lot of file descriptors by opening dummy sockets."""
# NOTE: `@pytest.mark.usefixtures` doesn't work on fixtures which
# NOTE: forces us to invoke this one dynamically to avoid having an
# NOTE: unused argument.
request.getfixturevalue('_garbage_bin')
# Hoard a lot of file descriptors by opening and storing a lot of sockets
test_sockets = []
# Open a lot of file descriptors, so the next one the server
# opens is a high number
try:
for i in range(resource_limit):
for _ in range(resource_limit):
sock = socket.socket()
test_sockets.append(sock)
# NOTE: We used to interrupt the loop early but this doesn't seem
# NOTE: to work well in envs with indeterministic runtimes like
# NOTE: PyPy. It looks like sometimes it frees some file
# NOTE: descriptors in between running this fixture and the actual
# NOTE: test code so the early break has been removed to try
# NOTE: address that. The approach may need to be rethought if the
# NOTE: issue reoccurs. Another approach may be disabling the GC.
# If we reach a high enough number, we don't need to open more
if sock.fileno() >= resource_limit:
break
# Check we opened enough descriptors to reach a high number
the_highest_fileno = max(sock.fileno() for sock in test_sockets)
the_highest_fileno = test_sockets[-1].fileno()
assert the_highest_fileno >= resource_limit
yield the_highest_fileno
finally:

View file

@ -22,7 +22,7 @@ import six
import trustme
from .._compat import bton, ntob, ntou
from .._compat import IS_ABOVE_OPENSSL10, IS_PYPY
from .._compat import IS_ABOVE_OPENSSL10, IS_CI, IS_PYPY
from .._compat import IS_LINUX, IS_MACOS, IS_WINDOWS
from ..server import HTTPServer, get_ssl_adapter_class
from ..testing import (
@ -52,6 +52,7 @@ IS_PYOPENSSL_SSL_VERSION_1_0 = (
PY27 = sys.version_info[:2] == (2, 7)
PY34 = sys.version_info[:2] == (3, 4)
PY3 = not six.PY2
PY310_PLUS = sys.version_info[:2] >= (3, 10)
_stdlib_to_openssl_verify = {
@ -149,8 +150,8 @@ def tls_ca_certificate_pem_path(ca):
@pytest.fixture
def tls_certificate(ca):
"""Provide a leaf certificate via fixture."""
interface, host, port = _get_conn_data(ANY_INTERFACE_IPV4)
return ca.issue_server_cert(ntou(interface))
interface, _host, _port = _get_conn_data(ANY_INTERFACE_IPV4)
return ca.issue_cert(ntou(interface))
@pytest.fixture
@ -270,6 +271,11 @@ def test_ssl_adapters(
ssl.CERT_REQUIRED, # server should validate if client cert CA is OK
),
)
@pytest.mark.xfail(
IS_PYPY and IS_CI,
reason='Fails under PyPy in CI for unknown reason',
strict=False,
)
def test_tls_client_auth( # noqa: C901 # FIXME
# FIXME: remove twisted logic, separate tests
mocker,
@ -294,8 +300,7 @@ def test_tls_client_auth( # noqa: C901 # FIXME
'idna.core.ulabel',
return_value=ntob(tls_client_identity),
):
client_cert = client_cert_root_ca.issue_server_cert(
# FIXME: change to issue_cert once new trustme is out
client_cert = client_cert_root_ca.issue_cert(
ntou(tls_client_identity),
)
del client_cert_root_ca
@ -419,6 +424,10 @@ def test_tls_client_auth( # noqa: C901 # FIXME
'ConnectionResetError(10054, '
"'An existing connection was forcibly closed "
"by the remote host', None, 10054, None))",
"('Connection aborted.', "
'error(10054, '
"'An existing connection was forcibly closed "
"by the remote host'))",
) if IS_WINDOWS else (
"('Connection aborted.', "
'OSError("(104, \'ECONNRESET\')"))',
@ -437,13 +446,35 @@ def test_tls_client_auth( # noqa: C901 # FIXME
"('Connection aborted.', "
"BrokenPipeError(32, 'Broken pipe'))",
)
if PY310_PLUS:
# FIXME: Figure out what's happening and correct the problem
expected_substrings += (
'SSLError(SSLEOFError(8, '
"'EOF occurred in violation of protocol (_ssl.c:",
)
if IS_GITHUB_ACTIONS_WORKFLOW and IS_WINDOWS and PY310_PLUS:
expected_substrings += (
"('Connection aborted.', "
'RemoteDisconnected('
"'Remote end closed connection without response'))",
)
assert any(e in err_text for e in expected_substrings)
@pytest.mark.parametrize( # noqa: C901 # FIXME
'adapter_type',
(
'builtin',
pytest.param(
'builtin',
marks=pytest.mark.xfail(
IS_GITHUB_ACTIONS_WORKFLOW and IS_MACOS and PY310_PLUS,
reason='Unclosed TLS resource warnings happen on macOS '
'under Python 3.10',
strict=False,
),
),
'pyopenssl',
),
)
@ -602,18 +633,19 @@ def test_https_over_http_error(http_server, ip_addr):
assert expected_substring in ssl_err.value.args[-1]
http_over_https_error_builtin_marks = []
if IS_WINDOWS and six.PY2:
http_over_https_error_builtin_marks.append(
pytest.mark.flaky(reruns=5, reruns_delay=2),
)
@pytest.mark.parametrize(
'adapter_type',
(
pytest.param(
'builtin',
marks=pytest.mark.xfail(
IS_WINDOWS and six.PY2,
raises=requests.exceptions.ConnectionError,
reason='Stdlib `ssl` module behaves weirdly '
'on Windows under Python 2',
strict=False,
),
marks=http_over_https_error_builtin_marks,
),
'pyopenssl',
),
@ -654,7 +686,7 @@ def test_http_over_https_error(
interface, _host, port = _get_conn_data(ip_addr)
tlshttpserver = tls_http_server((interface, port), tls_adapter)
interface, host, port = _get_conn_data(
interface, _host, port = _get_conn_data(
tlshttpserver.bind_addr,
)

View file

@ -1,6 +1,7 @@
"""Test wsgi."""
from concurrent.futures.thread import ThreadPoolExecutor
from traceback import print_tb
import pytest
import portend
@ -20,7 +21,7 @@ def simple_wsgi_server():
"""Fucking simple wsgi server fixture (duh)."""
port = portend.find_available_local_port()
def app(environ, start_response):
def app(_environ, start_response):
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
@ -29,7 +30,9 @@ def simple_wsgi_server():
host = '::'
addr = host, port
server = wsgi.Server(addr, app, timeout=600 if IS_SLOW_ENV else 20)
# pylint: disable=possibly-unused-variable
url = 'http://localhost:{port}/'.format(**locals())
# pylint: disable=possibly-unused-variable
with server._run_in_thread() as thread:
yield locals()
@ -46,6 +49,7 @@ def test_connection_keepalive(simple_wsgi_server):
with ExceptionTrap(requests.exceptions.ConnectionError) as trap:
resp = session.get('info')
resp.raise_for_status()
print_tb(trap.tb)
return bool(trap)
with ThreadPoolExecutor(max_workers=10 if IS_SLOW_ENV else 50) as pool:
@ -56,3 +60,24 @@ def test_connection_keepalive(simple_wsgi_server):
failures = sum(task.result() for task in tasks)
assert not failures
def test_gateway_start_response_called_twice(monkeypatch):
    """Verify that repeat calls of ``Gateway.start_response()`` fail."""
    # Stub out environ construction so a bare Gateway can be built.
    monkeypatch.setattr(wsgi.Gateway, 'get_environ', lambda self: {})
    gateway = wsgi.Gateway(None)

    # Simulate a response that has already been started once.
    gateway.started_response = True

    expected_error = (
        '^WSGI start_response called a second time with no exc_info.$'
    )
    with pytest.raises(RuntimeError, match=expected_error):
        gateway.start_response('200', (), None)
def test_gateway_write_needs_start_response_called_before(monkeypatch):
    """Check that calling ``Gateway.write()`` needs started response."""
    # Stub out environ construction so a bare Gateway can be built.
    monkeypatch.setattr(wsgi.Gateway, 'get_environ', lambda self: {})
    gateway = wsgi.Gateway(None)

    with pytest.raises(
            RuntimeError,
            match='^WSGI write called before start_response.$',
    ):
        # The actual arg value is unimportant
        gateway.write(None)

View file

@ -26,7 +26,7 @@ import time
import traceback
import os
import json
import unittest
import unittest # pylint: disable=deprecated-module,preferred-module
import warnings
import functools
@ -434,7 +434,7 @@ def cleanHeaders(headers, method, body, host, port):
# Add the required Host request header if not present.
# [This specifies the host:port of the server, not the client.]
found = False
for k, v in headers:
for k, _v in headers:
if k.lower() == 'host':
found = True
break
@ -498,9 +498,9 @@ def openURL(*args, **kwargs):
opener = functools.partial(_open_url_once, *args, **kwargs)
def on_exception():
type_, exc = sys.exc_info()[:2]
exc = sys.exc_info()[1]
if isinstance(exc, raise_subcls):
raise
raise exc
time.sleep(0.5)
# Try up to 10 times
@ -559,6 +559,10 @@ def strip_netloc(url):
Useful for wrapping an absolute-URI for which only the
path is expected (such as in calls to :py:meth:`WebCase.getPage`).
.. testsetup::
from cheroot.test.webtest import strip_netloc
>>> strip_netloc('https://google.com/foo/bar?bing#baz')
'/foo/bar?bing'
@ -569,7 +573,7 @@ def strip_netloc(url):
'/foo/bar?bing'
"""
parsed = urllib_parse.urlparse(url)
scheme, netloc, path, params, query, fragment = parsed
_scheme, _netloc, path, params, query, _fragment = parsed
stripped = '', '', path, params, query, ''
return urllib_parse.urlunparse(stripped)

17
lib/cheroot/testing.pyi Normal file
View file

@ -0,0 +1,17 @@
from typing import Any, Iterator, Optional, TypeVar
from .server import HTTPServer
from .wsgi import Server
# Type variable constrained to cheroot's HTTPServer and its subclasses.
T = TypeVar('T', bound=HTTPServer)

# Port value presumably meaning "let the OS choose" -- confirm against
# the implementation in cheroot/testing.py.
EPHEMERAL_PORT: int
NO_INTERFACE: Optional[str]
ANY_INTERFACE_IPV4: str
ANY_INTERFACE_IPV6: str
config: dict
# NOTE(review): annotated as an *instance* ``T``, but the parameter name
# suggests a factory/class is passed -- confirm whether ``Type[T]`` was
# intended here.
def cheroot_server(server_factory: T) -> Iterator[T]: ...
def wsgi_server() -> Iterator[Server]: ...
def native_server() -> Iterator[HTTPServer]: ...
def get_server_client(server) -> Any: ...

View file

View file

@ -108,7 +108,7 @@ class WorkerThread(threading.Thread):
Retrieves incoming connections from thread pool.
"""
self.server.stats['Worker Threads'][self.getName()] = self.stats
self.server.stats['Worker Threads'][self.name] = self.stats
try:
self.ready = True
while True:
@ -173,12 +173,12 @@ class ThreadPool:
def start(self):
"""Start the pool of threads."""
for i in range(self.min):
for _ in range(self.min):
self._threads.append(WorkerThread(self.server))
for worker in self._threads:
worker.setName(
worker.name = (
'CP Server {worker_name!s}'.
format(worker_name=worker.getName()),
format(worker_name=worker.name),
)
worker.start()
for worker in self._threads:
@ -187,7 +187,7 @@ class ThreadPool:
@property
def idle(self): # noqa: D401; irrelevant for properties
"""Number of worker threads which are idle. Read-only."""
"""Number of worker threads which are idle. Read-only.""" # noqa: D401
idles = len([t for t in self._threads if t.conn is None])
return max(idles - len(self._pending_shutdowns), 0)
@ -226,9 +226,9 @@ class ThreadPool:
def _spawn_worker(self):
worker = WorkerThread(self.server)
worker.setName(
worker.name = (
'CP Server {worker_name!s}'.
format(worker_name=worker.getName()),
format(worker_name=worker.name),
)
worker.start()
return worker
@ -251,7 +251,7 @@ class ThreadPool:
# put shutdown requests on the queue equal to the number of threads
# to remove. As each request is processed by a worker, that worker
# will terminate and be culled from the list.
for n in range(n_to_remove):
for _ in range(n_to_remove):
self._pending_shutdowns.append(None)
self._queue.put(_SHUTDOWNREQUEST)
@ -280,8 +280,9 @@ class ThreadPool:
self._queue.put(_SHUTDOWNREQUEST)
ignored_errors = (
# TODO: explain this exception.
AssertionError,
# Raised when start_response called >1 time w/o exc_info or
# wsgi write is called before start_response. See cheroot#261
RuntimeError,
# Ignore repeated Ctrl-C. See cherrypy#691.
KeyboardInterrupt,
)
@ -320,7 +321,7 @@ class ThreadPool:
return (
thread
for thread in threads
if thread is not threading.currentThread()
if thread is not threading.current_thread()
)
@property

View file

@ -0,0 +1,37 @@
import threading
from typing import Any
class TrueyZero:
    """Stub for an object supporting addition from either side.

    Presumably acts as an additive identity that is still truthy --
    confirm against the runtime implementation.
    """

    def __add__(self, other): ...
    def __radd__(self, other): ...

# Module-level singleton instance of TrueyZero.
trueyzero: TrueyZero
class WorkerThread(threading.Thread):
    """Stub for a pool worker thread serving connections for a server."""

    # Connection currently being handled (None when idle).
    conn: Any
    # The owning HTTP server instance.
    server: Any
    # True once the thread has entered its serving loop.
    ready: bool
    # Per-thread counters; exact semantics live in the implementation.
    requests_seen: int
    bytes_read: int
    bytes_written: int
    start_time: Any
    work_time: int
    # Mapping of per-thread statistics.
    stats: Any
    def __init__(self, server): ...
    def run(self) -> None: ...
class ThreadPool:
    """Stub for a growable/shrinkable pool of WorkerThread objects."""

    # The owning HTTP server instance.
    server: Any
    # Lower and upper bounds on the number of worker threads.
    min: Any
    max: Any
    # Presumably a bound queue-get callable -- confirm in implementation.
    get: Any
    def __init__(self, server, min: int = ..., max: int = ..., accepted_queue_size: int = ..., accepted_queue_timeout: int = ...) -> None: ...
    def start(self) -> None: ...
    @property
    def idle(self): ...
    def put(self, obj) -> None: ...
    def grow(self, amount) -> None: ...
    def shrink(self, amount) -> None: ...
    def stop(self, timeout: int = ...) -> None: ...
    @property
    def qsize(self) -> int: ...

View file

@ -154,7 +154,7 @@ class Gateway(server.Gateway):
# "The application may call start_response more than once,
# if and only if the exc_info argument is provided."
if self.started_response and not exc_info:
raise AssertionError(
raise RuntimeError(
'WSGI start_response called a second '
'time with no exc_info.',
)
@ -209,7 +209,7 @@ class Gateway(server.Gateway):
data from the iterable returned by the WSGI application).
"""
if not self.started_response:
raise AssertionError('WSGI write called before start_response.')
raise RuntimeError('WSGI write called before start_response.')
chunklen = len(chunk)
rbo = self.remaining_bytes_out

42
lib/cheroot/wsgi.pyi Normal file
View file

@ -0,0 +1,42 @@
from . import server
from typing import Any
class Server(server.HTTPServer):
    """Stub for the WSGI-capable HTTP server."""

    # WSGI protocol version tuple advertised by the server.
    wsgi_version: Any
    # The WSGI application callable being served.
    wsgi_app: Any
    request_queue_size: Any
    timeout: Any
    shutdown_timeout: Any
    # Thread pool handling incoming requests.
    requests: Any
    def __init__(self, bind_addr, wsgi_app, numthreads: int = ..., server_name: Any | None = ..., max: int = ..., request_queue_size: int = ..., timeout: int = ..., shutdown_timeout: int = ..., accepted_queue_size: int = ..., accepted_queue_timeout: int = ..., peercreds_enabled: bool = ..., peercreds_resolve_enabled: bool = ...) -> None: ...
    @property
    def numthreads(self): ...
    @numthreads.setter
    def numthreads(self, value) -> None: ...
class Gateway(server.Gateway):
    """Stub for the base WSGI gateway bridging requests to the app."""

    # Whether start_response() has been called for this request.
    started_response: bool
    # The WSGI environ mapping for the current request.
    env: Any
    # Bytes still allowed to be written out, per Content-Length.
    remaining_bytes_out: Any
    def __init__(self, req) -> None: ...
    @classmethod
    def gateway_map(cls): ...
    def get_environ(self) -> None: ...
    def respond(self) -> None: ...
    def start_response(self, status, headers, exc_info: Any | None = ...): ...
    def write(self, chunk) -> None: ...
class Gateway_10(Gateway):
    """Stub for the WSGI 1.0 gateway variant."""

    # WSGI version identifier for this gateway.
    version: Any
    def get_environ(self): ...

class Gateway_u0(Gateway_10):
    """Stub for the WSGI u.0 (unicode environ) gateway variant."""

    version: Any
    def get_environ(self): ...

# Mapping of WSGI version identifiers to gateway classes.
wsgi_gateways: Any
class PathInfoDispatcher:
    """Stub for a WSGI dispatcher routing by PATH_INFO prefix."""

    # Collection of (path, app) pairs -- exact type lives in the impl.
    apps: Any
    def __init__(self, apps): ...
    def __call__(self, environ, start_response): ...

View file

@ -7,14 +7,20 @@ import itertools
import more_itertools
from typing import Callable, TypeVar
CallableT = TypeVar("CallableT", bound=Callable[..., object])
def compose(*funcs):
"""
Compose any number of unary functions into a single unary function.
>>> import textwrap
>>> stripped = str.strip(textwrap.dedent(compose.__doc__))
>>> compose(str.strip, textwrap.dedent)(compose.__doc__) == stripped
>>> expected = str.strip(textwrap.dedent(compose.__doc__))
>>> strip_and_dedent = compose(str.strip, textwrap.dedent)
>>> strip_and_dedent(compose.__doc__) == expected
True
Compose also allows the innermost function to take arbitrary arguments.
@ -91,7 +97,12 @@ def once(func):
return wrapper
def method_cache(method, cache_wrapper=None):
def method_cache(
method: CallableT,
cache_wrapper: Callable[
[CallableT], CallableT
] = functools.lru_cache(), # type: ignore[assignment]
) -> CallableT:
"""
Wrap lru_cache to support storing the cache data in the object instances.
@ -158,19 +169,22 @@ def method_cache(method, cache_wrapper=None):
http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/
for another implementation and additional justification.
"""
cache_wrapper = cache_wrapper or functools.lru_cache()
def wrapper(self, *args, **kwargs):
def wrapper(self: object, *args: object, **kwargs: object) -> object:
# it's the first call, replace the method with a cached, bound method
bound_method = types.MethodType(method, self)
bound_method: CallableT = types.MethodType( # type: ignore[assignment]
method, self
)
cached_method = cache_wrapper(bound_method)
setattr(self, method.__name__, cached_method)
return cached_method(*args, **kwargs)
# Support cache clear even before cache has been created.
wrapper.cache_clear = lambda: None
wrapper.cache_clear = lambda: None # type: ignore[attr-defined]
return _special_method_cache(method, cache_wrapper) or wrapper
return ( # type: ignore[return-value]
_special_method_cache(method, cache_wrapper) or wrapper
)
def _special_method_cache(method, cache_wrapper):
@ -210,13 +224,16 @@ def apply(transform):
>>> @apply(reversed)
... def get_numbers(start):
... "doc for get_numbers"
... return range(start, start+3)
>>> list(get_numbers(4))
[6, 5, 4]
>>> get_numbers.__doc__
'doc for get_numbers'
"""
def wrap(func):
return compose(transform, func)
return functools.wraps(func)(compose(transform, func))
return wrap
@ -233,6 +250,8 @@ def result_invoke(action):
... return a + b
>>> x = add_two(2, 3)
5
>>> x
5
"""
def wrap(func):

View file

@ -7,7 +7,7 @@ backports.zoneinfo==0.2.1
beautifulsoup4==4.10.0
bleach==4.1.0
certifi==2021.10.8
cheroot==8.5.2
cheroot==8.6.0
cherrypy==18.6.1
cloudinary==1.28.0
distro==1.6.0