Mirror of https://github.com/Tautulli/Tautulli.git (synced 2025-08-22 14:13:40 -07:00)

Merge branch 'nightly' into concurrent_stream_graph

This commit is contained in: commit 254da125ab

333 changed files with 10884 additions and 5566 deletions
.github/workflows/codeql.yml (vendored, 2 changes)

@@ -24,7 +24,7 @@ jobs:
     steps:
       - name: Checkout Code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Initialize CodeQL
         uses: github/codeql-action/init@v2
.github/workflows/issues-stale.yml (vendored, 4 changes)

@@ -10,7 +10,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Stale
-        uses: actions/stale@v7
+        uses: actions/stale@v8
        with:
          stale-issue-message: >
            This issue is stale because it has been open for 30 days with no activity.

@@ -30,7 +30,7 @@ jobs:
          days-before-close: 5

       - name: Invalid Template
-        uses: actions/stale@v7
+        uses: actions/stale@v8
        with:
          stale-issue-message: >
            Invalid issues template.
.github/workflows/publish-docker.yml (vendored, 14 changes)

@@ -13,7 +13,7 @@ jobs:
     if: ${{ !contains(github.event.head_commit.message, '[skip ci]') }}
     steps:
       - name: Checkout Code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Prepare
         id: prepare

@@ -38,10 +38,10 @@ jobs:
          echo "docker_image=${{ secrets.DOCKER_REPO }}/tautulli" >> $GITHUB_OUTPUT

       - name: Set Up QEMU
-        uses: docker/setup-qemu-action@v2
+        uses: docker/setup-qemu-action@v3

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
        id: buildx
        with:
          version: latest

@@ -55,14 +55,14 @@ jobs:
            ${{ runner.os }}-buildx-

       - name: Login to DockerHub
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
        if: success()
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
        if: success()
        with:
          registry: ghcr.io

@@ -70,7 +70,7 @@ jobs:
          password: ${{ secrets.GHCR_TOKEN }}

       - name: Docker Build and Push
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
        if: success()
        with:
          context: .

@@ -95,7 +95,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Get Build Job Status
-        uses: technote-space/workflow-conclusion-action@v3.0
+        uses: technote-space/workflow-conclusion-action@v3

       - name: Combine Job Status
         id: status
.github/workflows/publish-installers.yml (vendored, 10 changes)

@@ -24,7 +24,7 @@ jobs:
     steps:
       - name: Checkout Code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Set Release Version
         id: get_version

@@ -68,7 +68,7 @@ jobs:
          pyinstaller -y ./package/Tautulli-${{ matrix.os }}.spec

       - name: Create Windows Installer
-        uses: joncloud/makensis-action@v3.7
+        uses: joncloud/makensis-action@v4
        if: matrix.os == 'windows'
        with:
          script-file: ./package/Tautulli.nsi

@@ -100,10 +100,10 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Get Build Job Status
-        uses: technote-space/workflow-conclusion-action@v3.0
+        uses: technote-space/workflow-conclusion-action@v3

       - name: Checkout Code
-        uses: actions/checkout@v3.2.0
+        uses: actions/checkout@v4

       - name: Set Release Version
         id: get_version

@@ -168,7 +168,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Get Build Job Status
-        uses: technote-space/workflow-conclusion-action@v3.0
+        uses: technote-space/workflow-conclusion-action@v3

       - name: Combine Job Status
         id: status
.github/workflows/publish-snap.yml (vendored, 6 changes)

@@ -20,7 +20,7 @@ jobs:
         - armhf
     steps:
       - name: Checkout Code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Prepare
         id: prepare

@@ -35,7 +35,7 @@ jobs:
          fi

       - name: Set Up QEMU
-        uses: docker/setup-qemu-action@v2
+        uses: docker/setup-qemu-action@v3

       - name: Build Snap Package
         uses: diddlesnaps/snapcraft-multiarch-action@v1

@@ -70,7 +70,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Get Build Job Status
-        uses: technote-space/workflow-conclusion-action@v3.0
+        uses: technote-space/workflow-conclusion-action@v3

       - name: Combine Job Status
         id: status
.github/workflows/pull-requests.yml (vendored, 3 changes)

@@ -10,7 +10,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout Code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Comment on Pull Request
         uses: mshick/add-pr-comment@v2

@@ -18,7 +18,6 @@ jobs:
        with:
          message: Pull requests must be made to the `nightly` branch. Thanks.
          repo-token: ${{ secrets.GITHUB_TOKEN }}
-          repo-token-user-login: 'github-actions[bot]'

       - name: Fail Workflow
         if: github.base_ref != 'nightly'
CHANGELOG.md (36 changes)

@@ -1,5 +1,41 @@
 # Changelog

+## v2.13.1 (2023-08-25)
+
+* Notes:
+  * Support for Python 3.7 has been dropped. The minimum Python version is now 3.8.
+* Other:
+  * Fix: Tautulli failing to start on some systems.
+
+
+## v2.13.0 (2023-08-25)
+
+* Notes:
+  * Support for Python 3.7 has been dropped. The minimum Python version is now 3.8.
+* Notifications:
+  * Fix: Improved watched notification trigger description. (#2104)
+  * New: Added notification image option for iOS Tautulli Remote app.
+* Exporter:
+  * New: Added track chapter export fields.
+  * New: Added on-demand subtitle export fields.
+
+
+## v2.12.5 (2023-07-13)
+
+* Activity:
+  * New: Added d3d11va to list of hardware decoders.
+* History:
+  * Fix: Incorrect grouping of play history.
+  * New: Added button in settings to regroup play history.
+* Notifications:
+  * Fix: Incorrect concurrent streams notifications by IP address for IPv6 addresses. (#2096) (Thanks @pooley182)
+* UI:
+  * Fix: Occasional UI crashing on Python 3.11.
+  * New: Added multiselect user filters to History and Graphs pages. (#2090) (Thanks @zdimension)
+* API:
+  * New: Added regroup_history API command.
+  * Change: Updated graph API commands to accept a comma separated list of user IDs.
+
+
 ## v2.12.4 (2023-05-23)

 * History:
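The v2.12.5 API entries above introduce regroup_history and let the graph commands take multiple user IDs. A minimal sketch of what that looks like against Tautulli's standard /api/v2 interface; the host, API key, and user IDs are placeholders, and the exact graph command and parameter name shown are assumptions based on the existing API:

    # Hedged example, not from this diff: illustrates the changelog
    # entries above. "get_plays_by_date" and "user_id" are assumptions.
    import requests

    BASE = "http://localhost:8181/api/v2"
    APIKEY = "YOUR_API_KEY"  # placeholder

    # Graph commands now accept a comma separated list of user IDs.
    r = requests.get(BASE, params={
        "apikey": APIKEY,
        "cmd": "get_plays_by_date",
        "user_id": "1001,1002",  # multiple users in one query
    })
    print(r.json()["response"]["result"])

    # New command to regroup play history after the grouping fix.
    requests.get(BASE, params={"apikey": APIKEY, "cmd": "regroup_history"})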
CONTRIBUTING.md

@@ -9,7 +9,7 @@ All pull requests should be based on the `nightly` branch, to minimize cross merges
 ### Python Code

 #### Compatibility
-The code should work with Python 3.7+. Note that Tautulli runs on many different platforms.
+The code should work with Python 3.8+. Note that Tautulli runs on many different platforms.

 Re-use existing code. Do not hesitate to add logging in your code. You can use the logger module `plexpy.logger.*` for this. Web requests are invoked via `plexpy.request.*` and derived ones. Use these methods to automatically add proper and meaningful error handling.
README.md

@@ -36,7 +36,7 @@ and [PlexWatchWeb](https://github.com/ecleese/plexWatchWeb).
 [![Docker Stars][badge-docker-stars]][DockerHub]
 [![Downloads][badge-downloads]][Releases Latest]

-[badge-python]: https://img.shields.io/badge/python->=3.7-blue?style=flat-square
+[badge-python]: https://img.shields.io/badge/python->=3.8-blue?style=flat-square
 [badge-docker-pulls]: https://img.shields.io/docker/pulls/tautulli/tautulli?style=flat-square
 [badge-docker-stars]: https://img.shields.io/docker/stars/tautulli/tautulli?style=flat-square
 [badge-downloads]: https://img.shields.io/github/downloads/Tautulli/Tautulli/total?style=flat-square
@@ -1,5 +1 @@
-# A Python "namespace package" http://www.python.org/dev/peps/pep-0382/
-# This always goes inside of a namespace package's __init__.py
-from pkgutil import extend_path
-
-__path__ = extend_path(__path__, __name__)  # type: ignore
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)  # type: ignore
@@ -89,7 +89,6 @@ def lru_cache(maxsize=100, typed=False):  # noqa: C901
     # to allow the implementation to change (including a possible C version).

     def decorating_function(user_function):
-
         cache = dict()
         stats = [0, 0]  # make statistics updateable non-locally
         HITS, MISSES = 0, 1  # names for the stats fields
lib/bs4/__init__.py

@@ -15,7 +15,7 @@ documentation: http://www.crummy.com/software/BeautifulSoup/bs4/doc/
 """

 __author__ = "Leonard Richardson (leonardr@segfault.org)"
-__version__ = "4.11.2"
+__version__ = "4.12.2"
 __copyright__ = "Copyright (c) 2004-2023 Leonard Richardson"
 # Use of this source code is governed by the MIT license.
 __license__ = "MIT"

@@ -38,11 +38,13 @@ from .builder import (
     builder_registry,
     ParserRejectedMarkup,
     XMLParsedAsHTMLWarning,
+    HTMLParserTreeBuilder
 )
 from .dammit import UnicodeDammit
 from .element import (
     CData,
     Comment,
+    CSS,
     DEFAULT_OUTPUT_ENCODING,
     Declaration,
     Doctype,

@@ -348,26 +350,50 @@ class BeautifulSoup(Tag):
         self.markup = None
         self.builder.soup = None

-    def __copy__(self):
-        """Copy a BeautifulSoup object by converting the document to a string and parsing it again."""
-        copy = type(self)(
-            self.encode('utf-8'), builder=self.builder, from_encoding='utf-8'
-        )
-
-        # Although we encoded the tree to UTF-8, that may not have
-        # been the encoding of the original markup. Set the copy's
-        # .original_encoding to reflect the original object's
-        # .original_encoding.
-        copy.original_encoding = self.original_encoding
-        return copy
+    def _clone(self):
+        """Create a new BeautifulSoup object with the same TreeBuilder,
+        but not associated with any markup.
+
+        This is the first step of the deepcopy process.
+        """
+        clone = type(self)("", None, self.builder)
+
+        # Keep track of the encoding of the original document,
+        # since we won't be parsing it again.
+        clone.original_encoding = self.original_encoding
+        return clone

     def __getstate__(self):
         # Frequently a tree builder can't be pickled.
         d = dict(self.__dict__)
         if 'builder' in d and d['builder'] is not None and not self.builder.picklable:
-            d['builder'] = None
+            d['builder'] = type(self.builder)
+        # Store the contents as a Unicode string.
+        d['contents'] = []
+        d['markup'] = self.decode()
+
+        # If _most_recent_element is present, it's a Tag object left
+        # over from initial parse. It might not be picklable and we
+        # don't need it.
+        if '_most_recent_element' in d:
+            del d['_most_recent_element']
         return d
+
+    def __setstate__(self, state):
+        # If necessary, restore the TreeBuilder by looking it up.
+        self.__dict__ = state
+        if isinstance(self.builder, type):
+            self.builder = self.builder()
+        elif not self.builder:
+            # We don't know which builder was used to build this
+            # parse tree, so use a default we know is always available.
+            self.builder = HTMLParserTreeBuilder()
+        self.builder.soup = self
+        self.reset()
+        self._feed()
+        return state

     @classmethod
     def _decode_markup(cls, markup):
         """Ensure `markup` is bytes so it's safe to send into warnings.warn.
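A quick sketch of the behavior the hunk above enables (assuming the vendored bs4 4.12 API): copy.copy() now goes through _clone()/__deepcopy__ instead of re-encoding and re-parsing the document, and pickling works even when the tree builder itself can't be pickled, because __getstate__ stores the decoded markup plus the builder's class and __setstate__ re-parses:

    import copy
    import pickle
    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<p>Some <b>bold</b> text</p>", "html.parser")

    # Copying no longer round-trips through UTF-8 encode/parse.
    dupe = copy.copy(soup)
    assert str(dupe) == str(soup)

    # Pickling stores markup as a string; unpickling rebuilds the
    # builder from its class and re-parses via reset()/_feed().
    restored = pickle.loads(pickle.dumps(soup))
    assert str(restored) == str(soup)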
@@ -468,6 +494,7 @@ class BeautifulSoup(Tag):
         self.open_tag_counter = Counter()
         self.preserve_whitespace_tag_stack = []
         self.string_container_stack = []
+        self._most_recent_element = None
         self.pushTag(self)

     def new_tag(self, name, namespace=None, nsprefix=None, attrs={},

@@ -749,7 +776,7 @@ class BeautifulSoup(Tag):
     def decode(self, pretty_print=False,
                eventual_encoding=DEFAULT_OUTPUT_ENCODING,
-               formatter="minimal"):
+               formatter="minimal", iterator=None):
         """Returns a string or Unicode representation of the parse tree
         as an HTML or XML document.

@@ -776,7 +803,7 @@ class BeautifulSoup(Tag):
         else:
             indent_level = 0
         return prefix + super(BeautifulSoup, self).decode(
-            indent_level, eventual_encoding, formatter)
+            indent_level, eventual_encoding, formatter, iterator)

 # Aliases to make it easier to get started quickly, e.g. 'from bs4 import _soup'
 _s = BeautifulSoup
lib/bs4/builder/_htmlparser.py

@@ -24,6 +24,7 @@ from bs4.dammit import EntitySubstitution, UnicodeDammit

 from bs4.builder import (
     DetectsXMLParsedAsHTML,
+    ParserRejectedMarkup,
     HTML,
     HTMLTreeBuilder,
     STRICT,

@@ -70,6 +71,22 @@ class BeautifulSoupHTMLParser(HTMLParser, DetectsXMLParsedAsHTML):

         self._initialize_xml_detector()

+    def error(self, message):
+        # NOTE: This method is required so long as Python 3.9 is
+        # supported. The corresponding code is removed from HTMLParser
+        # in 3.5, but not removed from ParserBase until 3.10.
+        # https://github.com/python/cpython/issues/76025
+        #
+        # The original implementation turned the error into a warning,
+        # but in every case I discovered, this made HTMLParser
+        # immediately crash with an error message that was less
+        # helpful than the warning. The new implementation makes it
+        # more clear that html.parser just can't parse this
+        # markup. The 3.10 implementation does the same, though it
+        # raises AssertionError rather than calling a method. (We
+        # catch this error and wrap it in a ParserRejectedMarkup.)
+        raise ParserRejectedMarkup(message)
+
     def handle_startendtag(self, name, attrs):
         """Handle an incoming empty-element tag.

@@ -359,6 +376,12 @@ class HTMLParserTreeBuilder(HTMLTreeBuilder):
         args, kwargs = self.parser_args
         parser = BeautifulSoupHTMLParser(*args, **kwargs)
         parser.soup = self.soup
-        parser.feed(markup)
+        try:
+            parser.feed(markup)
+        except AssertionError as e:
+            # html.parser raises AssertionError in rare cases to
+            # indicate a fatal problem with the markup, especially
+            # when there's an error in the doctype declaration.
+            raise ParserRejectedMarkup(e)
         parser.close()
         parser.already_closed_empty_element = []
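With the error() override and the AssertionError wrapper above, markup that used to crash html.parser outright should now surface as bs4's ParserRejectedMarkup. A hedged sketch, reusing the oss-fuzz byte strings from the test removed later in this diff (whether each sample still raises depends on the Python version's html.parser):

    from bs4 import BeautifulSoup
    from bs4.builder import ParserRejectedMarkup

    for markup in (b"\n<![\xff\xfe\xfe\xcd\x00", b"<![n\x00"):
        try:
            BeautifulSoup(markup, "html.parser")
            print("parsed without error")
        except ParserRejectedMarkup as exc:
            # Previously this could escape as a bare AssertionError from
            # html.parser; now it is wrapped in a bs4 exception.
            print("rejected:", exc)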
lib/bs4/css.py (new file, 280 lines)

@@ -0,0 +1,280 @@
"""Integration code for CSS selectors using Soup Sieve (pypi: soupsieve)."""

import warnings
try:
    import soupsieve
except ImportError as e:
    soupsieve = None
    warnings.warn(
        'The soupsieve package is not installed. CSS selectors cannot be used.'
    )


class CSS(object):
    """A proxy object against the soupsieve library, to simplify its
    CSS selector API.

    Acquire this object through the .css attribute on the
    BeautifulSoup object, or on the Tag you want to use as the
    starting point for a CSS selector.

    The main advantage of doing this is that the tag to be selected
    against doesn't need to be explicitly specified in the function
    calls, since it's already scoped to a tag.
    """

    def __init__(self, tag, api=soupsieve):
        """Constructor.

        You don't need to instantiate this class yourself; instead,
        access the .css attribute on the BeautifulSoup object, or on
        the Tag you want to use as the starting point for your CSS
        selector.

        :param tag: All CSS selectors will use this as their starting
           point.

        :param api: A plug-in replacement for the soupsieve module,
           designed mainly for use in tests.
        """
        if api is None:
            raise NotImplementedError(
                "Cannot execute CSS selectors because the soupsieve package is not installed."
            )
        self.api = api
        self.tag = tag

    def escape(self, ident):
        """Escape a CSS identifier.

        This is a simple wrapper around soupsieve.escape(). See the
        documentation for that function for more information.
        """
        if soupsieve is None:
            raise NotImplementedError(
                "Cannot escape CSS identifiers because the soupsieve package is not installed."
            )
        return self.api.escape(ident)

    def _ns(self, ns, select):
        """Normalize a dictionary of namespaces."""
        if not isinstance(select, self.api.SoupSieve) and ns is None:
            # If the selector is a precompiled pattern, it already has
            # a namespace context compiled in, which cannot be
            # replaced.
            ns = self.tag._namespaces
        return ns

    def _rs(self, results):
        """Normalize a list of results to a ResultSet.

        A ResultSet is more consistent with the rest of Beautiful
        Soup's API, and ResultSet.__getattr__ has a helpful error
        message if you try to treat a list of results as a single
        result (a common mistake).
        """
        # Import here to avoid circular import
        from bs4.element import ResultSet
        return ResultSet(None, results)

    def compile(self, select, namespaces=None, flags=0, **kwargs):
        """Pre-compile a selector and return the compiled object.

        :param selector: A CSS selector.

        :param namespaces: A dictionary mapping namespace prefixes
           used in the CSS selector to namespace URIs. By default,
           Beautiful Soup will use the prefixes it encountered while
           parsing the document.

        :param flags: Flags to be passed into Soup Sieve's
           soupsieve.compile() method.

        :param kwargs: Keyword arguments to be passed into Soup Sieve's
           soupsieve.compile() method.

        :return: A precompiled selector object.
        :rtype: soupsieve.SoupSieve
        """
        return self.api.compile(
            select, self._ns(namespaces, select), flags, **kwargs
        )

    def select_one(self, select, namespaces=None, flags=0, **kwargs):
        """Perform a CSS selection operation on the current Tag and return the
        first result.

        This uses the Soup Sieve library. For more information, see
        that library's documentation for the soupsieve.select_one()
        method.

        :param selector: A CSS selector.

        :param namespaces: A dictionary mapping namespace prefixes
           used in the CSS selector to namespace URIs. By default,
           Beautiful Soup will use the prefixes it encountered while
           parsing the document.

        :param flags: Flags to be passed into Soup Sieve's
           soupsieve.select_one() method.

        :param kwargs: Keyword arguments to be passed into Soup Sieve's
           soupsieve.select_one() method.

        :return: A Tag, or None if the selector has no match.
        :rtype: bs4.element.Tag
        """
        return self.api.select_one(
            select, self.tag, self._ns(namespaces, select), flags, **kwargs
        )

    def select(self, select, namespaces=None, limit=0, flags=0, **kwargs):
        """Perform a CSS selection operation on the current Tag.

        This uses the Soup Sieve library. For more information, see
        that library's documentation for the soupsieve.select()
        method.

        :param selector: A string containing a CSS selector.

        :param namespaces: A dictionary mapping namespace prefixes
           used in the CSS selector to namespace URIs. By default,
           Beautiful Soup will pass in the prefixes it encountered while
           parsing the document.

        :param limit: After finding this number of results, stop looking.

        :param flags: Flags to be passed into Soup Sieve's
           soupsieve.select() method.

        :param kwargs: Keyword arguments to be passed into Soup Sieve's
           soupsieve.select() method.

        :return: A ResultSet of Tag objects.
        :rtype: bs4.element.ResultSet
        """
        if limit is None:
            limit = 0

        return self._rs(
            self.api.select(
                select, self.tag, self._ns(namespaces, select), limit, flags,
                **kwargs
            )
        )

    def iselect(self, select, namespaces=None, limit=0, flags=0, **kwargs):
        """Perform a CSS selection operation on the current Tag.

        This uses the Soup Sieve library. For more information, see
        that library's documentation for the soupsieve.iselect()
        method. It is the same as select(), but it returns a generator
        instead of a list.

        :param selector: A string containing a CSS selector.

        :param namespaces: A dictionary mapping namespace prefixes
           used in the CSS selector to namespace URIs. By default,
           Beautiful Soup will pass in the prefixes it encountered while
           parsing the document.

        :param limit: After finding this number of results, stop looking.

        :param flags: Flags to be passed into Soup Sieve's
           soupsieve.iselect() method.

        :param kwargs: Keyword arguments to be passed into Soup Sieve's
           soupsieve.iselect() method.

        :return: A generator
        :rtype: types.GeneratorType
        """
        return self.api.iselect(
            select, self.tag, self._ns(namespaces, select), limit, flags, **kwargs
        )

    def closest(self, select, namespaces=None, flags=0, **kwargs):
        """Find the Tag closest to this one that matches the given selector.

        This uses the Soup Sieve library. For more information, see
        that library's documentation for the soupsieve.closest()
        method.

        :param selector: A string containing a CSS selector.

        :param namespaces: A dictionary mapping namespace prefixes
           used in the CSS selector to namespace URIs. By default,
           Beautiful Soup will pass in the prefixes it encountered while
           parsing the document.

        :param flags: Flags to be passed into Soup Sieve's
           soupsieve.closest() method.

        :param kwargs: Keyword arguments to be passed into Soup Sieve's
           soupsieve.closest() method.

        :return: A Tag, or None if there is no match.
        :rtype: bs4.Tag
        """
        return self.api.closest(
            select, self.tag, self._ns(namespaces, select), flags, **kwargs
        )

    def match(self, select, namespaces=None, flags=0, **kwargs):
        """Check whether this Tag matches the given CSS selector.

        This uses the Soup Sieve library. For more information, see
        that library's documentation for the soupsieve.match()
        method.

        :param: a CSS selector.

        :param namespaces: A dictionary mapping namespace prefixes
           used in the CSS selector to namespace URIs. By default,
           Beautiful Soup will pass in the prefixes it encountered while
           parsing the document.

        :param flags: Flags to be passed into Soup Sieve's
           soupsieve.match() method.

        :param kwargs: Keyword arguments to be passed into Soup Sieve's
           soupsieve.match() method.

        :return: True if this Tag matches the selector; False otherwise.
        :rtype: bool
        """
        return self.api.match(
            select, self.tag, self._ns(namespaces, select), flags, **kwargs
        )

    def filter(self, select, namespaces=None, flags=0, **kwargs):
        """Filter this Tag's direct children based on the given CSS selector.

        This uses the Soup Sieve library. It works the same way as
        passing this Tag into that library's soupsieve.filter()
        method. For more information, see the documentation for
        soupsieve.filter().

        :param namespaces: A dictionary mapping namespace prefixes
           used in the CSS selector to namespace URIs. By default,
           Beautiful Soup will pass in the prefixes it encountered while
           parsing the document.

        :param flags: Flags to be passed into Soup Sieve's
           soupsieve.filter() method.

        :param kwargs: Keyword arguments to be passed into Soup Sieve's
           soupsieve.filter() method.

        :return: A ResultSet of Tag objects.
        :rtype: bs4.element.ResultSet
        """
        return self._rs(
            self.api.filter(
                select, self.tag, self._ns(namespaces, select), flags, **kwargs
            )
        )
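A minimal usage sketch for the new CSS proxy (requires soupsieve; the markup here is made up for illustration):

    from bs4 import BeautifulSoup

    soup = BeautifulSoup(
        '<div id="main"><p class="a">One</p><p class="b">Two</p></div>',
        "html.parser",
    )

    css = soup.css                      # CSS proxy scoped to the whole document
    print(css.select("p"))              # ResultSet of both <p> tags
    print(css.select_one("#main p.b"))  # first match, or None
    print(soup.p.css.match("p.a"))      # True: match() is scoped to soup.p

    # Selectors can be precompiled once and reused.
    compiled = css.compile("div > p")
    print(css.select(compiled, limit=1))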
lib/bs4/diagnose.py

@@ -59,21 +59,6 @@ def diagnose(data):

     if hasattr(data, 'read'):
         data = data.read()
-    elif data.startswith("http:") or data.startswith("https:"):
-        print(('"%s" looks like a URL. Beautiful Soup is not an HTTP client.' % data))
-        print("You need to use some other library to get the document behind the URL, and feed that document to Beautiful Soup.")
-        return
-    else:
-        try:
-            if os.path.exists(data):
-                print(('"%s" looks like a filename. Reading data from the file.' % data))
-                with open(data) as fp:
-                    data = fp.read()
-        except ValueError:
-            # This can happen on some platforms when the 'filename' is
-            # too long. Assume it's data and not a filename.
-            pass
-    print("")

     for parser in basic_parsers:
         print(("Trying to parse your markup with %s" % parser))
lib/bs4/element.py

@@ -8,14 +8,8 @@ except ImportError as e:
 import re
 import sys
 import warnings
-try:
-    import soupsieve
-except ImportError as e:
-    soupsieve = None
-    warnings.warn(
-        'The soupsieve package is not installed. CSS selectors cannot be used.'
-    )

+from bs4.css import CSS
 from bs4.formatter import (
     Formatter,
     HTMLFormatter,

@@ -154,6 +148,11 @@ class PageElement(object):
     NavigableString, Tag, etc. are all subclasses of PageElement.
     """

+    # In general, we can't tell just by looking at an element whether
+    # it's contained in an XML document or an HTML document. But for
+    # Tags (q.v.) we can store this information at parse time.
+    known_xml = None
+
     def setup(self, parent=None, previous_element=None, next_element=None,
               previous_sibling=None, next_sibling=None):
         """Sets up the initial relations between this element and

@@ -941,11 +940,6 @@ class NavigableString(str, PageElement):
     PREFIX = ''
     SUFFIX = ''

-    # We can't tell just by looking at a string whether it's contained
-    # in an XML document or an HTML document.
-
-    known_xml = None
-
     def __new__(cls, value):
         """Create a new NavigableString.

@@ -961,12 +955,22 @@ class NavigableString(str, PageElement):
         u.setup()
         return u

-    def __copy__(self):
+    def __deepcopy__(self, memo, recursive=False):
         """A copy of a NavigableString has the same contents and class
         as the original, but it is not connected to the parse tree.
+
+        :param recursive: This parameter is ignored; it's only defined
+           so that NavigableString.__deepcopy__ implements the same
+           signature as Tag.__deepcopy__.
         """
         return type(self)(self)

+    def __copy__(self):
+        """A copy of a NavigableString can only be a deep copy, because
+        only one PageElement can occupy a given place in a parse tree.
+        """
+        return self.__deepcopy__({})
+
     def __getnewargs__(self):
         return (str(self),)
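Sketch of the effect (assuming the vendored 4.12 API): copy.copy() on a NavigableString is now just a deep copy, detached from the parse tree:

    import copy
    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<b>bold</b>", "html.parser")
    s = soup.b.string            # NavigableString "bold"

    s2 = copy.copy(s)            # routed through __deepcopy__
    assert s2 == "bold"
    assert s2.parent is None     # no longer connected to the parse tree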
@@ -1311,10 +1315,46 @@ class Tag(PageElement):

     parserClass = _alias("parser_class")  # BS3

-    def __copy__(self):
-        """A copy of a Tag is a new Tag, unconnected to the parse tree.
+    def __deepcopy__(self, memo, recursive=True):
+        """A deepcopy of a Tag is a new Tag, unconnected to the parse tree.
         Its contents are a copy of the old Tag's contents.
         """
+        clone = self._clone()
+
+        if recursive:
+            # Clone this tag's descendants recursively, but without
+            # making any recursive function calls.
+            tag_stack = [clone]
+            for event, element in self._event_stream(self.descendants):
+                if event is Tag.END_ELEMENT_EVENT:
+                    # Stop appending incoming Tags to the Tag that was
+                    # just closed.
+                    tag_stack.pop()
+                else:
+                    descendant_clone = element.__deepcopy__(
+                        memo, recursive=False
+                    )
+                    # Add to its parent's .contents
+                    tag_stack[-1].append(descendant_clone)
+
+                    if event is Tag.START_ELEMENT_EVENT:
+                        # Add the Tag itself to the stack so that its
+                        # children will be .appended to it.
+                        tag_stack.append(descendant_clone)
+        return clone
+
+    def __copy__(self):
+        """A copy of a Tag must always be a deep copy, because a Tag's
+        children can only have one parent at a time.
+        """
+        return self.__deepcopy__({})
+
+    def _clone(self):
+        """Create a new Tag just like this one, but with no
+        contents and unattached to any parse tree.
+
+        This is the first step in the deepcopy process.
+        """
         clone = type(self)(
             None, self.builder, self.name, self.namespace,
             self.prefix, self.attrs, is_xml=self._is_xml,

@@ -1326,8 +1366,6 @@ class Tag(PageElement):
         )
         for attr in ('can_be_empty_element', 'hidden'):
             setattr(clone, attr, getattr(self, attr))
-        for child in self.contents:
-            clone.append(child.__copy__())
         return clone

     @property
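The point of the recursive=False plumbing above is that cloning walks the subtree with _event_stream() instead of making one call frame per level, so deeply nested documents can be copied without hitting Python's recursion limit. A rough sketch (the depth of 600 is an arbitrary value chosen to stress the old recursive approach):

    import copy
    from bs4 import BeautifulSoup

    deep = "<div>" * 600 + "</div>" * 600
    soup = BeautifulSoup(deep, "html.parser")

    dupe = copy.deepcopy(soup.div)   # iterative clone: no RecursionError
    assert dupe.parent is None       # unconnected to the original tree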
@@ -1650,28 +1688,178 @@ class Tag(PageElement):

     def decode(self, indent_level=None,
                eventual_encoding=DEFAULT_OUTPUT_ENCODING,
-               formatter="minimal"):
-        """Render a Unicode representation of this PageElement and its
-        contents.
-
-        :param indent_level: Each line of the rendering will be
-           indented this many spaces. Used internally in
-           recursive calls while pretty-printing.
-        :param eventual_encoding: The tag is destined to be
-           encoded into this encoding. This method is _not_
-           responsible for performing that encoding. This information
-           is passed in so that it can be substituted in if the
-           document contains a <META> tag that mentions the document's
-           encoding.
-        :param formatter: A Formatter object, or a string naming one of
-           the standard formatters.
-        """
+               formatter="minimal",
+               iterator=None):
+        pieces = []
         # First off, turn a non-Formatter `formatter` into a Formatter
         # object. This will stop the lookup from happening over and
         # over again.
         if not isinstance(formatter, Formatter):
             formatter = self.formatter_for_name(formatter)
+
+        if indent_level is True:
+            indent_level = 0
+
+        # The currently active tag that put us into string literal
+        # mode. Until this element is closed, children will be treated
+        # as string literals and not pretty-printed. String literal
+        # mode is turned on immediately after this tag begins, and
+        # turned off immediately before it's closed. This means there
+        # will be whitespace before and after the tag itself.
+        string_literal_tag = None
+
+        for event, element in self._event_stream(iterator):
+            if event in (Tag.START_ELEMENT_EVENT, Tag.EMPTY_ELEMENT_EVENT):
+                piece = element._format_tag(
+                    eventual_encoding, formatter, opening=True
+                )
+            elif event is Tag.END_ELEMENT_EVENT:
+                piece = element._format_tag(
+                    eventual_encoding, formatter, opening=False
+                )
+                if indent_level is not None:
+                    indent_level -= 1
+            else:
+                piece = element.output_ready(formatter)
+
+            # Now we need to apply the 'prettiness' -- extra
+            # whitespace before and/or after this tag. This can get
+            # complicated because certain tags, like <pre> and
+            # <script>, can't be prettified, since adding whitespace would
+            # change the meaning of the content.
+
+            # The default behavior is to add whitespace before and
+            # after an element when string literal mode is off, and to
+            # leave things as they are when string literal mode is on.
+            if string_literal_tag:
+                indent_before = indent_after = False
+            else:
+                indent_before = indent_after = True
+
+            # The only time the behavior is more complex than that is
+            # when we encounter an opening or closing tag that might
+            # put us into or out of string literal mode.
+            if (event is Tag.START_ELEMENT_EVENT
+                and not string_literal_tag
+                and not element._should_pretty_print()):
+                # We are about to enter string literal mode. Add
+                # whitespace before this tag, but not after. We
+                # will stay in string literal mode until this tag
+                # is closed.
+                indent_before = True
+                indent_after = False
+                string_literal_tag = element
+            elif (event is Tag.END_ELEMENT_EVENT
+                  and element is string_literal_tag):
+                # We are about to exit string literal mode by closing
+                # the tag that sent us into that mode. Add whitespace
+                # after this tag, but not before.
+                indent_before = False
+                indent_after = True
+                string_literal_tag = None
+
+            # Now we know whether to add whitespace before and/or
+            # after this element.
+            if indent_level is not None:
+                if (indent_before or indent_after):
+                    if isinstance(element, NavigableString):
+                        piece = piece.strip()
+                    if piece:
+                        piece = self._indent_string(
+                            piece, indent_level, formatter,
+                            indent_before, indent_after
+                        )
+                if event == Tag.START_ELEMENT_EVENT:
+                    indent_level += 1
+            pieces.append(piece)
+        return "".join(pieces)
+
+    # Names for the different events yielded by _event_stream
+    START_ELEMENT_EVENT = object()
+    END_ELEMENT_EVENT = object()
+    EMPTY_ELEMENT_EVENT = object()
+    STRING_ELEMENT_EVENT = object()
+
+    def _event_stream(self, iterator=None):
+        """Yield a sequence of events that can be used to reconstruct the DOM
+        for this element.
+
+        This lets us recreate the nested structure of this element
+        (e.g. when formatting it as a string) without using recursive
+        method calls.
+
+        This is similar in concept to the SAX API, but it's a simpler
+        interface designed for internal use. The events are different
+        from SAX and the arguments associated with the events are Tags
+        and other Beautiful Soup objects.
+
+        :param iterator: An alternate iterator to use when traversing
+         the tree.
+        """
+        tag_stack = []
+
+        iterator = iterator or self.self_and_descendants
+
+        for c in iterator:
+            # If the parent of the element we're about to yield is not
+            # the tag currently on the stack, it means that the tag on
+            # the stack closed before this element appeared.
+            while tag_stack and c.parent != tag_stack[-1]:
+                now_closed_tag = tag_stack.pop()
+                yield Tag.END_ELEMENT_EVENT, now_closed_tag
+
+            if isinstance(c, Tag):
+                if c.is_empty_element:
+                    yield Tag.EMPTY_ELEMENT_EVENT, c
+                else:
+                    yield Tag.START_ELEMENT_EVENT, c
+                    tag_stack.append(c)
+                    continue
+            else:
+                yield Tag.STRING_ELEMENT_EVENT, c
+
+        while tag_stack:
+            now_closed_tag = tag_stack.pop()
+            yield Tag.END_ELEMENT_EVENT, now_closed_tag
+
+    def _indent_string(self, s, indent_level, formatter,
+                       indent_before, indent_after):
+        """Add indentation whitespace before and/or after a string.
+
+        :param s: The string to amend with whitespace.
+        :param indent_level: The indentation level; affects how much
+           whitespace goes before the string.
+        :param indent_before: Whether or not to add whitespace
+           before the string.
+        :param indent_after: Whether or not to add whitespace
+           (a newline) after the string.
+        """
+        space_before = ''
+        if indent_before and indent_level:
+            space_before = (formatter.indent * indent_level)
+
+        space_after = ''
+        if indent_after:
+            space_after = "\n"
+
+        return space_before + s + space_after
+
+    def _format_tag(self, eventual_encoding, formatter, opening):
+        # A tag starts with the < character (see below).
+
+        # Then the / character, if this is a closing tag.
+        closing_slash = ''
+        if not opening:
+            closing_slash = '/'
+
+        # Then an optional namespace prefix.
+        prefix = ''
+        if self.prefix:
+            prefix = self.prefix + ":"
+
+        # Then a list of attribute values, if this is an opening tag.
+        attribute_string = ''
+        if opening:
             attributes = formatter.attributes(self)
             attrs = []
             for key, val in attributes:

@@ -1693,63 +1881,19 @@ class Tag(PageElement):
                     str(key) + '='
                     + formatter.quoted_attribute_value(text))
                 attrs.append(decoded)
-        close = ''
-        closeTag = ''
-
-        prefix = ''
-        if self.prefix:
-            prefix = self.prefix + ":"
-
-        if self.is_empty_element:
-            close = formatter.void_element_close_prefix or ''
-        else:
-            closeTag = '</%s%s>' % (prefix, self.name)
-
-        pretty_print = self._should_pretty_print(indent_level)
-        space = ''
-        indent_space = ''
-        if indent_level is not None:
-            indent_space = (formatter.indent * (indent_level - 1))
-        if pretty_print:
-            space = indent_space
-            indent_contents = indent_level + 1
-        else:
-            indent_contents = None
-        contents = self.decode_contents(
-            indent_contents, eventual_encoding, formatter
-        )
-
-        if self.hidden:
-            # This is the 'document root' object.
-            s = contents
-        else:
-            s = []
-            attribute_string = ''
-            if attrs:
-                attribute_string = ' ' + ' '.join(attrs)
-            if indent_level is not None:
-                # Even if this particular tag is not pretty-printed,
-                # we should indent up to the start of the tag.
-                s.append(indent_space)
-            s.append('<%s%s%s%s>' % (
-                prefix, self.name, attribute_string, close))
-            if pretty_print:
-                s.append("\n")
-            s.append(contents)
-            if pretty_print and contents and contents[-1] != "\n":
-                s.append("\n")
-            if pretty_print and closeTag:
-                s.append(space)
-            s.append(closeTag)
-            if indent_level is not None and closeTag and self.next_sibling:
-                # Even if this particular tag is not pretty-printed,
-                # we're now done with the tag, and we should add a
-                # newline if appropriate.
-                s.append("\n")
-            s = ''.join(s)
-        return s
-
-    def _should_pretty_print(self, indent_level):
+            if attrs:
+                attribute_string = ' ' + ' '.join(attrs)
+
+        # Then an optional closing slash (for a void element in an
+        # XML document).
+        void_element_closing_slash = ''
+        if self.is_empty_element:
+            void_element_closing_slash = formatter.void_element_close_prefix or ''
+
+        # Put it all together.
+        return '<' + closing_slash + prefix + self.name + attribute_string + void_element_closing_slash + '>'
+
+    def _should_pretty_print(self, indent_level=1):
         """Should this tag be pretty-printed?

         Most of them should, but some (such as <pre> in HTML

@@ -1800,32 +1944,8 @@ class Tag(PageElement):
            the standard Formatters.

         """
-        # First off, turn a string formatter into a Formatter object. This
-        # will stop the lookup from happening over and over again.
-        if not isinstance(formatter, Formatter):
-            formatter = self.formatter_for_name(formatter)
-
-        pretty_print = (indent_level is not None)
-        s = []
-        for c in self:
-            text = None
-            if isinstance(c, NavigableString):
-                text = c.output_ready(formatter)
-            elif isinstance(c, Tag):
-                s.append(c.decode(indent_level, eventual_encoding,
-                                  formatter))
-            preserve_whitespace = (
-                self.preserve_whitespace_tags and self.name in self.preserve_whitespace_tags
-            )
-            if text and indent_level and not preserve_whitespace:
-                text = text.strip()
-            if text:
-                if pretty_print and not preserve_whitespace:
-                    s.append(formatter.indent * (indent_level - 1))
-                s.append(text)
-                if pretty_print and not preserve_whitespace:
-                    s.append("\n")
-        return ''.join(s)
+        return self.decode(indent_level, eventual_encoding, formatter,
+                           iterator=self.descendants)

     def encode_contents(
         self, indent_level=None, encoding=DEFAULT_OUTPUT_ENCODING,
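Net effect of the decode()/decode_contents() rewrite, exercised by the updated smoke tests further down this diff: pretty-printing is driven by the event stream, prettify() output now consistently ends in a newline, and whitespace-sensitive tags are passed through untouched. A sketch:

    from bs4 import BeautifulSoup

    soup = BeautifulSoup("<textarea> woo\nwoo </textarea>", "html.parser")
    assert soup.textarea.prettify() == "<textarea> woo\nwoo </textarea>\n"

    soup = BeautifulSoup("<textarea></textarea>", "html.parser")
    assert soup.textarea.prettify() == "<textarea></textarea>\n"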
@@ -1922,6 +2042,18 @@ class Tag(PageElement):
         # return iter() to make the purpose of the method clear
         return iter(self.contents)  # XXX This seems to be untested.

+    @property
+    def self_and_descendants(self):
+        """Iterate over this PageElement and its children in a
+        breadth-first sequence.
+
+        :yield: A sequence of PageElements.
+        """
+        if not self.hidden:
+            yield self
+        for i in self.descendants:
+            yield i
+
     @property
     def descendants(self):
         """Iterate over all children of this PageElement in a
@@ -1954,10 +2086,7 @@ class Tag(PageElement):
         :return: A Tag.
         :rtype: bs4.element.Tag
         """
-        value = self.select(selector, namespaces, 1, **kwargs)
-        if value:
-            return value[0]
-        return None
+        return self.css.select_one(selector, namespaces, **kwargs)

     def select(self, selector, namespaces=None, limit=None, **kwargs):
         """Perform a CSS selection operation on the current element.
@@ -1979,21 +2108,12 @@ class Tag(PageElement):
         :return: A ResultSet of Tags.
         :rtype: bs4.element.ResultSet
         """
-        if namespaces is None:
-            namespaces = self._namespaces
-
-        if limit is None:
-            limit = 0
-        if soupsieve is None:
-            raise NotImplementedError(
-                "Cannot execute CSS selectors because the soupsieve package is not installed."
-            )
-
-        results = soupsieve.select(selector, self, namespaces, limit, **kwargs)
-
-        # We do this because it's more consistent and because
-        # ResultSet.__getattr__ has a helpful error message.
-        return ResultSet(None, results)
+        return self.css.select(selector, namespaces, limit, **kwargs)
+
+    @property
+    def css(self):
+        """Return an interface to the CSS selector API."""
+        return CSS(self)

     # Old names for backwards compatibility
     def childGenerator(self):
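So after this change select() and select_one() are thin wrappers over the css property; the pairs below should be interchangeable (a sketch, with soupsieve installed):

    from bs4 import BeautifulSoup

    soup = BeautifulSoup('<p class="a">One</p><p class="b">Two</p>',
                         "html.parser")

    assert soup.select("p.b") == soup.css.select("p.b")
    assert soup.select_one("p.a") is soup.css.select_one("p.a")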
@@ -298,37 +298,11 @@ class TreeBuilderSmokeTest(object):
         )
         assert soup.a['class'] == ['a', 'b', 'c']

-    def test_fuzzed_input(self):
-        # This test centralizes in one place the various fuzz tests
-        # for Beautiful Soup created by the oss-fuzz project.
-
-        # These strings superficially resemble markup, but they
-        # generally can't be parsed into anything. The best we can
-        # hope for is that parsing these strings won't crash the
-        # parser.
-        #
-        # n.b. This markup is commented out because these fuzz tests
-        # _do_ crash the parser. However the crashes are due to bugs
-        # in html.parser, not Beautiful Soup -- otherwise I'd fix the
-        # bugs!
-
-        bad_markup = [
-            # https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=28873
-            # https://github.com/guidovranken/python-library-fuzzers/blob/master/corp-html/519e5b4269a01185a0d5e76295251921da2f0700
-            # https://bugs.python.org/issue37747
-            #
-            #b'\n<![\xff\xfe\xfe\xcd\x00',
-
-            #https://github.com/guidovranken/python-library-fuzzers/blob/master/corp-html/de32aa55785be29bbc72a1a8e06b00611fb3d9f8
-            # https://bugs.python.org/issue34480
-            #
-            #b'<![n\x00'
-        ]
-        for markup in bad_markup:
-            with warnings.catch_warnings(record=False):
-                soup = self.soup(markup)
+    def test_invalid_doctype(self):
+        markup = '<![if word]>content<![endif]>'
+        markup = '<!DOCTYPE html]ff>'
+        soup = self.soup(markup)
 class HTMLTreeBuilderSmokeTest(TreeBuilderSmokeTest):

     """A basic test of a treebuilder's competence.

@@ -577,8 +551,8 @@ Hello, world!
         """Whitespace must be preserved in <pre> and <textarea> tags,
         even if that would mean not prettifying the markup.
         """
-        pre_markup = "<pre> </pre>"
-        textarea_markup = "<textarea> woo\nwoo </textarea>"
+        pre_markup = "<pre>a z</pre>\n"
+        textarea_markup = "<textarea> woo\nwoo </textarea>\n"
         self.assert_soup(pre_markup)
         self.assert_soup(textarea_markup)

@@ -589,7 +563,7 @@ Hello, world!
         assert soup.textarea.prettify() == textarea_markup

         soup = self.soup("<textarea></textarea>")
-        assert soup.textarea.prettify() == "<textarea></textarea>"
+        assert soup.textarea.prettify() == "<textarea></textarea>\n"

     def test_nested_inline_elements(self):
         """Inline elements can be nested indefinitely."""
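The updated assertions reflect a behavior change that runs through this whole commit: prettify() now terminates its output with a newline. A small sketch of the difference (markup is illustrative):

from bs4 import BeautifulSoup

soup = BeautifulSoup('<textarea></textarea>', 'html.parser')
# Previously '<textarea></textarea>'; the output now ends with '\n'.
assert soup.textarea.prettify() == '<textarea></textarea>\n'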
@@ -0,0 +1 @@
+˙<!DOCTyPEV PUBLIC'''Đ'
@@ -0,0 +1 @@
+)<a><math><TR><a><mI><a><p><a>
Binary file not shown.
@@ -0,0 +1,2 @@
+
+<![
@@ -0,0 +1 @@
+-<math><sElect><mi><sElect><sElect>
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -0,0 +1 @@
+ñ<table><svg><html>
Binary file not shown.
Binary file not shown.
Binary file not shown.
487  lib/bs4/tests/test_css.py  Normal file
@@ -0,0 +1,487 @@
import pytest
import types
from unittest.mock import MagicMock

from bs4 import (
    CSS,
    BeautifulSoup,
    ResultSet,
)

from . import (
    SoupTest,
    SOUP_SIEVE_PRESENT,
)

if SOUP_SIEVE_PRESENT:
    from soupsieve import SelectorSyntaxError


@pytest.mark.skipif(not SOUP_SIEVE_PRESENT, reason="Soup Sieve not installed")
class TestCSSSelectors(SoupTest):
    """Test basic CSS selector functionality.

    This functionality is implemented in soupsieve, which has a much
    more comprehensive test suite, so this is basically an extra check
    that soupsieve works as expected.
    """

    HTML = """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN"
"http://www.w3.org/TR/html4/strict.dtd">
<html>
<head>
<title>The title</title>
<link rel="stylesheet" href="blah.css" type="text/css" id="l1">
</head>
<body>
<custom-dashed-tag class="dashed" id="dash1">Hello there.</custom-dashed-tag>
<div id="main" class="fancy">
<div id="inner">
<h1 id="header1">An H1</h1>
<p>Some text</p>
<p class="onep" id="p1">Some more text</p>
<h2 id="header2">An H2</h2>
<p class="class1 class2 class3" id="pmulti">Another</p>
<a href="http://bob.example.org/" rel="friend met" id="bob">Bob</a>
<h2 id="header3">Another H2</h2>
<a id="me" href="http://simonwillison.net/" rel="me">me</a>
<span class="s1">
<a href="#" id="s1a1">span1a1</a>
<a href="#" id="s1a2">span1a2 <span id="s1a2s1">test</span></a>
<span class="span2">
<a href="#" id="s2a1">span2a1</a>
</span>
<span class="span3"></span>
<custom-dashed-tag class="dashed" id="dash2"/>
<div data-tag="dashedvalue" id="data1"/>
</span>
</div>
<x id="xid">
<z id="zida"/>
<z id="zidab"/>
<z id="zidac"/>
</x>
<y id="yid">
<z id="zidb"/>
</y>
<p lang="en" id="lang-en">English</p>
<p lang="en-gb" id="lang-en-gb">English UK</p>
<p lang="en-us" id="lang-en-us">English US</p>
<p lang="fr" id="lang-fr">French</p>
</div>

<div id="footer">
</div>
"""

    def setup_method(self):
        self.soup = BeautifulSoup(self.HTML, 'html.parser')

    def assert_selects(self, selector, expected_ids, **kwargs):
        results = self.soup.select(selector, **kwargs)
        assert isinstance(results, ResultSet)
        el_ids = [el['id'] for el in results]
        el_ids.sort()
        expected_ids.sort()
        assert expected_ids == el_ids, "Selector %s, expected [%s], got [%s]" % (
            selector, ', '.join(expected_ids), ', '.join(el_ids)
        )

    assertSelect = assert_selects

    def assert_select_multiple(self, *tests):
        for selector, expected_ids in tests:
            self.assert_selects(selector, expected_ids)

    def test_precompiled(self):
        sel = self.soup.css.compile('div')

        els = self.soup.select(sel)
        assert len(els) == 4
        for div in els:
            assert div.name == 'div'

        el = self.soup.select_one(sel)
        assert 'main' == el['id']

    def test_one_tag_one(self):
        els = self.soup.select('title')
        assert len(els) == 1
        assert els[0].name == 'title'
        assert els[0].contents == ['The title']

    def test_one_tag_many(self):
        els = self.soup.select('div')
        assert len(els) == 4
        for div in els:
            assert div.name == 'div'

        el = self.soup.select_one('div')
        assert 'main' == el['id']

    def test_select_one_returns_none_if_no_match(self):
        match = self.soup.select_one('nonexistenttag')
        assert None == match

    def test_tag_in_tag_one(self):
        els = self.soup.select('div div')
        self.assert_selects('div div', ['inner', 'data1'])

    def test_tag_in_tag_many(self):
        for selector in ('html div', 'html body div', 'body div'):
            self.assert_selects(selector, ['data1', 'main', 'inner', 'footer'])

    def test_limit(self):
        self.assert_selects('html div', ['main'], limit=1)
        self.assert_selects('html body div', ['inner', 'main'], limit=2)
        self.assert_selects('body div', ['data1', 'main', 'inner', 'footer'],
                            limit=10)

    def test_tag_no_match(self):
        assert len(self.soup.select('del')) == 0

    def test_invalid_tag(self):
        with pytest.raises(SelectorSyntaxError):
            self.soup.select('tag%t')

    def test_select_dashed_tag_ids(self):
        self.assert_selects('custom-dashed-tag', ['dash1', 'dash2'])

    def test_select_dashed_by_id(self):
        dashed = self.soup.select('custom-dashed-tag[id=\"dash2\"]')
        assert dashed[0].name == 'custom-dashed-tag'
        assert dashed[0]['id'] == 'dash2'

    def test_dashed_tag_text(self):
        assert self.soup.select('body > custom-dashed-tag')[0].text == 'Hello there.'

    def test_select_dashed_matches_find_all(self):
        assert self.soup.select('custom-dashed-tag') == self.soup.find_all('custom-dashed-tag')

    def test_header_tags(self):
        self.assert_select_multiple(
            ('h1', ['header1']),
            ('h2', ['header2', 'header3']),
        )

    def test_class_one(self):
        for selector in ('.onep', 'p.onep', 'html p.onep'):
            els = self.soup.select(selector)
            assert len(els) == 1
            assert els[0].name == 'p'
            assert els[0]['class'] == ['onep']

    def test_class_mismatched_tag(self):
        els = self.soup.select('div.onep')
        assert len(els) == 0

    def test_one_id(self):
        for selector in ('div#inner', '#inner', 'div div#inner'):
            self.assert_selects(selector, ['inner'])

    def test_bad_id(self):
        els = self.soup.select('#doesnotexist')
        assert len(els) == 0

    def test_items_in_id(self):
        els = self.soup.select('div#inner p')
        assert len(els) == 3
        for el in els:
            assert el.name == 'p'
        assert els[1]['class'] == ['onep']
        assert not els[0].has_attr('class')

    def test_a_bunch_of_emptys(self):
        for selector in ('div#main del', 'div#main div.oops', 'div div#main'):
            assert len(self.soup.select(selector)) == 0

    def test_multi_class_support(self):
        for selector in ('.class1', 'p.class1', '.class2', 'p.class2',
                         '.class3', 'p.class3', 'html p.class2', 'div#inner .class2'):
            self.assert_selects(selector, ['pmulti'])

    def test_multi_class_selection(self):
        for selector in ('.class1.class3', '.class3.class2',
                         '.class1.class2.class3'):
            self.assert_selects(selector, ['pmulti'])

    def test_child_selector(self):
        self.assert_selects('.s1 > a', ['s1a1', 's1a2'])
        self.assert_selects('.s1 > a span', ['s1a2s1'])

    def test_child_selector_id(self):
        self.assert_selects('.s1 > a#s1a2 span', ['s1a2s1'])

    def test_attribute_equals(self):
        self.assert_select_multiple(
            ('p[class="onep"]', ['p1']),
            ('p[id="p1"]', ['p1']),
            ('[class="onep"]', ['p1']),
            ('[id="p1"]', ['p1']),
            ('link[rel="stylesheet"]', ['l1']),
            ('link[type="text/css"]', ['l1']),
            ('link[href="blah.css"]', ['l1']),
            ('link[href="no-blah.css"]', []),
            ('[rel="stylesheet"]', ['l1']),
            ('[type="text/css"]', ['l1']),
            ('[href="blah.css"]', ['l1']),
            ('[href="no-blah.css"]', []),
            ('p[href="no-blah.css"]', []),
            ('[href="no-blah.css"]', []),
        )

    def test_attribute_tilde(self):
        self.assert_select_multiple(
            ('p[class~="class1"]', ['pmulti']),
            ('p[class~="class2"]', ['pmulti']),
            ('p[class~="class3"]', ['pmulti']),
            ('[class~="class1"]', ['pmulti']),
            ('[class~="class2"]', ['pmulti']),
            ('[class~="class3"]', ['pmulti']),
            ('a[rel~="friend"]', ['bob']),
            ('a[rel~="met"]', ['bob']),
            ('[rel~="friend"]', ['bob']),
            ('[rel~="met"]', ['bob']),
        )

    def test_attribute_startswith(self):
        self.assert_select_multiple(
            ('[rel^="style"]', ['l1']),
            ('link[rel^="style"]', ['l1']),
            ('notlink[rel^="notstyle"]', []),
            ('[rel^="notstyle"]', []),
            ('link[rel^="notstyle"]', []),
            ('link[href^="bla"]', ['l1']),
            ('a[href^="http://"]', ['bob', 'me']),
            ('[href^="http://"]', ['bob', 'me']),
            ('[id^="p"]', ['pmulti', 'p1']),
            ('[id^="m"]', ['me', 'main']),
            ('div[id^="m"]', ['main']),
            ('a[id^="m"]', ['me']),
            ('div[data-tag^="dashed"]', ['data1'])
        )

    def test_attribute_endswith(self):
        self.assert_select_multiple(
            ('[href$=".css"]', ['l1']),
            ('link[href$=".css"]', ['l1']),
            ('link[id$="1"]', ['l1']),
            ('[id$="1"]', ['data1', 'l1', 'p1', 'header1', 's1a1', 's2a1', 's1a2s1', 'dash1']),
            ('div[id$="1"]', ['data1']),
            ('[id$="noending"]', []),
        )

    def test_attribute_contains(self):
        self.assert_select_multiple(
            # From test_attribute_startswith
            ('[rel*="style"]', ['l1']),
            ('link[rel*="style"]', ['l1']),
            ('notlink[rel*="notstyle"]', []),
            ('[rel*="notstyle"]', []),
            ('link[rel*="notstyle"]', []),
            ('link[href*="bla"]', ['l1']),
            ('[href*="http://"]', ['bob', 'me']),
            ('[id*="p"]', ['pmulti', 'p1']),
            ('div[id*="m"]', ['main']),
            ('a[id*="m"]', ['me']),
            # From test_attribute_endswith
            ('[href*=".css"]', ['l1']),
            ('link[href*=".css"]', ['l1']),
            ('link[id*="1"]', ['l1']),
            ('[id*="1"]', ['data1', 'l1', 'p1', 'header1', 's1a1', 's1a2', 's2a1', 's1a2s1', 'dash1']),
            ('div[id*="1"]', ['data1']),
            ('[id*="noending"]', []),
            # New for this test
            ('[href*="."]', ['bob', 'me', 'l1']),
            ('a[href*="."]', ['bob', 'me']),
            ('link[href*="."]', ['l1']),
            ('div[id*="n"]', ['main', 'inner']),
            ('div[id*="nn"]', ['inner']),
            ('div[data-tag*="edval"]', ['data1'])
        )

    def test_attribute_exact_or_hypen(self):
        self.assert_select_multiple(
            ('p[lang|="en"]', ['lang-en', 'lang-en-gb', 'lang-en-us']),
            ('[lang|="en"]', ['lang-en', 'lang-en-gb', 'lang-en-us']),
            ('p[lang|="fr"]', ['lang-fr']),
            ('p[lang|="gb"]', []),
        )

    def test_attribute_exists(self):
        self.assert_select_multiple(
            ('[rel]', ['l1', 'bob', 'me']),
            ('link[rel]', ['l1']),
            ('a[rel]', ['bob', 'me']),
            ('[lang]', ['lang-en', 'lang-en-gb', 'lang-en-us', 'lang-fr']),
            ('p[class]', ['p1', 'pmulti']),
            ('[blah]', []),
            ('p[blah]', []),
            ('div[data-tag]', ['data1'])
        )

    def test_quoted_space_in_selector_name(self):
        html = """<div style="display: wrong">nope</div>
<div style="display: right">yes</div>
"""
        soup = BeautifulSoup(html, 'html.parser')
        [chosen] = soup.select('div[style="display: right"]')
        assert "yes" == chosen.string

    def test_unsupported_pseudoclass(self):
        with pytest.raises(NotImplementedError):
            self.soup.select("a:no-such-pseudoclass")

        with pytest.raises(SelectorSyntaxError):
            self.soup.select("a:nth-of-type(a)")

    def test_nth_of_type(self):
        # Try to select first paragraph
        els = self.soup.select('div#inner p:nth-of-type(1)')
        assert len(els) == 1
        assert els[0].string == 'Some text'

        # Try to select third paragraph
        els = self.soup.select('div#inner p:nth-of-type(3)')
        assert len(els) == 1
        assert els[0].string == 'Another'

        # Try to select (non-existent!) fourth paragraph
        els = self.soup.select('div#inner p:nth-of-type(4)')
        assert len(els) == 0

        # Zero will select no tags.
        els = self.soup.select('div p:nth-of-type(0)')
        assert len(els) == 0

    def test_nth_of_type_direct_descendant(self):
        els = self.soup.select('div#inner > p:nth-of-type(1)')
        assert len(els) == 1
        assert els[0].string == 'Some text'

    def test_id_child_selector_nth_of_type(self):
        self.assert_selects('#inner > p:nth-of-type(2)', ['p1'])

    def test_select_on_element(self):
        # Other tests operate on the tree; this operates on an element
        # within the tree.
        inner = self.soup.find("div", id="main")
        selected = inner.select("div")
        # The <div id="inner"> tag was selected. The <div id="footer">
        # tag was not.
        self.assert_selects_ids(selected, ['inner', 'data1'])

    def test_overspecified_child_id(self):
        self.assert_selects(".fancy #inner", ['inner'])
        self.assert_selects(".normal #inner", [])

    def test_adjacent_sibling_selector(self):
        self.assert_selects('#p1 + h2', ['header2'])
        self.assert_selects('#p1 + h2 + p', ['pmulti'])
        self.assert_selects('#p1 + #header2 + .class1', ['pmulti'])
        assert [] == self.soup.select('#p1 + p')

    def test_general_sibling_selector(self):
        self.assert_selects('#p1 ~ h2', ['header2', 'header3'])
        self.assert_selects('#p1 ~ #header2', ['header2'])
        self.assert_selects('#p1 ~ h2 + a', ['me'])
        self.assert_selects('#p1 ~ h2 + [rel="me"]', ['me'])
        assert [] == self.soup.select('#inner ~ h2')

    def test_dangling_combinator(self):
        with pytest.raises(SelectorSyntaxError):
            self.soup.select('h1 >')

    def test_sibling_combinator_wont_select_same_tag_twice(self):
        self.assert_selects('p[lang] ~ p', ['lang-en-gb', 'lang-en-us', 'lang-fr'])

    # Test the selector grouping operator (the comma)
    def test_multiple_select(self):
        self.assert_selects('x, y', ['xid', 'yid'])

    def test_multiple_select_with_no_space(self):
        self.assert_selects('x,y', ['xid', 'yid'])

    def test_multiple_select_with_more_space(self):
        self.assert_selects('x, y', ['xid', 'yid'])

    def test_multiple_select_duplicated(self):
        self.assert_selects('x, x', ['xid'])

    def test_multiple_select_sibling(self):
        self.assert_selects('x, y ~ p[lang=fr]', ['xid', 'lang-fr'])

    def test_multiple_select_tag_and_direct_descendant(self):
        self.assert_selects('x, y > z', ['xid', 'zidb'])

    def test_multiple_select_direct_descendant_and_tags(self):
        self.assert_selects('div > x, y, z', ['xid', 'yid', 'zida', 'zidb', 'zidab', 'zidac'])

    def test_multiple_select_indirect_descendant(self):
        self.assert_selects('div x,y, z', ['xid', 'yid', 'zida', 'zidb', 'zidab', 'zidac'])

    def test_invalid_multiple_select(self):
        with pytest.raises(SelectorSyntaxError):
            self.soup.select(',x, y')
        with pytest.raises(SelectorSyntaxError):
            self.soup.select('x,,y')

    def test_multiple_select_attrs(self):
        self.assert_selects('p[lang=en], p[lang=en-gb]', ['lang-en', 'lang-en-gb'])

    def test_multiple_select_ids(self):
        self.assert_selects('x, y > z[id=zida], z[id=zidab], z[id=zidb]', ['xid', 'zidb', 'zidab'])

    def test_multiple_select_nested(self):
        self.assert_selects('body > div > x, y > z', ['xid', 'zidb'])

    def test_select_duplicate_elements(self):
        # When markup contains duplicate elements, a multiple select
        # will find all of them.
        markup = '<div class="c1"/><div class="c2"/><div class="c1"/>'
        soup = BeautifulSoup(markup, 'html.parser')
        selected = soup.select(".c1, .c2")
        assert 3 == len(selected)

        # Verify that find_all finds the same elements, though because
        # of an implementation detail it finds them in a different
        # order.
        for element in soup.find_all(class_=['c1', 'c2']):
            assert element in selected

    def test_closest(self):
        inner = self.soup.find("div", id="inner")
        closest = inner.css.closest("div[id=main]")
        assert closest == self.soup.find("div", id="main")

    def test_match(self):
        inner = self.soup.find("div", id="inner")
        main = self.soup.find("div", id="main")
        assert inner.css.match("div[id=main]") == False
        assert main.css.match("div[id=main]") == True

    def test_iselect(self):
        gen = self.soup.css.iselect("h2")
        assert isinstance(gen, types.GeneratorType)
        [header2, header3] = gen
        assert header2['id'] == 'header2'
        assert header3['id'] == 'header3'

    def test_filter(self):
        inner = self.soup.find("div", id="inner")
        results = inner.css.filter("h2")
        assert len(inner.css.filter("h2")) == 2

        results = inner.css.filter("h2[id=header3]")
        assert isinstance(results, ResultSet)
        [result] = results
        assert result['id'] == 'header3'

    def test_escape(self):
        m = self.soup.css.escape
        assert m(".foo#bar") == '\\.foo\\#bar'
        assert m("()[]{}") == '\\(\\)\\[\\]\\{\\}'
        assert m(".foo") == self.soup.css.escape(".foo")
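Taken together, these tests map out the surface of the new CSS class: compile(), select(), select_one(), iselect(), closest(), match(), filter() and escape(). A quick illustration of the methods that have no Tag-level equivalent (assuming soupsieve is installed; the markup is a trimmed version of the fixture above):

import types
from bs4 import BeautifulSoup

soup = BeautifulSoup('<div id="main"><h2 id="header2">An H2</h2></div>', 'html.parser')

sel = soup.css.compile('h2')                    # precompiled soupsieve selector
assert soup.select_one(sel)['id'] == 'header2'  # reusable in select()/select_one()

h2 = soup.find('h2')
assert h2.css.closest('div[id=main]') is soup.find('div', id='main')  # nearest matching ancestor
assert h2.css.match('h2[id=header2]')                                 # boolean match against this tag
assert isinstance(soup.css.iselect('h2'), types.GeneratorType)        # lazy variant of select()
assert soup.css.escape('.foo#bar') == '\\.foo\\#bar'                  # CSS identifier escaping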
@@ -80,20 +80,20 @@ class TestFormatter(SoupTest):
     @pytest.mark.parametrize(
         "indent,expect",
         [
-            (None, '<a>\n<b>\ntext\n</b>\n</a>'),
-            (-1, '<a>\n<b>\ntext\n</b>\n</a>'),
-            (0, '<a>\n<b>\ntext\n</b>\n</a>'),
-            ("", '<a>\n<b>\ntext\n</b>\n</a>'),
+            (None, '<a>\n<b>\ntext\n</b>\n</a>\n'),
+            (-1, '<a>\n<b>\ntext\n</b>\n</a>\n'),
+            (0, '<a>\n<b>\ntext\n</b>\n</a>\n'),
+            ("", '<a>\n<b>\ntext\n</b>\n</a>\n'),

-            (1, '<a>\n <b>\n  text\n </b>\n</a>'),
-            (2, '<a>\n  <b>\n    text\n  </b>\n</a>'),
+            (1, '<a>\n <b>\n  text\n </b>\n</a>\n'),
+            (2, '<a>\n  <b>\n    text\n  </b>\n</a>\n'),

-            ("\t", '<a>\n\t<b>\n\t\ttext\n\t</b>\n</a>'),
-            ('abc', '<a>\nabc<b>\nabcabctext\nabc</b>\n</a>'),
+            ("\t", '<a>\n\t<b>\n\t\ttext\n\t</b>\n</a>\n'),
+            ('abc', '<a>\nabc<b>\nabcabctext\nabc</b>\n</a>\n'),

             # Some invalid inputs -- the default behavior is used.
-            (object(), '<a>\n <b>\n  text\n </b>\n</a>'),
-            (b'bytes', '<a>\n <b>\n  text\n </b>\n</a>'),
+            (object(), '<a>\n <b>\n  text\n </b>\n</a>\n'),
+            (b'bytes', '<a>\n <b>\n  text\n </b>\n</a>\n'),
         ]
     )
     def test_indent(self, indent, expect):
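Every expected string in the table gains the same trailing newline. A sketch of one row, assuming the test drives the indent parameter of bs4.formatter.HTMLFormatter:

from bs4 import BeautifulSoup
from bs4.formatter import HTMLFormatter

soup = BeautifulSoup('<a><b>text</b></a>', 'html.parser')
two_space = HTMLFormatter(indent=2)
# Matches the (2, ...) row above, trailing newline included.
assert soup.prettify(formatter=two_space) == '<a>\n  <b>\n    text\n  </b>\n</a>\n'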
91  lib/bs4/tests/test_fuzz.py  Normal file
@@ -0,0 +1,91 @@
"""This file contains test cases reported by third parties using
fuzzing tools, primarily from Google's oss-fuzz project. Some of these
represent real problems with Beautiful Soup, but many are problems in
libraries that Beautiful Soup depends on, and many of the test cases
represent different ways of triggering the same problem.

Grouping these test cases together makes it easy to see which test
cases represent the same problem, and puts the test cases in close
proximity to code that can trigger the problems.
"""
import os
import pytest
from bs4 import (
    BeautifulSoup,
    ParserRejectedMarkup,
)

class TestFuzz(object):

    # Test case markup files from fuzzers are given this extension so
    # they can be included in builds.
    TESTCASE_SUFFIX = ".testcase"

    # This class of error has been fixed by catching a less helpful
    # exception from html.parser and raising ParserRejectedMarkup
    # instead.
    @pytest.mark.parametrize(
        "filename", [
            "clusterfuzz-testcase-minimized-bs4_fuzzer-5703933063462912",
        ]
    )
    def test_rejected_markup(self, filename):
        markup = self.__markup(filename)
        with pytest.raises(ParserRejectedMarkup):
            BeautifulSoup(markup, 'html.parser')

    # This class of error has to do with very deeply nested documents
    # which overflow the Python call stack when the tree is converted
    # to a string. This is an issue with Beautiful Soup which was fixed
    # as part of [bug=1471755].
    @pytest.mark.parametrize(
        "filename", [
            "clusterfuzz-testcase-minimized-bs4_fuzzer-5984173902397440",
            "clusterfuzz-testcase-minimized-bs4_fuzzer-5167584867909632",
            "clusterfuzz-testcase-minimized-bs4_fuzzer-6124268085182464",
            "clusterfuzz-testcase-minimized-bs4_fuzzer-6450958476902400",
        ]
    )
    def test_deeply_nested_document(self, filename):
        # Parsing the document and encoding it back to a string is
        # sufficient to demonstrate that the overflow problem has
        # been fixed.
        markup = self.__markup(filename)
        BeautifulSoup(markup, 'html.parser').encode()

    # This class of error represents problems with html5lib's parser,
    # not Beautiful Soup. I use
    # https://github.com/html5lib/html5lib-python/issues/568 to notify
    # the html5lib developers of these issues.
    @pytest.mark.skip("html5lib problems")
    @pytest.mark.parametrize(
        "filename", [
            # b"""ÿ<!DOCTyPEV PUBLIC'''Ð'"""
            "clusterfuzz-testcase-minimized-bs4_fuzzer-4818336571064320",

            # b')<a><math><TR><a><mI><a><p><a>'
            "clusterfuzz-testcase-minimized-bs4_fuzzer-4999465949331456",

            # b'-<math><sElect><mi><sElect><sElect>'
            "clusterfuzz-testcase-minimized-bs4_fuzzer-5843991618256896",

            # b'ñ<table><svg><html>'
            "clusterfuzz-testcase-minimized-bs4_fuzzer-6241471367348224",

            # <TABLE>, some ^@ characters, some <math> tags.
            "clusterfuzz-testcase-minimized-bs4_fuzzer-6600557255327744",

            # Nested table
            "crash-0d306a50c8ed8bcd0785b67000fcd5dea1d33f08"
        ]
    )
    def test_html5lib_parse_errors(self, filename):
        markup = self.__markup(filename)
        print(BeautifulSoup(markup, 'html5lib').encode())

    def __markup(self, filename):
        if not filename.endswith(self.TESTCASE_SUFFIX):
            filename += self.TESTCASE_SUFFIX
        this_dir = os.path.split(__file__)[0]
        path = os.path.join(this_dir, 'fuzz', filename)
        return open(path, 'rb').read()
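The "deeply nested document" testcases all reduce to the same shape of input, which the new persistence and encoding tests in this commit also generate synthetically. A sketch of the class of problem, grounded in the test comments above:

import sys
from bs4 import BeautifulSoup

limit = sys.getrecursionlimit() + 1
markup = '<span>' * limit
# With the fix referenced as [bug=1471755], encoding no longer makes one
# recursive call per nesting level, so this completes instead of
# overflowing the interpreter stack.
BeautifulSoup(markup, 'html.parser').encode()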
@@ -3,9 +3,11 @@ trees."""

 from pdb import set_trace
 import pickle
+import pytest
 import warnings
 from bs4.builder import (
     HTMLParserTreeBuilder,
+    ParserRejectedMarkup,
     XMLParsedAsHTMLWarning,
 )
 from bs4.builder._htmlparser import BeautifulSoupHTMLParser

@@ -15,6 +17,28 @@ class TestHTMLParserTreeBuilder(SoupTest, HTMLTreeBuilderSmokeTest):

     default_builder = HTMLParserTreeBuilder

+    def test_rejected_input(self):
+        # Python's html.parser will occasionally reject markup,
+        # especially when there is a problem with the initial DOCTYPE
+        # declaration. Different versions of Python sound the alarm in
+        # different ways, but Beautiful Soup consistently raises
+        # errors as ParserRejectedMarkup exceptions.
+        bad_markup = [
+            # https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=28873
+            # https://github.com/guidovranken/python-library-fuzzers/blob/master/corp-html/519e5b4269a01185a0d5e76295251921da2f0700
+            # https://github.com/python/cpython/issues/81928
+            b'\n<![\xff\xfe\xfe\xcd\x00',
+
+            #https://github.com/guidovranken/python-library-fuzzers/blob/master/corp-html/de32aa55785be29bbc72a1a8e06b00611fb3d9f8
+            # https://github.com/python/cpython/issues/78661
+            #
+            b'<![n\x00',
+            b"<![UNKNOWN[]]>",
+        ]
+        for markup in bad_markup:
+            with pytest.raises(ParserRejectedMarkup):
+                soup = self.soup(markup)
+
     def test_namespaced_system_doctype(self):
         # html.parser can't handle namespaced doctypes, so skip this one.
         pass
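In short, markup that makes html.parser give up now surfaces as a single, catchable exception type. A minimal sketch (the import path matches the one added above):

from bs4 import BeautifulSoup
from bs4.builder import ParserRejectedMarkup

try:
    BeautifulSoup(b'<![UNKNOWN[]]>', 'html.parser')
except ParserRejectedMarkup:
    print('html.parser could not make sense of this markup')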
@@ -189,13 +189,15 @@ class TestLXMLXMLTreeBuilder(SoupTest, XMLTreeBuilderSmokeTest):
         assert soup.find('prefix:tag3').name == 'tag3'
         assert soup.subtag.find('prefix:tag3').name == 'tag3'

-    def test_pickle_removes_builder(self):
-        # The lxml TreeBuilder is not picklable, so it won't be
-        # preserved in a pickle/unpickle operation.
+    def test_pickle_restores_builder(self):
+        # The lxml TreeBuilder is not picklable, so when unpickling
+        # a document created with it, a new TreeBuilder of the
+        # appropriate class is created.
         soup = self.soup("<a>some markup</a>")
         assert isinstance(soup.builder, self.default_builder)
         pickled = pickle.dumps(soup)
         unpickled = pickle.loads(pickled)

         assert "some markup" == unpickled.a.string
-        assert unpickled.builder is None
+        assert unpickled.builder != soup.builder
+        assert isinstance(unpickled.builder, self.default_builder)
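A sketch of the new round-trip behavior from the caller's side (assuming lxml is installed):

import pickle
from bs4 import BeautifulSoup

soup = BeautifulSoup('<a>some markup</a>', 'lxml')
clone = pickle.loads(pickle.dumps(soup))
assert clone.a.string == 'some markup'
# The unpicklable lxml builder comes back as a fresh instance of the
# same class, where it used to come back as None.
assert clone.builder is not soup.builder
assert type(clone.builder) is type(soup.builder)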
@@ -2,20 +2,18 @@
 import copy
 import pickle
 import pytest
+import sys

 from bs4 import BeautifulSoup
 from bs4.element import (
     Comment,
+    ResultSet,
     SoupStrainer,
 )
 from . import (
     SoupTest,
-    SOUP_SIEVE_PRESENT,
 )

-if SOUP_SIEVE_PRESENT:
-    from soupsieve import SelectorSyntaxError
-
 class TestEncoding(SoupTest):
     """Test the ability to encode objects into strings."""

@@ -52,9 +50,20 @@ class TestEncoding(SoupTest):
             encoding="utf8"
         )

+    def test_encode_deeply_nested_document(self):
+        # This test verifies that encoding a string doesn't involve
+        # any recursive function calls. If it did, this test would
+        # overflow the Python interpreter stack.
+        limit = sys.getrecursionlimit() + 1
+        markup = "<span>" * limit
+        soup = self.soup(markup)
+        encoded = soup.encode()
+        assert limit == encoded.count(b"<span>")
+
     def test_deprecated_renderContents(self):
         html = "<b>\N{SNOWMAN}</b>"
         soup = self.soup(html)
+        soup.renderContents()
         assert "\N{SNOWMAN}".encode("utf8") == soup.b.renderContents()

     def test_repr(self):
@@ -159,7 +168,31 @@ class TestFormatters(SoupTest):
         soup = self.soup("<div> foo <pre> \tbar\n \n </pre> baz <textarea> eee\nfff\t</textarea></div>")
         # Everything outside the <pre> tag is reformatted, but everything
         # inside is left alone.
-        assert '<div>\n foo\n <pre> \tbar\n \n </pre>\n baz\n <textarea> eee\nfff\t</textarea>\n</div>' == soup.div.prettify()
+        assert '<div>\n foo\n <pre> \tbar\n \n </pre>\n baz\n <textarea> eee\nfff\t</textarea>\n</div>\n' == soup.div.prettify()
+
+    def test_prettify_handles_nested_string_literal_tags(self):
+        # Most of this markup is inside a <pre> tag, so prettify()
+        # only does three things to it:
+        # 1. Add a newline and a space between the <div> and the <pre>
+        # 2. Add a newline after the </pre>
+        # 3. Add a newline at the end.
+        #
+        # The contents of the <pre> tag are left completely alone. In
+        # particular, we don't start adding whitespace again once we
+        # encounter the first </pre> tag, because we know it's not
+        # the one that put us into string literal mode.
+        markup = """<div><pre><code>some
+<script><pre>code</pre></script> for you
+</code></pre></div>"""
+
+        expect = """<div>
+ <pre><code>some
+<script><pre>code</pre></script> for you
+</code></pre>
+</div>
+"""
+        soup = self.soup(markup)
+        assert expect == soup.div.prettify()
+
     def test_prettify_accepts_formatter_function(self):
         soup = BeautifulSoup("<html><body>foo</body></html>", 'html.parser')
@@ -216,429 +249,6 @@ class TestFormatters(SoupTest):
         assert soup.contents[0].name == 'pre'


-@pytest.mark.skipif(not SOUP_SIEVE_PRESENT, reason="Soup Sieve not installed")
-class TestCSSSelectors(SoupTest):
-    """Test basic CSS selector functionality.
(remaining ~420 deleted lines not shown: the TestCSSSelectors suite removed here is the same code now added as lib/bs4/tests/test_css.py above, apart from the new CSS-object tests added there)

 class TestPersistence(SoupTest):
     "Testing features like pickle and deepcopy."
@@ -674,6 +284,18 @@ class TestPersistence(SoupTest):
         copied = copy.deepcopy(self.tree)
         assert copied.decode() == self.tree.decode()

+    def test_copy_deeply_nested_document(self):
+        # This test verifies that copy and deepcopy don't involve any
+        # recursive function calls. If they did, this test would
+        # overflow the Python interpreter stack.
+        limit = sys.getrecursionlimit() + 1
+        markup = "<span>" * limit
+
+        soup = self.soup(markup)
+
+        copied = copy.copy(soup)
+        copied = copy.deepcopy(soup)
+
     def test_copy_preserves_encoding(self):
         soup = BeautifulSoup(b'<p> </p>', 'html.parser')
         encoding = soup.original_encoding
@@ -24,6 +24,7 @@ from bs4.builder import (
 from bs4.element import (
     Comment,
     SoupStrainer,
+    PYTHON_SPECIFIC_ENCODINGS,
     Tag,
     NavigableString,
 )

@@ -210,6 +211,47 @@ class TestConstructor(SoupTest):
         assert [] == soup.string_container_stack


+class TestOutput(SoupTest):
+
+    @pytest.mark.parametrize(
+        "eventual_encoding,actual_encoding", [
+            ("utf-8", "utf-8"),
+            ("utf-16", "utf-16"),
+        ]
+    )
+    def test_decode_xml_declaration(self, eventual_encoding, actual_encoding):
+        # Most of the time, calling decode() on an XML document will
+        # give you a document declaration that mentions the encoding
+        # you intend to use when encoding the document as a
+        # bytestring.
+        soup = self.soup("<tag></tag>")
+        soup.is_xml = True
+        assert (f'<?xml version="1.0" encoding="{actual_encoding}"?>\n<tag></tag>'
+                == soup.decode(eventual_encoding=eventual_encoding))
+
+    @pytest.mark.parametrize(
+        "eventual_encoding", [x for x in PYTHON_SPECIFIC_ENCODINGS] + [None]
+    )
+    def test_decode_xml_declaration_with_missing_or_python_internal_eventual_encoding(self, eventual_encoding):
+        # But if you pass a Python internal encoding into decode(), or
+        # omit the eventual_encoding altogether, the document
+        # declaration won't mention any particular encoding.
+        soup = BeautifulSoup("<tag></tag>", "html.parser")
+        soup.is_xml = True
+        assert (f'<?xml version="1.0"?>\n<tag></tag>'
+                == soup.decode(eventual_encoding=eventual_encoding))
+
+    def test(self):
+        # BeautifulSoup subclasses Tag and extends the decode() method.
+        # Make sure the other Tag methods which call decode() call
+        # it correctly.
+        soup = self.soup("<tag></tag>")
+        assert b"<tag></tag>" == soup.encode(encoding="utf-8")
+        assert b"<tag></tag>" == soup.encode_contents(encoding="utf-8")
+        assert "<tag></tag>" == soup.decode_contents()
+        assert "<tag>\n</tag>\n" == soup.prettify()
+
+
 class TestWarnings(SoupTest):
     # Note that some of the tests in this class create BeautifulSoup
     # objects directly rather than using self.soup(). That's
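The two parametrized tests pin down exactly when decode() advertises an encoding in the XML declaration. A usage sketch mirroring them:

from bs4 import BeautifulSoup

soup = BeautifulSoup('<tag></tag>', 'html.parser')
soup.is_xml = True
# A real target encoding is echoed into the declaration...
assert soup.decode(eventual_encoding='utf-16').startswith(
    '<?xml version="1.0" encoding="utf-16"?>')
# ...but a Python-internal codec, or no encoding at all, is not.
assert soup.decode(eventual_encoding=None).startswith(
    '<?xml version="1.0"?>')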
@@ -1,4 +1,4 @@
 from .core import contents, where

 __all__ = ["contents", "where"]
-__version__ = "2022.12.07"
+__version__ = "2023.07.22"
@@ -791,34 +791,6 @@ uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
 XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
 -----END CERTIFICATE-----
-
-# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post
-# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post
-# Label: "Hongkong Post Root CA 1"
-# Serial: 1000
-# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca
-# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58
-# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2
------BEGIN CERTIFICATE-----
-MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx
-FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg
-Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG
-A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr
-b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ
-jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn
-PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh
-ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9
-nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h
-q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED
-MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC
-mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3
-7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB
-oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs
-EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO
-fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi
-AmvZWg==
------END CERTIFICATE-----

 # Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
 # Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
 # Label: "SecureSign RootCA11"
@@ -1676,50 +1648,6 @@ HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx
 SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY=
 -----END CERTIFICATE-----
-
-# Issuer: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi
-# Subject: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi
-# Label: "E-Tugra Certification Authority"
-# Serial: 7667447206703254355
-# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49
-# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39
-# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c
------BEGIN CERTIFICATE-----
-MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV
-BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC
-aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV
-BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1
-Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz
-MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+
-BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp
-em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN
-ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5
-MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY
-B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH
-D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF
-Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo
-q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D
-k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH
-fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut
-dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM
-ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8
-zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn
-rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX
-U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6
-Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5
-XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF
-Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR
-HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY
-GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c
-77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3
-+GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK
-vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6
-FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl
-yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P
-AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD
-y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d
-NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA==
------END CERTIFICATE-----

 # Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
 # Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
 # Label: "T-TeleSec GlobalRoot Class 2"
@@ -4397,73 +4325,6 @@ ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG
 BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR
 -----END CERTIFICATE-----
-
-# Issuer: CN=E-Tugra Global Root CA RSA v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center
-# Subject: CN=E-Tugra Global Root CA RSA v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center
-# Label: "E-Tugra Global Root CA RSA v3"
-# Serial: 75951268308633135324246244059508261641472512052
-# MD5 Fingerprint: 22:be:10:f6:c2:f8:03:88:73:5f:33:29:47:28:47:a4
-# SHA1 Fingerprint: e9:a8:5d:22:14:52:1c:5b:aa:0a:b4:be:24:6a:23:8a:c9:ba:e2:a9
-# SHA256 Fingerprint: ef:66:b0:b1:0a:3c:db:9f:2e:36:48:c7:6b:d2:af:18:ea:d2:bf:e6:f1:17:65:5e:28:c4:06:0d:a1:a3:f4:c2
------BEGIN CERTIFICATE-----
-MIIF8zCCA9ugAwIBAgIUDU3FzRYilZYIfrgLfxUGNPt5EDQwDQYJKoZIhvcNAQEL
-BQAwgYAxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHEwZBbmthcmExGTAXBgNVBAoTEEUt
-VHVncmEgRUJHIEEuUy4xHTAbBgNVBAsTFEUtVHVncmEgVHJ1c3QgQ2VudGVyMSYw
-JAYDVQQDEx1FLVR1Z3JhIEdsb2JhbCBSb290IENBIFJTQSB2MzAeFw0yMDAzMTgw
-OTA3MTdaFw00NTAzMTIwOTA3MTdaMIGAMQswCQYDVQQGEwJUUjEPMA0GA1UEBxMG
-QW5rYXJhMRkwFwYDVQQKExBFLVR1Z3JhIEVCRyBBLlMuMR0wGwYDVQQLExRFLVR1
-Z3JhIFRydXN0IENlbnRlcjEmMCQGA1UEAxMdRS1UdWdyYSBHbG9iYWwgUm9vdCBD
-QSBSU0EgdjMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCiZvCJt3J7
-7gnJY9LTQ91ew6aEOErxjYG7FL1H6EAX8z3DeEVypi6Q3po61CBxyryfHUuXCscx
-uj7X/iWpKo429NEvx7epXTPcMHD4QGxLsqYxYdE0PD0xesevxKenhOGXpOhL9hd8
-7jwH7eKKV9y2+/hDJVDqJ4GohryPUkqWOmAalrv9c/SF/YP9f4RtNGx/ardLAQO/
-rWm31zLZ9Vdq6YaCPqVmMbMWPcLzJmAy01IesGykNz709a/r4d+ABs8qQedmCeFL
-l+d3vSFtKbZnwy1+7dZ5ZdHPOrbRsV5WYVB6Ws5OUDGAA5hH5+QYfERaxqSzO8bG
-wzrwbMOLyKSRBfP12baqBqG3q+Sx6iEUXIOk/P+2UNOMEiaZdnDpwA+mdPy70Bt4
-znKS4iicvObpCdg604nmvi533wEKb5b25Y08TVJ2Glbhc34XrD2tbKNSEhhw5oBO
-M/J+JjKsBY04pOZ2PJ8QaQ5tndLBeSBrW88zjdGUdjXnXVXHt6woq0bM5zshtQoK
-5EpZ3IE1S0SVEgpnpaH/WwAH0sDM+T/8nzPyAPiMbIedBi3x7+PmBvrFZhNb/FAH
-nnGGstpvdDDPk1Po3CLW3iAfYY2jLqN4MpBs3KwytQXk9TwzDdbgh3cXTJ2w2Amo
-DVf3RIXwyAS+XF1a4xeOVGNpf0l0ZAWMowIDAQABo2MwYTAPBgNVHRMBAf8EBTAD
-AQH/MB8GA1UdIwQYMBaAFLK0ruYt9ybVqnUtdkvAG1Mh0EjvMB0GA1UdDgQWBBSy
-tK7mLfcm1ap1LXZLwBtTIdBI7zAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEL
-BQADggIBAImocn+M684uGMQQgC0QDP/7FM0E4BQ8Tpr7nym/Ip5XuYJzEmMmtcyQ
-6dIqKe6cLcwsmb5FJ+Sxce3kOJUxQfJ9emN438o2Fi+CiJ+8EUdPdk3ILY7r3y18
-Tjvarvbj2l0Upq7ohUSdBm6O++96SmotKygY/r+QLHUWnw/qln0F7psTpURs+APQ
-3SPh/QMSEgj0GDSz4DcLdxEBSL9htLX4GdnLTeqjjO/98Aa1bZL0SmFQhO3sSdPk
-vmjmLuMxC1QLGpLWgti2omU8ZgT5Vdps+9u1FGZNlIM7zR6mK7L+d0CGq+ffCsn9
-9t2HVhjYsCxVYJb6CH5SkPVLpi6HfMsg2wY+oF0Dd32iPBMbKaITVaA9FCKvb7jQ
-mhty3QUBjYZgv6Rn7rWlDdF/5horYmbDB7rnoEgcOMPpRfunf/ztAmgayncSd6YA
-VSgU7NbHEqIbZULpkejLPoeJVF3Zr52XnGnnCv8PWniLYypMfUeUP95L6VPQMPHF
-9p5J3zugkaOj/s1YzOrfr28oO6Bpm4/srK4rVJ2bBLFHIK+WEj5jlB0E5y67hscM
-moi/dkfv97ALl2bSRM9gUgfh1SxKOidhd8rXj+eHDjD/DLsE4mHDosiXYY60MGo8
-bcIHX0pzLz/5FooBZu+6kcpSV3uu1OYP3Qt6f4ueJiDPO++BcYNZ
------END CERTIFICATE-----
-
-# Issuer: CN=E-Tugra Global Root CA ECC v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center
-# Subject: CN=E-Tugra Global Root CA ECC v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center
-# Label: "E-Tugra Global Root CA ECC v3"
-# Serial: 218504919822255052842371958738296604628416471745
-# MD5 Fingerprint: 46:bc:81:bb:f1:b5:1e:f7:4b:96:bc:14:e2:e7:27:64
-# SHA1 Fingerprint: 8a:2f:af:57:53:b1:b0:e6:a1:04:ec:5b:6a:69:71:6d:f6:1c:e2:84
-# SHA256 Fingerprint: 87:3f:46:85:fa:7f:56:36:25:25:2e:6d:36:bc:d7:f1:6f:c2:49:51:f2:64:e4:7e:1b:95:4f:49:08:cd:ca:13
------BEGIN CERTIFICATE-----
-MIICpTCCAiqgAwIBAgIUJkYZdzHhT28oNt45UYbm1JeIIsEwCgYIKoZIzj0EAwMw
-gYAxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHEwZBbmthcmExGTAXBgNVBAoTEEUtVHVn
-cmEgRUJHIEEuUy4xHTAbBgNVBAsTFEUtVHVncmEgVHJ1c3QgQ2VudGVyMSYwJAYD
-VQQDEx1FLVR1Z3JhIEdsb2JhbCBSb290IENBIEVDQyB2MzAeFw0yMDAzMTgwOTQ2
-NThaFw00NTAzMTIwOTQ2NThaMIGAMQswCQYDVQQGEwJUUjEPMA0GA1UEBxMGQW5r
-YXJhMRkwFwYDVQQKExBFLVR1Z3JhIEVCRyBBLlMuMR0wGwYDVQQLExRFLVR1Z3Jh
-IFRydXN0IENlbnRlcjEmMCQGA1UEAxMdRS1UdWdyYSBHbG9iYWwgUm9vdCBDQSBF
-Q0MgdjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASOmCm/xxAeJ9urA8woLNheSBkQ
-KczLWYHMjLiSF4mDKpL2w6QdTGLVn9agRtwcvHbB40fQWxPa56WzZkjnIZpKT4YK
-fWzqTTKACrJ6CZtpS5iB4i7sAnCWH/31Rs7K3IKjYzBhMA8GA1UdEwEB/wQFMAMB
-Af8wHwYDVR0jBBgwFoAU/4Ixcj75xGZsrTie0bBRiKWQzPUwHQYDVR0OBBYEFP+C
-MXI++cRmbK04ntGwUYilkMz1MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNp
-ADBmAjEA5gVYaWHlLcoNy/EZCL3W/VGSGn5jVASQkZo1kTmZ+gepZpO6yGjUij/6
-7W4WAie3AjEA3VoXK3YdZUKWpqxdinlW2Iob35reX8dQj7FbcQwm32pAAOwzkSFx
-vmjkI6TZraE3
------END CERTIFICATE-----

 # Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD.
 # Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD.
 # Label: "Security Communication RootCA3"
@@ -4525,3 +4386,250 @@ BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu
 9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O
 be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k=
 -----END CERTIFICATE-----
+
+# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY
+# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY
+# Label: "BJCA Global Root CA1"
+# Serial: 113562791157148395269083148143378328608
+# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90
+# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a
+# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae
+-----BEGIN CERTIFICATE-----
+MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU
+MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI
+T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz
+MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF
+SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh
+bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z
+xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ
+spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5
+58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR
+at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll
+5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq
+nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK
+V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/
+pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO
+z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn
+jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+
+WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF
+7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE
+AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4
+YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli
+awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u
++2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88
+X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN
+SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo
+P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI
++pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz
+znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9
+eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2
+YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy
+r/6zcCwupvI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY
+# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY
+# Label: "BJCA Global Root CA2"
+# Serial: 58605626836079930195615843123109055211
+# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c
+# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6
+# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82
+-----BEGIN CERTIFICATE-----
+MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw
+CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ
+VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy
+MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ
+TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS
+b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B
+IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+
++kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK
+sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA
+94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B
+43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited
+# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited
+# Label: "Sectigo Public Server Authentication Root E46"
+# Serial: 88989738453351742415770396670917916916
+# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01
+# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a
+# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83
+-----BEGIN CERTIFICATE-----
+MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw
+CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T
+ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN
+MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG
+A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT
+ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA
+IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC
+WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+
+6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B
+Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa
+qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q
+4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited
+# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited
+# Label: "Sectigo Public Server Authentication Root R46"
+# Serial: 156256931880233212765902055439220583700
+# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5
+# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38
+# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06
+-----BEGIN CERTIFICATE-----
+MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf
+MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD
+Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw
+HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY
+MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp
+YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa
+ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz
+SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf
+iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X
+ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3
+IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS
+VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE
+SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu
++Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt
+8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L
+HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt
+zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P
+AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c
+mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ
+YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52
+gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA
+Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB
+JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX
+DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui
+TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5
+dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65
+LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp
+0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY
+QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation
+# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation
+# Label: "SSL.com TLS RSA Root CA 2022"
+# Serial: 148535279242832292258835760425842727825
+# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da
+# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca
+# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed
+-----BEGIN CERTIFICATE-----
+MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO
+MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD
+DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX
+DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw
+b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC
+AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP
+L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY
+t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins
+S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3
+PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO
+L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3
+R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w
+dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS
++YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS
+d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG
+AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f
+gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j
+BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z
+NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt
+hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM
+QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf
+R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ
+DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW
+P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy
+lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq
+bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w
+AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q
+r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji
+Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU
+98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation
+# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation
+# Label: "SSL.com TLS ECC Root CA 2022"
+# Serial: 26605119622390491762507526719404364228
+# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5
+# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39
+# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43
+-----BEGIN CERTIFICATE-----
+MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw
+CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT
+U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2
+MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh
+dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG
+ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm
+acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN
+SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME
+GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW
+uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp
+15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN
+b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos
+# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos
+# Label: "Atos TrustedRoot Root CA ECC TLS 2021"
+# Serial: 81873346711060652204712539181482831616
+# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8
+# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd
+# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8
+-----BEGIN CERTIFICATE-----
+MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w
+LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w
+CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0
+MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF
+Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI
+zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X
+tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4
+AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2
+KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD
+aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu
+CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo
+9H1/IISpQuQo
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos
+# Subject: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos
+# Label: "Atos TrustedRoot Root CA RSA TLS 2021"
+# Serial: 111436099570196163832749341232207667876
+# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2
+# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48
+# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f
+-----BEGIN CERTIFICATE-----
+MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM
+MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx
+MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00
+MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD
+QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN
+BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z
+4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv
+Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ
+kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs
+GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln
+nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh
+3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD
+0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy
+geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8
+ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB
+c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI
+pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
+dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB
+DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS
+4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs
+o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ
+qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw
+xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM
+rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4
+AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR
+0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY
+o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5
+dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE
+oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ==
+-----END CERTIFICATE-----
@@ -21,7 +21,7 @@ at <https://github.com/Ousret/charset_normalizer>.
 """
 import logging

-from .api import from_bytes, from_fp, from_path
+from .api import from_bytes, from_fp, from_path, is_binary
 from .legacy import detect
 from .models import CharsetMatch, CharsetMatches
 from .utils import set_logging_handler
@@ -31,6 +31,7 @@ __all__ = (
     "from_fp",
     "from_path",
     "from_bytes",
+    "is_binary",
     "detect",
     "CharsetMatch",
     "CharsetMatches",
@@ -1,6 +1,6 @@
 import logging
 from os import PathLike
-from typing import Any, BinaryIO, List, Optional, Set
+from typing import BinaryIO, List, Optional, Set, Union

 from .cd import (
     coherence_ratio,
@@ -31,7 +31,7 @@ explain_handler.setFormatter(


 def from_bytes(
-    sequences: bytes,
+    sequences: Union[bytes, bytearray],
     steps: int = 5,
     chunk_size: int = 512,
     threshold: float = 0.2,
@@ -40,6 +40,7 @@ def from_bytes(
     preemptive_behaviour: bool = True,
     explain: bool = False,
     language_threshold: float = 0.1,
+    enable_fallback: bool = True,
 ) -> CharsetMatches:
     """
     Given a raw bytes sequence, return the best possibles charset usable to render str objects.
@@ -361,7 +362,8 @@ def from_bytes(
         )
     # Preparing those fallbacks in case we got nothing.
     if (
-        encoding_iana in ["ascii", "utf_8", specified_encoding]
+        enable_fallback
+        and encoding_iana in ["ascii", "utf_8", specified_encoding]
         and not lazy_str_hard_failure
     ):
         fallback_entry = CharsetMatch(
@@ -507,6 +509,7 @@ def from_fp(
     preemptive_behaviour: bool = True,
     explain: bool = False,
     language_threshold: float = 0.1,
+    enable_fallback: bool = True,
 ) -> CharsetMatches:
     """
     Same thing than the function from_bytes but using a file pointer that is already ready.
@@ -522,11 +525,12 @@ def from_fp(
         preemptive_behaviour,
         explain,
         language_threshold,
+        enable_fallback,
     )


 def from_path(
-    path: "PathLike[Any]",
+    path: Union[str, bytes, PathLike],  # type: ignore[type-arg]
     steps: int = 5,
     chunk_size: int = 512,
     threshold: float = 0.20,
@@ -535,6 +539,7 @@ def from_path(
     preemptive_behaviour: bool = True,
     explain: bool = False,
     language_threshold: float = 0.1,
+    enable_fallback: bool = True,
 ) -> CharsetMatches:
     """
     Same thing than the function from_bytes but with one extra step. Opening and reading given file path in binary mode.
@@ -551,4 +556,71 @@ def from_path(
         preemptive_behaviour,
         explain,
         language_threshold,
+        enable_fallback,
     )
+
+
+def is_binary(
+    fp_or_path_or_payload: Union[PathLike, str, BinaryIO, bytes],  # type: ignore[type-arg]
+    steps: int = 5,
+    chunk_size: int = 512,
+    threshold: float = 0.20,
+    cp_isolation: Optional[List[str]] = None,
+    cp_exclusion: Optional[List[str]] = None,
+    preemptive_behaviour: bool = True,
+    explain: bool = False,
+    language_threshold: float = 0.1,
+    enable_fallback: bool = False,
+) -> bool:
+    """
+    Detect if the given input (file, bytes, or path) points to a binary file. aka. not a string.
+    Based on the same main heuristic algorithms and default kwargs at the sole exception that fallbacks match
+    are disabled to be stricter around ASCII-compatible but unlikely to be a string.
+    """
+    if isinstance(fp_or_path_or_payload, (str, PathLike)):
+        guesses = from_path(
+            fp_or_path_or_payload,
+            steps=steps,
+            chunk_size=chunk_size,
+            threshold=threshold,
+            cp_isolation=cp_isolation,
+            cp_exclusion=cp_exclusion,
+            preemptive_behaviour=preemptive_behaviour,
+            explain=explain,
+            language_threshold=language_threshold,
+            enable_fallback=enable_fallback,
+        )
+    elif isinstance(
+        fp_or_path_or_payload,
+        (
+            bytes,
+            bytearray,
+        ),
+    ):
+        guesses = from_bytes(
+            fp_or_path_or_payload,
+            steps=steps,
+            chunk_size=chunk_size,
+            threshold=threshold,
+            cp_isolation=cp_isolation,
+            cp_exclusion=cp_exclusion,
+            preemptive_behaviour=preemptive_behaviour,
+            explain=explain,
+            language_threshold=language_threshold,
+            enable_fallback=enable_fallback,
+        )
+    else:
+        guesses = from_fp(
+            fp_or_path_or_payload,
+            steps=steps,
+            chunk_size=chunk_size,
+            threshold=threshold,
+            cp_isolation=cp_isolation,
+            cp_exclusion=cp_exclusion,
+            preemptive_behaviour=preemptive_behaviour,
+            explain=explain,
+            language_threshold=language_threshold,
+            enable_fallback=enable_fallback,
+        )
+
+    return not guesses
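The new `is_binary()` helper above is a thin dispatcher: it routes a path, byte payload, or file object to `from_path()`, `from_bytes()`, or `from_fp()` with fallbacks disabled, then reports "binary" when no charset guess survives. A small usage sketch (the byte payloads and printed outcomes are illustrative assumptions, not test fixtures from this diff):

    from charset_normalizer import is_binary

    # NUL bytes defeat every text decoder, so no guess remains -> binary.
    print(is_binary(b"\x00\x01\x02\xff"))   # expected: True
    # Plain ASCII text decodes cleanly -> not binary.
    print(is_binary(b"hello, world"))       # expected: False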
@@ -294,12 +294,23 @@ class SuperWeirdWordPlugin(MessDetectorPlugin):
         if buffer_length >= 4:
             if self._buffer_accent_count / buffer_length > 0.34:
                 self._is_current_word_bad = True
-            # Word/Buffer ending with a upper case accentuated letter are so rare,
+            # Word/Buffer ending with an upper case accentuated letter are so rare,
             # that we will consider them all as suspicious. Same weight as foreign_long suspicious.
             if is_accentuated(self._buffer[-1]) and self._buffer[-1].isupper():
                 self._foreign_long_count += 1
                 self._is_current_word_bad = True
             if buffer_length >= 24 and self._foreign_long_watch:
-                self._foreign_long_count += 1
-                self._is_current_word_bad = True
+                camel_case_dst = [
+                    i
+                    for c, i in zip(self._buffer, range(0, buffer_length))
+                    if c.isupper()
+                ]
+                probable_camel_cased: bool = False
+
+                if camel_case_dst and (len(camel_case_dst) / buffer_length <= 0.3):
+                    probable_camel_cased = True
+
+                if not probable_camel_cased:
+                    self._foreign_long_count += 1
+                    self._is_current_word_bad = True
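The new branch above stops counting long CamelCase identifiers (common inside source code) as "foreign long" words: if a 24+ character buffer contains some uppercase letters but no more than 30% of them, the word is presumed camel-cased and not penalized. A standalone sketch of that ratio test (function name is illustrative, not the plugin's API):

    def looks_camel_cased(buffer: str) -> bool:
        # Positions of the uppercase characters in the buffer.
        upper_positions = [i for i, c in enumerate(buffer) if c.isupper()]
        # Some uppercase letters, but at most 30% of the word.
        return bool(upper_positions) and len(upper_positions) / len(buffer) <= 0.3

    print(looks_camel_cased("getAttributeNodeNameSpace"))  # True: 5/25 = 0.2
    print(looks_camel_cased("IABGPEOIURGHAKJ"))            # False: all uppercase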
@@ -120,12 +120,12 @@ def is_emoticon(character: str) -> bool:

 @lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
 def is_separator(character: str) -> bool:
-    if character.isspace() or character in {"|", "+", ",", ";", "<", ">"}:
+    if character.isspace() or character in {"|", "+", "<", ">"}:
         return True

     character_category: str = unicodedata.category(character)

-    return "Z" in character_category
+    return "Z" in character_category or character_category in {"Po", "Pd", "Pc"}


 @lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
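The rewritten `is_separator()` above trades the hard-coded `,` and `;` entries for Unicode general categories: any Z* space plus Po (other punctuation), Pd (dash), and Pc (connector) characters now count as separators. The category lookups it relies on, using only the standard library:

    import unicodedata

    for ch in (",", ";", "-", "_", "\u00a0"):
        # Prints Po, Po, Pd, Pc and Zs respectively -- all separators
        # under the new rule.
        print(repr(ch), unicodedata.category(ch))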
@@ -2,5 +2,5 @@
 Expose version
 """

-__version__ = "3.1.0"
+__version__ = "3.2.0"
 VERSION = __version__.split(".")
@@ -24,6 +24,7 @@ SYS_PLATFORM = platform.system()
 IS_WINDOWS = SYS_PLATFORM == 'Windows'
 IS_LINUX = SYS_PLATFORM == 'Linux'
 IS_MACOS = SYS_PLATFORM == 'Darwin'
+IS_SOLARIS = SYS_PLATFORM == 'SunOS'

 PLATFORM_ARCH = platform.machine()
 IS_PPC = PLATFORM_ARCH.startswith('ppc')
@@ -10,6 +10,7 @@ SYS_PLATFORM: str
 IS_WINDOWS: bool
 IS_LINUX: bool
 IS_MACOS: bool
+IS_SOLARIS: bool
 PLATFORM_ARCH: str
 IS_PPC: bool
@@ -274,8 +274,7 @@ class ConnectionManager:
             # One of the reason on why a socket could cause an error
             # is that the socket is already closed, ignore the
             # socket error if we try to close it at this point.
-            # This is equivalent to OSError in Py3
-            with suppress(socket.error):
+            with suppress(OSError):
                 conn.close()

     def _from_server_socket(self, server_socket):  # noqa: C901  # FIXME
@@ -308,7 +307,7 @@ class ConnectionManager:
                 wfile = mf(s, 'wb', io.DEFAULT_BUFFER_SIZE)
                 try:
                     wfile.write(''.join(buf).encode('ISO-8859-1'))
-                except socket.error as ex:
+                except OSError as ex:
                     if ex.args[0] not in errors.socket_errors_to_ignore:
                         raise
                     return
@@ -343,7 +342,7 @@ class ConnectionManager:
             # notice keyboard interrupts on Win32, which don't interrupt
             # accept() by default
             return
-        except socket.error as ex:
+        except OSError as ex:
             if self.server.stats['Enabled']:
                 self.server.stats['Socket Errors'] += 1
             if ex.args[0] in errors.socket_error_eintr:
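On Python 3, `socket.error` is just an alias of `OSError`, so the rewrite above loses no coverage while dropping the Py2-era wording. A minimal sketch of the close-and-ignore idiom used here:

    import socket
    from contextlib import suppress

    conn = socket.socket()
    conn.close()
    # A second close() on an already-torn-down socket is where an OSError
    # could surface on some platforms; suppress() swallows it either way.
    with suppress(OSError):
        conn.close()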
@@ -77,9 +77,4 @@ Refs:
 * https://docs.microsoft.com/windows/win32/api/winsock/nf-winsock-shutdown
 """

-try:  # py3
-    acceptable_sock_shutdown_exceptions = (
-        BrokenPipeError, ConnectionResetError,
-    )
-except NameError:  # py2
-    acceptable_sock_shutdown_exceptions = ()
+acceptable_sock_shutdown_exceptions = (BrokenPipeError, ConnectionResetError)
@@ -1572,6 +1572,9 @@ class HTTPServer:
     ``PEERCREDS``-provided IDs.
     """

+    reuse_port = False
+    """If True, set SO_REUSEPORT on the socket."""
+
     keep_alive_conn_limit = 10
     """Maximum number of waiting keep-alive connections that will be kept open.

@@ -1581,6 +1584,7 @@ class HTTPServer:
         self, bind_addr, gateway,
         minthreads=10, maxthreads=-1, server_name=None,
         peercreds_enabled=False, peercreds_resolve_enabled=False,
+        reuse_port=False,
     ):
         """Initialize HTTPServer instance.

@@ -1591,6 +1595,8 @@ class HTTPServer:
             maxthreads (int): maximum number of threads for HTTP thread pool
             server_name (str): web server name to be advertised via Server
                 HTTP header
+            reuse_port (bool): if True SO_REUSEPORT option would be set to
+                socket
         """
         self.bind_addr = bind_addr
         self.gateway = gateway
@@ -1606,6 +1612,7 @@ class HTTPServer:
         self.peercreds_resolve_enabled = (
             peercreds_resolve_enabled and peercreds_enabled
         )
+        self.reuse_port = reuse_port
         self.clear_stats()

     def clear_stats(self):
@@ -1880,6 +1887,7 @@ class HTTPServer:
             self.bind_addr,
             family, type, proto,
             self.nodelay, self.ssl_adapter,
+            self.reuse_port,
         )
         sock = self.socket = self.bind_socket(sock, self.bind_addr)
         self.bind_addr = self.resolve_real_bind_addr(sock)
@@ -1911,9 +1919,6 @@ class HTTPServer:
                     'remove() argument 1 must be encoded '
                     'string without null bytes, not unicode'
                     not in err_msg
-                    and 'embedded NUL character' not in err_msg  # py34
-                    and 'argument must be a '
-                    'string without NUL characters' not in err_msg  # pypy2
                 ):
                     raise
             except ValueError as val_err:
@@ -1931,6 +1936,7 @@ class HTTPServer:
             bind_addr=bind_addr,
             family=socket.AF_UNIX, type=socket.SOCK_STREAM, proto=0,
             nodelay=self.nodelay, ssl_adapter=self.ssl_adapter,
+            reuse_port=self.reuse_port,
         )

         try:
@@ -1971,7 +1977,36 @@ class HTTPServer:
         return sock

     @staticmethod
-    def prepare_socket(bind_addr, family, type, proto, nodelay, ssl_adapter):
+    def _make_socket_reusable(socket_, bind_addr):
+        host, port = bind_addr[:2]
+        IS_EPHEMERAL_PORT = port == 0
+
+        if socket_.family not in (socket.AF_INET, socket.AF_INET6):
+            raise ValueError('Cannot reuse a non-IP socket')
+
+        if IS_EPHEMERAL_PORT:
+            raise ValueError('Cannot reuse an ephemeral port (0)')
+
+        # Most BSD kernels implement SO_REUSEPORT the way that only the
+        # latest listener can read from socket. Some of BSD kernels also
+        # have SO_REUSEPORT_LB that works similarly to SO_REUSEPORT
+        # in Linux.
+        if hasattr(socket, 'SO_REUSEPORT_LB'):
+            socket_.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT_LB, 1)
+        elif hasattr(socket, 'SO_REUSEPORT'):
+            socket_.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
+        elif IS_WINDOWS:
+            socket_.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+        else:
+            raise NotImplementedError(
+                'Current platform does not support port reuse',
+            )
+
+    @classmethod
+    def prepare_socket(
+        cls, bind_addr, family, type, proto, nodelay, ssl_adapter,
+        reuse_port=False,
+    ):
         """Create and prepare the socket object."""
         sock = socket.socket(family, type, proto)
         connections.prevent_socket_inheritance(sock)
@@ -1979,6 +2014,9 @@ class HTTPServer:
         host, port = bind_addr[:2]
         IS_EPHEMERAL_PORT = port == 0

+        if reuse_port:
+            cls._make_socket_reusable(socket_=sock, bind_addr=bind_addr)
+
         if not (IS_WINDOWS or IS_EPHEMERAL_PORT):
             """Enable SO_REUSEADDR for the current socket.
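`_make_socket_reusable()` above prefers FreeBSD's load-balancing `SO_REUSEPORT_LB`, falls back to plain `SO_REUSEPORT`, and approximates with `SO_REUSEADDR` on Windows. A trimmed sketch of what the option buys on Linux/BSD, two listeners sharing one port (the port number is a made-up example):

    import socket

    def bound_listener(port=8080):
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # With SO_REUSEPORT set before bind(), several sockets may bind the
        # same (host, port) pair; the kernel balances connections between them.
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
        s.bind(('127.0.0.1', port))
        s.listen()
        return s

    a = bound_listener()
    b = bound_listener()  # would raise EADDRINUSE without SO_REUSEPORT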
@@ -130,9 +130,10 @@ class HTTPServer:
     ssl_adapter: Any
     peercreds_enabled: bool
     peercreds_resolve_enabled: bool
+    reuse_port: bool
     keep_alive_conn_limit: int
     requests: Any
-    def __init__(self, bind_addr, gateway, minthreads: int = ..., maxthreads: int = ..., server_name: Any | None = ..., peercreds_enabled: bool = ..., peercreds_resolve_enabled: bool = ...) -> None: ...
+    def __init__(self, bind_addr, gateway, minthreads: int = ..., maxthreads: int = ..., server_name: Any | None = ..., peercreds_enabled: bool = ..., peercreds_resolve_enabled: bool = ..., reuse_port: bool = ...) -> None: ...
     stats: Any
     def clear_stats(self): ...
     def runtime(self): ...
@@ -152,7 +153,9 @@ class HTTPServer:
     def bind(self, family, type, proto: int = ...): ...
     def bind_unix_socket(self, bind_addr): ...
     @staticmethod
-    def prepare_socket(bind_addr, family, type, proto, nodelay, ssl_adapter): ...
+    def _make_socket_reusable(socket_, bind_addr) -> None: ...
+    @classmethod
+    def prepare_socket(cls, bind_addr, family, type, proto, nodelay, ssl_adapter, reuse_port: bool = ...): ...
     @staticmethod
     def bind_socket(socket_, bind_addr): ...
     @staticmethod
@@ -1,7 +1,7 @@
-from abc import abstractmethod
+from abc import abstractmethod, ABCMeta
 from typing import Any

-class Adapter():
+class Adapter(metaclass=ABCMeta):
     certificate: Any
     private_key: Any
     certificate_chain: Any
@@ -4,11 +4,7 @@ Contains hooks, which are tightly bound to the Cheroot framework
 itself, useless for end-users' app testing.
 """

-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
 import pytest
-import six


 pytest_version = tuple(map(int, pytest.__version__.split('.')))
@@ -45,16 +41,3 @@ def pytest_load_initial_conftests(early_config, parser, args):
         'type=SocketKind.SOCK_STREAM, proto=.:'
         'pytest.PytestUnraisableExceptionWarning:_pytest.unraisableexception',
     ))
-
-    if six.PY2:
-        return
-
-    # NOTE: `ResourceWarning` does not exist under Python 2 and so using
-    # NOTE: it in warning filters results in an `_OptionError` exception
-    # NOTE: being raised.
-    early_config._inicache['filterwarnings'].extend((
-        # FIXME: Try to figure out what causes this and ensure that the socket
-        # FIXME: gets closed.
-        'ignore:unclosed <socket.socket fd=:ResourceWarning',
-        'ignore:unclosed <ssl.SSLSocket fd=:ResourceWarning',
-    ))
@@ -1218,8 +1218,7 @@ def test_No_CRLF(test_client, invalid_terminator):
     # Initialize a persistent HTTP connection
     conn = test_client.get_connection()

-    # (b'%s' % b'') is not supported in Python 3.4, so just use bytes.join()
-    conn.send(b''.join((b'GET /hello HTTP/1.1', invalid_terminator)))
+    conn.send(b'GET /hello HTTP/1.1%s' % invalid_terminator)
     response = conn.response_class(conn.sock, method='GET')
     response.begin()
     actual_resp_body = response.read()
@@ -69,11 +69,7 @@ class HelloController(helper.Controller):


 def _get_http_response(connection, method='GET'):
-    c = connection
-    kwargs = {'strict': c.strict} if hasattr(c, 'strict') else {}
-    # Python 3.2 removed the 'strict' feature, saying:
-    # "http.client now always assumes HTTP/1.x compliant servers."
-    return c.response_class(c.sock, method=method, **kwargs)
+    return connection.response_class(connection.sock, method=method)


 @pytest.fixture
@@ -4,7 +4,7 @@ import pytest

 from cheroot import errors

-from .._compat import IS_LINUX, IS_MACOS, IS_WINDOWS  # noqa: WPS130
+from .._compat import IS_LINUX, IS_MACOS, IS_SOLARIS, IS_WINDOWS  # noqa: WPS130


 @pytest.mark.parametrize(
@@ -18,6 +18,7 @@ from .._compat import IS_LINUX, IS_MACOS, IS_WINDOWS  # noqa: WPS130
         ),
         (91, 11, 32) if IS_LINUX else
         (32, 35, 41) if IS_MACOS else
+        (98, 11, 32) if IS_SOLARIS else
         (32, 10041, 11, 10035) if IS_WINDOWS else
         (),
     ),
@ -5,6 +5,7 @@ import queue
|
||||||
import socket
|
import socket
|
||||||
import tempfile
|
import tempfile
|
||||||
import threading
|
import threading
|
||||||
|
import types
|
||||||
import uuid
|
import uuid
|
||||||
import urllib.parse # noqa: WPS301
|
import urllib.parse # noqa: WPS301
|
||||||
|
|
||||||
|
@ -17,6 +18,7 @@ from pypytools.gc.custom import DefaultGc
|
||||||
from .._compat import bton, ntob
|
from .._compat import bton, ntob
|
||||||
from .._compat import IS_LINUX, IS_MACOS, IS_WINDOWS, SYS_PLATFORM
|
from .._compat import IS_LINUX, IS_MACOS, IS_WINDOWS, SYS_PLATFORM
|
||||||
from ..server import IS_UID_GID_RESOLVABLE, Gateway, HTTPServer
|
from ..server import IS_UID_GID_RESOLVABLE, Gateway, HTTPServer
|
||||||
|
from ..workers.threadpool import ThreadPool
|
||||||
from ..testing import (
|
from ..testing import (
|
||||||
ANY_INTERFACE_IPV4,
|
ANY_INTERFACE_IPV4,
|
||||||
ANY_INTERFACE_IPV6,
|
ANY_INTERFACE_IPV6,
|
||||||
|
@ -254,6 +256,7 @@ def peercreds_enabled_server(http_server, unix_sock_file):
|
||||||
|
|
||||||
@unix_only_sock_test
|
@unix_only_sock_test
|
||||||
@non_macos_sock_test
|
@non_macos_sock_test
|
||||||
|
@pytest.mark.flaky(reruns=3, reruns_delay=2)
|
||||||
def test_peercreds_unix_sock(http_request_timeout, peercreds_enabled_server):
|
def test_peercreds_unix_sock(http_request_timeout, peercreds_enabled_server):
|
||||||
"""Check that ``PEERCRED`` lookup works when enabled."""
|
"""Check that ``PEERCRED`` lookup works when enabled."""
|
||||||
httpserver = peercreds_enabled_server
|
httpserver = peercreds_enabled_server
|
||||||
|
@ -370,6 +373,33 @@ def test_high_number_of_file_descriptors(native_server_client, resource_limit):
     assert any(fn >= resource_limit for fn in native_process_conn.filenos)


+@pytest.mark.skipif(
+    not hasattr(socket, 'SO_REUSEPORT'),
+    reason='socket.SO_REUSEPORT is not supported on this platform',
+)
+@pytest.mark.parametrize(
+    'ip_addr',
+    (
+        ANY_INTERFACE_IPV4,
+        ANY_INTERFACE_IPV6,
+    ),
+)
+def test_reuse_port(http_server, ip_addr, mocker):
+    """Check that port initialized externally can be reused."""
+    family = socket.getaddrinfo(ip_addr, EPHEMERAL_PORT)[0][0]
+    s = socket.socket(family)
+    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
+    s.bind((ip_addr, EPHEMERAL_PORT))
+    server = HTTPServer(
+        bind_addr=s.getsockname()[:2], gateway=Gateway, reuse_port=True,
+    )
+    spy = mocker.spy(server, 'prepare')
+    server.prepare()
+    server.stop()
+    s.close()
+    assert spy.spy_exception is None
+
+
 ISSUE511 = IS_MACOS
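The new `test_reuse_port` exercises cheroot's `reuse_port` option by binding an external socket first and asserting that `HTTPServer.prepare()` can still bind the same address. As background, here is a minimal standalone sketch of the `SO_REUSEPORT` pattern the test relies on (stdlib only, no cheroot; the addresses are arbitrary):

import socket

# Both sockets must set SO_REUSEPORT *before* bind(); the kernel then
# allows them to share the same address/port pair.
def make_sock():
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
    return s

if hasattr(socket, 'SO_REUSEPORT'):  # e.g. not available on Windows
    first = make_sock()
    first.bind(('127.0.0.1', 0))  # pick a free ephemeral port
    addr = first.getsockname()
    second = make_sock()
    second.bind(addr)  # would raise OSError without SO_REUSEPORT
    first.close()
    second.close()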
@ -439,3 +469,90 @@ def many_open_sockets(request, resource_limit):
     # Close our open resources
     for test_socket in test_sockets:
         test_socket.close()


+@pytest.mark.parametrize(
+    ('minthreads', 'maxthreads', 'inited_maxthreads'),
+    (
+        (
+            # NOTE: The docstring only mentions -1 to mean "no max", but
+            # NOTE: other negative numbers should also work.
+            1,
+            -2,
+            float('inf'),
+        ),
+        (1, -1, float('inf')),
+        (1, 1, 1),
+        (1, 2, 2),
+        (1, float('inf'), float('inf')),
+        (2, -2, float('inf')),
+        (2, -1, float('inf')),
+        (2, 2, 2),
+        (2, float('inf'), float('inf')),
+    ),
+)
+def test_threadpool_threadrange_set(minthreads, maxthreads, inited_maxthreads):
+    """Test setting the number of threads in a ThreadPool.
+
+    The ThreadPool should properly set the min+max number of the threads
+    to use in the pool if those limits are valid.
+    """
+    tp = ThreadPool(
+        server=None,
+        min=minthreads,
+        max=maxthreads,
+    )
+    assert tp.min == minthreads
+    assert tp.max == inited_maxthreads
+
+
+@pytest.mark.parametrize(
+    ('minthreads', 'maxthreads', 'error'),
+    (
+        (-1, -1, 'min=-1 must be > 0'),
+        (-1, 0, 'min=-1 must be > 0'),
+        (-1, 1, 'min=-1 must be > 0'),
+        (-1, 2, 'min=-1 must be > 0'),
+        (0, -1, 'min=0 must be > 0'),
+        (0, 0, 'min=0 must be > 0'),
+        (0, 1, 'min=0 must be > 0'),
+        (0, 2, 'min=0 must be > 0'),
+        (1, 0, 'Expected an integer or the infinity value for the `max` argument but got 0.'),
+        (1, 0.5, 'Expected an integer or the infinity value for the `max` argument but got 0.5.'),
+        (2, 0, 'Expected an integer or the infinity value for the `max` argument but got 0.'),
+        (2, '1', "Expected an integer or the infinity value for the `max` argument but got '1'."),
+        (2, 1, 'max=1 must be > min=2'),
+    ),
+)
+def test_threadpool_invalid_threadrange(minthreads, maxthreads, error):
+    """Test that a ThreadPool rejects invalid min/max values.
+
+    The ThreadPool should raise an error with the proper message when
+    initialized with an invalid min+max number of threads.
+    """
+    with pytest.raises((ValueError, TypeError), match=error):
+        ThreadPool(
+            server=None,
+            min=minthreads,
+            max=maxthreads,
+        )
+
+
+def test_threadpool_multistart_validation(monkeypatch):
+    """Test for ThreadPool multi-start behavior.
+
+    Tests that calling start() on a ThreadPool multiple times raises a
+    :exc:`RuntimeError`.
+    """
+    # Replace _spawn_worker with a function that returns a placeholder
+    # to avoid actually starting any threads.
+    monkeypatch.setattr(
+        ThreadPool,
+        '_spawn_worker',
+        lambda _: types.SimpleNamespace(ready=True),
+    )
+
+    tp = ThreadPool(server=None)
+    tp.start()
+    with pytest.raises(RuntimeError, match='Threadpools can only be started once.'):
+        tp.start()
@ -55,17 +55,6 @@ _stdlib_to_openssl_verify = {
 }


-fails_under_py3 = pytest.mark.xfail(
-    reason='Fails under Python 3+',
-)
-
-
-fails_under_py3_in_pypy = pytest.mark.xfail(
-    IS_PYPY,
-    reason='Fails under PyPy3',
-)
-
-
 missing_ipv6 = pytest.mark.skipif(
     not _probe_ipv6_sock('::1'),
     reason=''
@ -556,7 +545,6 @@ def test_ssl_env(  # noqa: C901  # FIXME

     # builtin ssl environment generation may use a loopback socket
     # ensure no ResourceWarning was raised during the test
-    # NOTE: python 2.7 does not emit ResourceWarning for ssl sockets
     if IS_PYPY:
         # NOTE: PyPy doesn't have ResourceWarning
         # Ref: https://doc.pypy.org/en/latest/cpython_differences.html
@ -463,16 +463,13 @@ def shb(response):
     return resp_status_line, response.getheaders(), response.read()


-# def openURL(*args, raise_subcls=(), **kwargs):
-# py27 compatible signature:
-def openURL(*args, **kwargs):
+def openURL(*args, raise_subcls=(), **kwargs):
     """
     Open a URL, retrying when it fails.

     Specify ``raise_subcls`` (class or tuple of classes) to exclude
     those socket.error subclasses from being suppressed and retried.
     """
-    raise_subcls = kwargs.pop('raise_subcls', ())
     opener = functools.partial(_open_url_once, *args, **kwargs)

     def on_exception():
@ -119,9 +119,7 @@ def _probe_ipv6_sock(interface):
     try:
         with closing(socket.socket(family=socket.AF_INET6)) as sock:
             sock.bind((interface, 0))
-    except (OSError, socket.error) as sock_err:
-        # In Python 3 socket.error is an alias for OSError
-        # In Python 2 socket.error is a subclass of IOError
+    except OSError as sock_err:
         if sock_err.errno != errno.EADDRNOTAVAIL:
             raise
     else:
@ -151,12 +151,33 @@ class ThreadPool:
             server (cheroot.server.HTTPServer): web server object
                 receiving this request
             min (int): minimum number of worker threads
-            max (int): maximum number of worker threads
+            max (int): maximum number of worker threads (-1/inf for no max)
             accepted_queue_size (int): maximum number of active
                 requests in queue
             accepted_queue_timeout (int): timeout for putting request
                 into queue
+
+        :raises ValueError: if the min/max values are invalid
+        :raises TypeError: if the max is not an integer or inf
         """
+        if min < 1:
+            raise ValueError(f'min={min!s} must be > 0')
+
+        if max == float('inf'):
+            pass
+        elif not isinstance(max, int) or max == 0:
+            raise TypeError(
+                'Expected an integer or the infinity value for the `max` '
+                f'argument but got {max!r}.',
+            )
+        elif max < 0:
+            max = float('inf')
+
+        if max < min:
+            raise ValueError(
+                f'max={max!s} must be > min={min!s} (or infinity for no max)',
+            )
+
         self.server = server
         self.min = min
         self.max = max
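Taken together, the new guard clauses both validate and normalize the thread-range arguments. A quick sketch of the resulting behavior, assuming a cheroot version that includes this change:

from cheroot.workers.threadpool import ThreadPool

# Negative max is normalized to "no maximum":
assert ThreadPool(server=None, min=2, max=-1).max == float('inf')
# A concrete positive max is kept as-is:
assert ThreadPool(server=None, min=1, max=5).max == 5
# An inverted range is rejected up front:
try:
    ThreadPool(server=None, min=3, max=2)
except ValueError as exc:
    assert 'max=2 must be > min=3' in str(exc)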
@ -167,18 +188,13 @@ class ThreadPool:
         self._pending_shutdowns = collections.deque()

     def start(self):
-        """Start the pool of threads."""
-        for _ in range(self.min):
-            self._threads.append(WorkerThread(self.server))
-        for worker in self._threads:
-            worker.name = (
-                'CP Server {worker_name!s}'.
-                format(worker_name=worker.name)
-            )
-            worker.start()
-        for worker in self._threads:
-            while not worker.ready:
-                time.sleep(.1)
+        """Start the pool of threads.
+
+        :raises RuntimeError: if the pool is already started
+        """
+        if self._threads:
+            raise RuntimeError('Threadpools can only be started once.')
+        self.grow(self.min)

     @property
     def idle(self):  # noqa: D401; irrelevant for properties
@ -206,16 +222,12 @@ class ThreadPool:

     def grow(self, amount):
         """Spawn new worker threads (not above self.max)."""
-        if self.max > 0:
-            budget = max(self.max - len(self._threads), 0)
-        else:
-            # self.max <= 0 indicates no maximum
-            budget = float('inf')
+        budget = max(self.max - len(self._threads), 0)

         n_new = min(amount, budget)

         workers = [self._spawn_worker() for i in range(n_new)]
-        while not all(worker.ready for worker in workers):
-            time.sleep(.1)
+        for worker in workers:
+            while not worker.ready:
+                time.sleep(.1)
         self._threads.extend(workers)
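The special-cased branch can go because `max` is now guaranteed by `__init__` to be a positive int or `float('inf')`, and infinity survives the subtraction and clamp unchanged:

# Unbounded budget falls out of ordinary float arithmetic:
assert max(float('inf') - 10, 0) == float('inf')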
@ -43,6 +43,7 @@ class Server(server.HTTPServer):
         max=-1, request_queue_size=5, timeout=10, shutdown_timeout=5,
         accepted_queue_size=-1, accepted_queue_timeout=10,
         peercreds_enabled=False, peercreds_resolve_enabled=False,
+        reuse_port=False,
     ):
         """Initialize WSGI Server instance.
@ -69,6 +70,7 @@ class Server(server.HTTPServer):
             server_name=server_name,
             peercreds_enabled=peercreds_enabled,
             peercreds_resolve_enabled=peercreds_resolve_enabled,
+            reuse_port=reuse_port,
         )
         self.wsgi_app = wsgi_app
         self.request_queue_size = request_queue_size
@ -8,7 +8,7 @@ class Server(server.HTTPServer):
     timeout: Any
     shutdown_timeout: Any
     requests: Any
-    def __init__(self, bind_addr, wsgi_app, numthreads: int = ..., server_name: Any | None = ..., max: int = ..., request_queue_size: int = ..., timeout: int = ..., shutdown_timeout: int = ..., accepted_queue_size: int = ..., accepted_queue_timeout: int = ..., peercreds_enabled: bool = ..., peercreds_resolve_enabled: bool = ...) -> None: ...
+    def __init__(self, bind_addr, wsgi_app, numthreads: int = ..., server_name: Any | None = ..., max: int = ..., request_queue_size: int = ..., timeout: int = ..., shutdown_timeout: int = ..., accepted_queue_size: int = ..., accepted_queue_timeout: int = ..., peercreds_enabled: bool = ..., peercreds_resolve_enabled: bool = ..., reuse_port: bool = ...) -> None: ...
     @property
     def numthreads(self): ...
     @numthreads.setter
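For callers, the new keyword threads straight through to `HTTPServer`. A minimal sketch of a WSGI server opting in (the app and address are placeholders):

from cheroot import wsgi

def app(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'Hello, world!']

server = wsgi.Server(
    bind_addr=('127.0.0.1', 8080),  # placeholder address
    wsgi_app=app,
    reuse_port=True,  # requires SO_REUSEPORT support on the platform
)
# server.start()  # blocks until server.stop() is called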
@ -38,7 +38,7 @@ CL_BLANK = "data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAA
 URI_SCHEME = "cloudinary"
 API_VERSION = "v1_1"

-VERSION = "1.32.0"
+VERSION = "1.34.0"

 _USER_PLATFORM_DETAILS = "; ".join((platform(), "Python {}".format(python_version())))
@ -155,6 +155,26 @@ def resources_by_context(key, value=None, **options):
     return call_api("get", uri, params, **options)


+def visual_search(image_url=None, image_asset_id=None, text=None, **options):
+    """
+    Find images based on their visual content.
+
+    :param image_url: The URL of an image.
+    :type image_url: str
+    :param image_asset_id: The asset_id of an image in your account.
+    :type image_asset_id: str
+    :param text: A textual description, e.g., "cat"
+    :type text: str
+    :param options: Additional options
+    :type options: dict, optional
+    :return: Resources (assets) that were found
+    :rtype: Response
+    """
+    uri = ["resources", "visual_search"]
+    params = {"image_url": image_url, "image_asset_id": image_asset_id, "text": text}
+    return call_api("get", uri, params, **options)
+
+
 def resource(public_id, **options):
     resource_type = options.pop("resource_type", "image")
     upload_type = options.pop("type", "upload")
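A hedged usage sketch of the new Admin API helper (the credentials are placeholders and the text query is an arbitrary example):

import cloudinary
import cloudinary.api

cloudinary.config(cloud_name='demo', api_key='key', api_secret='secret')  # placeholders

# Typically exactly one of image_url, image_asset_id, or text is supplied.
result = cloudinary.api.visual_search(text='cat')
for asset in result.get('resources', []):
    print(asset['public_id'])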
@ -317,6 +337,24 @@ def add_related_assets(public_id, assets_to_relate, resource_type="image", type=
     return call_json_api("post", uri, params, **options)


+def add_related_assets_by_asset_ids(asset_id, assets_to_relate, **options):
+    """
+    Relates an asset to other assets by asset IDs.
+
+    :param asset_id: The asset ID of the asset to update.
+    :type asset_id: str
+    :param assets_to_relate: The array of up to 10 asset IDs.
+    :type assets_to_relate: list[str]
+    :param options: Additional options.
+    :type options: dict, optional
+    :return: The result of the command.
+    :rtype: dict
+    """
+    uri = ["resources", "related_assets", asset_id]
+    params = {"assets_to_relate": utils.build_array(assets_to_relate)}
+    return call_json_api("post", uri, params, **options)
+
+
 def delete_related_assets(public_id, assets_to_unrelate, resource_type="image", type="upload", **options):
     """
     Unrelates an asset from other assets by public IDs.
@ -339,6 +377,24 @@ def delete_related_assets(public_id, assets_to_unrelate, resource_type="image",
     return call_json_api("delete", uri, params, **options)


+def delete_related_assets_by_asset_ids(asset_id, assets_to_unrelate, **options):
+    """
+    Unrelates an asset from other assets by asset IDs.
+
+    :param asset_id: The asset ID of the asset to update.
+    :type asset_id: str
+    :param assets_to_unrelate: The array of up to 10 asset IDs.
+    :type assets_to_unrelate: list[str]
+    :param options: Additional options.
+    :type options: dict, optional
+    :return: The result of the command.
+    :rtype: dict
+    """
+    uri = ["resources", "related_assets", asset_id]
+    params = {"assets_to_unrelate": utils.build_array(assets_to_unrelate)}
+    return call_json_api("delete", uri, params, **options)
+
+
 def tags(**options):
     resource_type = options.pop("resource_type", "image")
     uri = ["tags", resource_type]
@ -31,7 +31,7 @@ def call_api(method, uri, params, **options):
     return _call_api(method, uri, params=params, **options)


-def _call_api(method, uri, params=None, body=None, headers=None, **options):
+def _call_api(method, uri, params=None, body=None, headers=None, extra_headers=None, **options):
     prefix = options.pop("upload_prefix",
                          cloudinary.config().upload_prefix) or "https://api.cloudinary.com"
     cloud_name = options.pop("cloud_name", cloudinary.config().cloud_name)
@ -50,6 +50,9 @@ def _call_api(method, uri, params=None, body=None, headers=None, **options):
     if body is not None:
         options["body"] = body

+    if extra_headers is not None:
+        headers.update(extra_headers)
+
     return execute_request(http_connector=_http,
                            method=method,
                            params=params,
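Because `extra_headers` is merged after the defaults, it can both add and override headers. The merge semantics in isolation (the header name is illustrative only):

headers = {'User-Agent': 'CloudinaryPython'}
extra_headers = {'X-Requested-With': 'example'}  # illustrative header
if extra_headers is not None:
    headers.update(extra_headers)
assert headers['X-Requested-With'] == 'example'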
@ -1,7 +1,10 @@
+import base64
 import json

+import cloudinary
 from cloudinary.api_client.call_api import call_json_api
-from cloudinary.utils import unique
+from cloudinary.utils import unique, unsigned_download_url_prefix, build_distribution_domain, base64url_encode, \
+    json_encode, compute_hex_hash, SIGNATURE_SHA256


 class Search(object):
@ -15,7 +18,10 @@ class Search(object):
         'with_field': None,
     }

+    _ttl = 300  # Used for search URLs
+
     """Build and execute a search query."""
+
     def __init__(self):
         self.query = {}
@ -51,6 +57,16 @@ class Search(object):
         self._add("with_field", value)
         return self

+    def ttl(self, ttl):
+        """
+        Sets the time to live of the search URL.
+
+        :param ttl: The time to live in seconds.
+        :return: self
+        """
+        self._ttl = ttl
+        return self
+
     def to_json(self):
         return json.dumps(self.as_dict())
@ -60,12 +76,6 @@ class Search(object):
         uri = [self._endpoint, 'search']
         return call_json_api('post', uri, self.as_dict(), **options)

-    def _add(self, name, value):
-        if name not in self.query:
-            self.query[name] = []
-        self.query[name].append(value)
-        return self
-
     def as_dict(self):
         to_return = {}
@ -77,6 +87,51 @@ class Search(object):

         return to_return

+    def to_url(self, ttl=None, next_cursor=None, **options):
+        """
+        Creates a signed Search URL that can be used on the client side.
+
+        :param ttl: The time to live in seconds.
+        :param next_cursor: Starting position.
+        :param options: Additional url delivery options.
+        :return: The resulting search URL.
+        """
+        api_secret = options.get("api_secret", cloudinary.config().api_secret or None)
+        if not api_secret:
+            raise ValueError("Must supply api_secret")
+
+        if ttl is None:
+            ttl = self._ttl
+
+        query = self.as_dict()
+
+        _next_cursor = query.pop("next_cursor", None)
+        if next_cursor is None:
+            next_cursor = _next_cursor
+
+        b64query = base64url_encode(json_encode(query, sort_keys=True))
+
+        prefix = build_distribution_domain(options)
+
+        signature = compute_hex_hash("{ttl}{b64query}{api_secret}".format(
+            ttl=ttl,
+            b64query=b64query,
+            api_secret=api_secret
+        ), algorithm=SIGNATURE_SHA256)
+
+        return "{prefix}/search/{signature}/{ttl}/{b64query}{next_cursor}".format(
+            prefix=prefix,
+            signature=signature,
+            ttl=ttl,
+            b64query=b64query,
+            next_cursor="/{}".format(next_cursor) if next_cursor else "")
+
     def endpoint(self, endpoint):
         self._endpoint = endpoint
         return self

+    def _add(self, name, value):
+        if name not in self.query:
+            self.query[name] = []
+        self.query[name].append(value)
+        return self
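A hedged sketch of building a signed client-side search URL with the new methods (credentials are placeholders; `expression`, `sort_by`, and `max_results` are pre-existing Search builders):

import cloudinary
from cloudinary.search import Search

cloudinary.config(cloud_name='demo', api_secret='secret')  # placeholders

url = (
    Search()
    .expression('resource_type:image AND tags=kitten')
    .sort_by('public_id', 'desc')
    .max_results(30)
    .ttl(1000)  # overrides the default 300-second TTL
    .to_url()
)
# -> https://res.cloudinary.com/demo/search/<signature>/1000/<b64query>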
@ -472,7 +472,8 @@ def call_cacheable_api(action, params, http_headers=None, return_error=False, un
     return result


-def call_api(action, params, http_headers=None, return_error=False, unsigned=False, file=None, timeout=None, **options):
+def call_api(action, params, http_headers=None, return_error=False, unsigned=False, file=None, timeout=None,
+             extra_headers=None, **options):
     params = utils.cleanup_params(params)

     headers = {"User-Agent": cloudinary.get_user_agent()}
@ -480,6 +481,9 @@ def call_api(action, params, http_headers=None, return_error=False, unsigned=Fal
     if http_headers is not None:
         headers.update(http_headers)

+    if extra_headers is not None:
+        headers.update(extra_headers)
+
     oauth_token = options.get("oauth_token", cloudinary.config().oauth_token)

     if oauth_token:
@ -92,6 +92,7 @@ __SIMPLE_UPLOAD_PARAMS = [
     "eager_notification_url",
     "eager_async",
     "eval",
+    "on_success",
     "proxy",
     "folder",
     "asset_folder",
@ -106,6 +107,7 @@ __SIMPLE_UPLOAD_PARAMS = [
     "categorization",
     "detection",
     "similarity_search",
+    "visual_search",
     "background_removal",
     "upload_preset",
     "phash",
@ -281,7 +283,7 @@ def encode_context(context):
     return "|".join(("{}={}".format(k, normalize_context_value(v))) for k, v in iteritems(context))


-def json_encode(value):
+def json_encode(value, sort_keys=False):
     """
     Converts value to a json encoded string
@ -289,7 +291,7 @@ def json_encode(value):

     :return: JSON encoded string
     """
-    return json.dumps(value, default=__json_serializer, separators=(',', ':'))
+    return json.dumps(value, default=__json_serializer, separators=(',', ':'), sort_keys=sort_keys)


 def encode_date_to_usage_api_format(date_obj):
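`to_url` signs the base64-encoded query, so the JSON payload must serialize to identical bytes for identical queries; that is what the pass-through `sort_keys` buys. Illustration with the stdlib:

import json

a = json.dumps({'b': 1, 'a': 2}, separators=(',', ':'), sort_keys=True)
b = json.dumps({'a': 2, 'b': 1}, separators=(',', ':'), sort_keys=True)
assert a == b == '{"a":2,"b":1}'  # identical bytes -> identical signature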
@ -373,8 +375,17 @@ def generate_transformation_string(**options):
     flags = ".".join(build_array(options.pop("flags", None)))
     dpr = options.pop("dpr", cloudinary.config().dpr)
     duration = norm_range_value(options.pop("duration", None))
-    start_offset = norm_auto_range_value(options.pop("start_offset", None))
-    end_offset = norm_range_value(options.pop("end_offset", None))
+
+    so_raw = options.pop("start_offset", None)
+    start_offset = norm_auto_range_value(so_raw)
+    if start_offset == None:
+        start_offset = so_raw
+
+    eo_raw = options.pop("end_offset", None)
+    end_offset = norm_range_value(eo_raw)
+    if end_offset == None:
+        end_offset = eo_raw
+
     offset = split_range(options.pop("offset", None))
     if offset:
         start_offset = norm_auto_range_value(offset[0])
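The point of the raw fallback: when the normalizer does not recognize a value it returns None, and the user-supplied value is now passed through unchanged instead of being dropped. The pattern in isolation, with a stand-in for cloudinary's norm_auto_range_value:

import re

def norm(value):  # stand-in normalizer, accepts e.g. '2.5' or '20p'/'20%'
    if value is None or not re.match(r'^\d+(\.\d+)?[%pP]?$', str(value)):
        return None
    return str(value).replace('%', 'p')

raw = 'auto'  # not matched by this stand-in normalizer
normalized = norm(raw)
if normalized is None:
    normalized = raw  # fall back to the raw value instead of losing it
assert normalized == 'auto'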
@ -700,6 +711,25 @@ def unsigned_download_url_prefix(source, cloud_name, private_cdn, cdn_subdomain,
     return prefix


+def build_distribution_domain(options):
+    source = options.pop('source', '')
+    cloud_name = options.pop("cloud_name", cloudinary.config().cloud_name or None)
+    if cloud_name is None:
+        raise ValueError("Must supply cloud_name in tag or in configuration")
+    secure = options.pop("secure", cloudinary.config().secure)
+    private_cdn = options.pop("private_cdn", cloudinary.config().private_cdn)
+    cname = options.pop("cname", cloudinary.config().cname)
+    secure_distribution = options.pop("secure_distribution",
+                                      cloudinary.config().secure_distribution)
+    cdn_subdomain = options.pop("cdn_subdomain", cloudinary.config().cdn_subdomain)
+    secure_cdn_subdomain = options.pop("secure_cdn_subdomain",
+                                       cloudinary.config().secure_cdn_subdomain)
+
+    return unsigned_download_url_prefix(
+        source, cloud_name, private_cdn, cdn_subdomain, secure_cdn_subdomain,
+        cname, secure, secure_distribution)
+
+
 def merge(*dict_args):
     result = None
     for dictionary in dict_args:
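`build_distribution_domain` centralizes the half-dozen config pops that `cloudinary_url` previously did inline (see the following hunks). Calling it directly, with placeholder config; note that it pops keys from the dict it is given:

import cloudinary
from cloudinary.utils import build_distribution_domain

cloudinary.config(cloud_name='demo', secure=True)  # placeholders

opts = {'source': 'sample.jpg'}
prefix = build_distribution_domain(opts)
# e.g. 'https://res.cloudinary.com/demo'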
@ -728,19 +758,8 @@ def cloudinary_url(source, **options):
     version = options.pop("version", None)

     format = options.pop("format", None)
-    cdn_subdomain = options.pop("cdn_subdomain", cloudinary.config().cdn_subdomain)
-    secure_cdn_subdomain = options.pop("secure_cdn_subdomain",
-                                       cloudinary.config().secure_cdn_subdomain)
-    cname = options.pop("cname", cloudinary.config().cname)
     shorten = options.pop("shorten", cloudinary.config().shorten)

-    cloud_name = options.pop("cloud_name", cloudinary.config().cloud_name or None)
-    if cloud_name is None:
-        raise ValueError("Must supply cloud_name in tag or in configuration")
-    secure = options.pop("secure", cloudinary.config().secure)
-    private_cdn = options.pop("private_cdn", cloudinary.config().private_cdn)
-    secure_distribution = options.pop("secure_distribution",
-                                      cloudinary.config().secure_distribution)
     sign_url = options.pop("sign_url", cloudinary.config().sign_url)
     api_secret = options.pop("api_secret", cloudinary.config().api_secret)
     url_suffix = options.pop("url_suffix", None)
@ -786,9 +805,9 @@ def cloudinary_url(source, **options):
         base64.urlsafe_b64encode(
             hash_fn(to_bytes(to_sign + api_secret)).digest())[0:chars_length]) + "--"

-    prefix = unsigned_download_url_prefix(
-        source, cloud_name, private_cdn, cdn_subdomain, secure_cdn_subdomain,
-        cname, secure, secure_distribution)
+    options["source"] = source
+    prefix = build_distribution_domain(options)

     source = "/".join(__compact(
         [prefix, resource_type, type, signature, transformation, version, source]))
     if sign_url and auth_token:
@ -999,6 +1018,7 @@ def archive_params(**options):
         "skip_transformation_name": options.get("skip_transformation_name"),
         "tags": options.get("tags") and build_array(options.get("tags")),
         "target_format": options.get("target_format"),
+        "target_asset_folder": options.get("target_asset_folder"),
         "target_public_id": options.get("target_public_id"),
         "target_tags": options.get("target_tags") and build_array(options.get("target_tags")),
         "timestamp": timestamp,
@ -22,6 +22,7 @@ __all__ = [
    "asyncquery",
    "asyncresolver",
    "dnssec",
+   "dnssecalgs",
    "dnssectypes",
    "e164",
    "edns",
@ -35,6 +35,9 @@ class Socket:  # pragma: no cover
     async def getsockname(self):
         raise NotImplementedError

+    async def getpeercert(self, timeout):
+        raise NotImplementedError
+
     async def __aenter__(self):
         return self
@ -61,6 +64,11 @@ class StreamSocket(Socket):  # pragma: no cover
         raise NotImplementedError


+class NullTransport:
+    async def connect_tcp(self, host, port, timeout, local_address):
+        raise NotImplementedError
+
+
 class Backend:  # pragma: no cover
     def name(self):
         return "unknown"
@ -83,3 +91,9 @@ class Backend:  # pragma: no cover

     async def sleep(self, interval):
         raise NotImplementedError
+
+    def get_transport_class(self):
+        raise NotImplementedError
+
+    async def wait_for(self, awaitable, timeout):
+        raise NotImplementedError
@ -2,14 +2,13 @@

 """asyncio library query support"""

-import socket
 import asyncio
+import socket
 import sys

 import dns._asyncbackend
 import dns.exception


 _is_win32 = sys.platform == "win32"
@ -38,6 +37,13 @@ class _DatagramProtocol:

     def connection_lost(self, exc):
         if self.recvfrom and not self.recvfrom.done():
+            if exc is None:
+                # EOF we triggered.  Is there a better way to do this?
+                try:
+                    raise EOFError
+                except EOFError as e:
+                    self.recvfrom.set_exception(e)
+            else:
                 self.recvfrom.set_exception(exc)

     def close(self):
@ -45,7 +51,7 @@ class _DatagramProtocol:


 async def _maybe_wait_for(awaitable, timeout):
-    if timeout:
+    if timeout is not None:
         try:
             return await asyncio.wait_for(awaitable, timeout)
         except asyncio.TimeoutError:
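The guard change matters because a timeout of `0` is falsy: under `if timeout:` a zero timeout silently meant "wait forever", while `if timeout is not None:` treats it as a real, already-expired deadline:

timeout = 0
# Old guard: skipped wait_for() entirely for a zero timeout.
assert not bool(timeout)
# New guard: still applies the timeout, so wait_for(..., 0) can raise TimeoutError.
assert timeout is not None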
@ -85,6 +91,9 @@ class DatagramSocket(dns._asyncbackend.DatagramSocket):
     async def getsockname(self):
         return self.transport.get_extra_info("sockname")

+    async def getpeercert(self, timeout):
+        raise NotImplementedError
+

 class StreamSocket(dns._asyncbackend.StreamSocket):
     def __init__(self, af, reader, writer):
@ -101,10 +110,6 @@ class StreamSocket(dns._asyncbackend.StreamSocket):

     async def close(self):
         self.writer.close()
-        try:
-            await self.writer.wait_closed()
-        except AttributeError:  # pragma: no cover
-            pass

     async def getpeername(self):
         return self.writer.get_extra_info("peername")
@ -112,6 +117,97 @@ class StreamSocket(dns._asyncbackend.StreamSocket):

     async def getsockname(self):
         return self.writer.get_extra_info("sockname")

+    async def getpeercert(self, timeout):
+        return self.writer.get_extra_info("peercert")
+
+
+try:
+    import anyio
+    import httpcore
+    import httpcore._backends.anyio
+    import httpx
+
+    _CoreAsyncNetworkBackend = httpcore.AsyncNetworkBackend
+    _CoreAnyIOStream = httpcore._backends.anyio.AnyIOStream
+
+    from dns.query import _compute_times, _expiration_for_this_attempt, _remaining
+
+    class _NetworkBackend(_CoreAsyncNetworkBackend):
+        def __init__(self, resolver, local_port, bootstrap_address, family):
+            super().__init__()
+            self._local_port = local_port
+            self._resolver = resolver
+            self._bootstrap_address = bootstrap_address
+            self._family = family
+            if local_port != 0:
+                raise NotImplementedError(
+                    "the asyncio transport for HTTPX cannot set the local port"
+                )
+
+        async def connect_tcp(
+            self, host, port, timeout, local_address, socket_options=None
+        ):  # pylint: disable=signature-differs
+            addresses = []
+            _, expiration = _compute_times(timeout)
+            if dns.inet.is_address(host):
+                addresses.append(host)
+            elif self._bootstrap_address is not None:
+                addresses.append(self._bootstrap_address)
+            else:
+                timeout = _remaining(expiration)
+                family = self._family
+                if local_address:
+                    family = dns.inet.af_for_address(local_address)
+                answers = await self._resolver.resolve_name(
+                    host, family=family, lifetime=timeout
+                )
+                addresses = answers.addresses()
+            for address in addresses:
+                try:
+                    attempt_expiration = _expiration_for_this_attempt(2.0, expiration)
+                    timeout = _remaining(attempt_expiration)
+                    with anyio.fail_after(timeout):
+                        stream = await anyio.connect_tcp(
+                            remote_host=address,
+                            remote_port=port,
+                            local_host=local_address,
+                        )
+                    return _CoreAnyIOStream(stream)
+                except Exception:
+                    pass
+            raise httpcore.ConnectError
+
+        async def connect_unix_socket(
+            self, path, timeout, socket_options=None
+        ):  # pylint: disable=signature-differs
+            raise NotImplementedError
+
+        async def sleep(self, seconds):  # pylint: disable=signature-differs
+            await anyio.sleep(seconds)
+
+    class _HTTPTransport(httpx.AsyncHTTPTransport):
+        def __init__(
+            self,
+            *args,
+            local_port=0,
+            bootstrap_address=None,
+            resolver=None,
+            family=socket.AF_UNSPEC,
+            **kwargs,
+        ):
+            if resolver is None:
+                # pylint: disable=import-outside-toplevel,redefined-outer-name
+                import dns.asyncresolver
+
+                resolver = dns.asyncresolver.Resolver()
+            super().__init__(*args, **kwargs)
+            self._pool._network_backend = _NetworkBackend(
+                resolver, local_port, bootstrap_address, family
+            )
+
+except ImportError:
+    _HTTPTransport = dns._asyncbackend.NullTransport  # type: ignore
+
+
 class Backend(dns._asyncbackend.Backend):
     def name(self):
@ -171,3 +267,9 @@ class Backend(dns._asyncbackend.Backend):

     def datagram_connection_required(self):
         return _is_win32
+
+    def get_transport_class(self):
+        return _HTTPTransport
+
+    async def wait_for(self, awaitable, timeout):
+        return await _maybe_wait_for(awaitable, timeout)
@ -1,122 +0,0 @@
-# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
-
-"""curio async I/O library query support"""
-
-import socket
-
-import curio
-import curio.socket  # type: ignore
-
-import dns._asyncbackend
-import dns.exception
-import dns.inet
-
-
-def _maybe_timeout(timeout):
-    if timeout:
-        return curio.ignore_after(timeout)
-    else:
-        return dns._asyncbackend.NullContext()
-
-
-# for brevity
-_lltuple = dns.inet.low_level_address_tuple
-
-# pylint: disable=redefined-outer-name
-
-
-class DatagramSocket(dns._asyncbackend.DatagramSocket):
-    def __init__(self, socket):
-        super().__init__(socket.family)
-        self.socket = socket
-
-    async def sendto(self, what, destination, timeout):
-        async with _maybe_timeout(timeout):
-            return await self.socket.sendto(what, destination)
-        raise dns.exception.Timeout(
-            timeout=timeout
-        )  # pragma: no cover  lgtm[py/unreachable-statement]
-
-    async def recvfrom(self, size, timeout):
-        async with _maybe_timeout(timeout):
-            return await self.socket.recvfrom(size)
-        raise dns.exception.Timeout(timeout=timeout)  # lgtm[py/unreachable-statement]
-
-    async def close(self):
-        await self.socket.close()
-
-    async def getpeername(self):
-        return self.socket.getpeername()
-
-    async def getsockname(self):
-        return self.socket.getsockname()
-
-
-class StreamSocket(dns._asyncbackend.StreamSocket):
-    def __init__(self, socket):
-        self.socket = socket
-        self.family = socket.family
-
-    async def sendall(self, what, timeout):
-        async with _maybe_timeout(timeout):
-            return await self.socket.sendall(what)
-        raise dns.exception.Timeout(timeout=timeout)  # lgtm[py/unreachable-statement]
-
-    async def recv(self, size, timeout):
-        async with _maybe_timeout(timeout):
-            return await self.socket.recv(size)
-        raise dns.exception.Timeout(timeout=timeout)  # lgtm[py/unreachable-statement]
-
-    async def close(self):
-        await self.socket.close()
-
-    async def getpeername(self):
-        return self.socket.getpeername()
-
-    async def getsockname(self):
-        return self.socket.getsockname()
-
-
-class Backend(dns._asyncbackend.Backend):
-    def name(self):
-        return "curio"
-
-    async def make_socket(
-        self,
-        af,
-        socktype,
-        proto=0,
-        source=None,
-        destination=None,
-        timeout=None,
-        ssl_context=None,
-        server_hostname=None,
-    ):
-        if socktype == socket.SOCK_DGRAM:
-            s = curio.socket.socket(af, socktype, proto)
-            try:
-                if source:
-                    s.bind(_lltuple(source, af))
-            except Exception:  # pragma: no cover
-                await s.close()
-                raise
-            return DatagramSocket(s)
-        elif socktype == socket.SOCK_STREAM:
-            if source:
-                source_addr = _lltuple(source, af)
-            else:
-                source_addr = None
-            async with _maybe_timeout(timeout):
-                s = await curio.open_connection(
-                    destination[0],
-                    destination[1],
-                    ssl=ssl_context,
-                    source_addr=source_addr,
-                    server_hostname=server_hostname,
-                )
-            return StreamSocket(s)
-        raise NotImplementedError(
-            "unsupported socket " + f"type {socktype}"
-        )  # pragma: no cover
-
-    async def sleep(self, interval):
-        await curio.sleep(interval)
154
lib/dns/_ddr.py
Normal file
@ -0,0 +1,154 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+#
+# Support for Discovery of Designated Resolvers
+
+import socket
+import time
+from urllib.parse import urlparse
+
+import dns.asyncbackend
+import dns.inet
+import dns.name
+import dns.nameserver
+import dns.query
+import dns.rdtypes.svcbbase
+
+# The special name of the local resolver when using DDR
+_local_resolver_name = dns.name.from_text("_dns.resolver.arpa")
+
+
+#
+# Processing is split up into I/O independent and I/O dependent parts to
+# make supporting sync and async versions easy.
+#
+
+
+class _SVCBInfo:
+    def __init__(self, bootstrap_address, port, hostname, nameservers):
+        self.bootstrap_address = bootstrap_address
+        self.port = port
+        self.hostname = hostname
+        self.nameservers = nameservers
+
+    def ddr_check_certificate(self, cert):
+        """Verify that the _SVCBInfo's address is in the cert's subjectAltName (SAN)"""
+        for name, value in cert["subjectAltName"]:
+            if name == "IP Address" and value == self.bootstrap_address:
+                return True
+        return False
+
+    def make_tls_context(self):
+        ssl = dns.query.ssl
+        ctx = ssl.create_default_context()
+        ctx.minimum_version = ssl.TLSVersion.TLSv1_2
+        return ctx
+
+    def ddr_tls_check_sync(self, lifetime):
+        ctx = self.make_tls_context()
+        expiration = time.time() + lifetime
+        with socket.create_connection(
+            (self.bootstrap_address, self.port), lifetime
+        ) as s:
+            with ctx.wrap_socket(s, server_hostname=self.hostname) as ts:
+                ts.settimeout(dns.query._remaining(expiration))
+                ts.do_handshake()
+                cert = ts.getpeercert()
+                return self.ddr_check_certificate(cert)
+
+    async def ddr_tls_check_async(self, lifetime, backend=None):
+        if backend is None:
+            backend = dns.asyncbackend.get_default_backend()
+        ctx = self.make_tls_context()
+        expiration = time.time() + lifetime
+        async with await backend.make_socket(
+            dns.inet.af_for_address(self.bootstrap_address),
+            socket.SOCK_STREAM,
+            0,
+            None,
+            (self.bootstrap_address, self.port),
+            lifetime,
+            ctx,
+            self.hostname,
+        ) as ts:
+            cert = await ts.getpeercert(dns.query._remaining(expiration))
+            return self.ddr_check_certificate(cert)
+
+
+def _extract_nameservers_from_svcb(answer):
+    bootstrap_address = answer.nameserver
+    if not dns.inet.is_address(bootstrap_address):
+        return []
+    infos = []
+    for rr in answer.rrset.processing_order():
+        nameservers = []
+        param = rr.params.get(dns.rdtypes.svcbbase.ParamKey.ALPN)
+        if param is None:
+            continue
+        alpns = set(param.ids)
+        host = rr.target.to_text(omit_final_dot=True)
+        port = None
+        param = rr.params.get(dns.rdtypes.svcbbase.ParamKey.PORT)
+        if param is not None:
+            port = param.port
+        # For now we ignore address hints and address resolution and always
+        # use the bootstrap address
+        if b"h2" in alpns:
+            param = rr.params.get(dns.rdtypes.svcbbase.ParamKey.DOHPATH)
+            if param is None or not param.value.endswith(b"{?dns}"):
+                continue
+            path = param.value[:-6].decode()
+            if not path.startswith("/"):
+                path = "/" + path
+            if port is None:
+                port = 443
+            url = f"https://{host}:{port}{path}"
+            # check the URL
+            try:
+                urlparse(url)
+                nameservers.append(dns.nameserver.DoHNameserver(url, bootstrap_address))
+            except Exception:
+                # continue processing other ALPN types
+                pass
+        if b"dot" in alpns:
+            if port is None:
+                port = 853
+            nameservers.append(
+                dns.nameserver.DoTNameserver(bootstrap_address, port, host)
+            )
+        if b"doq" in alpns:
+            if port is None:
+                port = 853
+            nameservers.append(
+                dns.nameserver.DoQNameserver(bootstrap_address, port, True, host)
+            )
+        if len(nameservers) > 0:
+            infos.append(_SVCBInfo(bootstrap_address, port, host, nameservers))
+    return infos
+
+
+def _get_nameservers_sync(answer, lifetime):
+    """Return a list of TLS-validated resolver nameservers extracted from an SVCB
+    answer."""
+    nameservers = []
+    infos = _extract_nameservers_from_svcb(answer)
+    for info in infos:
+        try:
+            if info.ddr_tls_check_sync(lifetime):
+                nameservers.extend(info.nameservers)
+        except Exception:
+            pass
+    return nameservers
+
+
+async def _get_nameservers_async(answer, lifetime):
+    """Return a list of TLS-validated resolver nameservers extracted from an SVCB
+    answer."""
+    nameservers = []
+    infos = _extract_nameservers_from_svcb(answer)
+    for info in infos:
+        try:
+            if await info.ddr_tls_check_async(lifetime):
+                nameservers.extend(info.nameservers)
+        except Exception:
+            pass
+    return nameservers
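To make `ddr_check_certificate` concrete: it walks the `getpeercert()`-style SAN tuples and accepts only an exact IP match against the bootstrap address. A self-contained re-statement with a fabricated certificate dict:

# Shape of the SAN entries returned by ssl.SSLSocket.getpeercert():
cert = {
    'subjectAltName': (
        ('DNS', 'dns.example.net'),
        ('IP Address', '192.0.2.53'),
    ),
}

def check(cert, bootstrap_address):
    for name, value in cert['subjectAltName']:
        if name == 'IP Address' and value == bootstrap_address:
            return True
    return False

assert check(cert, '192.0.2.53')
assert not check(cert, '198.51.100.1')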
@ -7,7 +7,6 @@

 import contextvars
 import inspect


 _in__init__ = contextvars.ContextVar("_immutable_in__init__", default=False)
@ -3,6 +3,7 @@

 """trio async I/O library query support"""

 import socket

 import trio
 import trio.socket  # type: ignore
@ -12,7 +13,7 @@ import dns.inet


 def _maybe_timeout(timeout):
-    if timeout:
+    if timeout is not None:
         return trio.move_on_after(timeout)
     else:
         return dns._asyncbackend.NullContext()
@ -50,6 +51,9 @@ class DatagramSocket(dns._asyncbackend.DatagramSocket):
     async def getsockname(self):
         return self.socket.getsockname()

+    async def getpeercert(self, timeout):
+        raise NotImplementedError
+

 class StreamSocket(dns._asyncbackend.StreamSocket):
     def __init__(self, family, stream, tls=False):
@ -82,6 +86,100 @@ class StreamSocket(dns._asyncbackend.StreamSocket):
         else:
             return self.stream.socket.getsockname()

+    async def getpeercert(self, timeout):
+        if self.tls:
+            with _maybe_timeout(timeout):
+                await self.stream.do_handshake()
+            return self.stream.getpeercert()
+        else:
+            raise NotImplementedError
+
+
+try:
+    import httpcore
+    import httpcore._backends.trio
+    import httpx
+
+    _CoreAsyncNetworkBackend = httpcore.AsyncNetworkBackend
+    _CoreTrioStream = httpcore._backends.trio.TrioStream
+
+    from dns.query import _compute_times, _expiration_for_this_attempt, _remaining
+
+    class _NetworkBackend(_CoreAsyncNetworkBackend):
+        def __init__(self, resolver, local_port, bootstrap_address, family):
+            super().__init__()
+            self._local_port = local_port
+            self._resolver = resolver
+            self._bootstrap_address = bootstrap_address
+            self._family = family
+
+        async def connect_tcp(
+            self, host, port, timeout, local_address, socket_options=None
+        ):  # pylint: disable=signature-differs
+            addresses = []
+            _, expiration = _compute_times(timeout)
+            if dns.inet.is_address(host):
+                addresses.append(host)
+            elif self._bootstrap_address is not None:
+                addresses.append(self._bootstrap_address)
+            else:
+                timeout = _remaining(expiration)
+                family = self._family
+                if local_address:
+                    family = dns.inet.af_for_address(local_address)
+                answers = await self._resolver.resolve_name(
+                    host, family=family, lifetime=timeout
+                )
+                addresses = answers.addresses()
+            for address in addresses:
+                try:
+                    af = dns.inet.af_for_address(address)
+                    if local_address is not None or self._local_port != 0:
+                        source = (local_address, self._local_port)
+                    else:
+                        source = None
+                    destination = (address, port)
+                    attempt_expiration = _expiration_for_this_attempt(2.0, expiration)
+                    timeout = _remaining(attempt_expiration)
+                    sock = await Backend().make_socket(
+                        af, socket.SOCK_STREAM, 0, source, destination, timeout
+                    )
+                    return _CoreTrioStream(sock.stream)
+                except Exception:
+                    continue
+            raise httpcore.ConnectError
+
+        async def connect_unix_socket(
+            self, path, timeout, socket_options=None
+        ):  # pylint: disable=signature-differs
+            raise NotImplementedError
+
+        async def sleep(self, seconds):  # pylint: disable=signature-differs
+            await trio.sleep(seconds)
+
+    class _HTTPTransport(httpx.AsyncHTTPTransport):
+        def __init__(
+            self,
+            *args,
+            local_port=0,
+            bootstrap_address=None,
+            resolver=None,
+            family=socket.AF_UNSPEC,
+            **kwargs,
+        ):
+            if resolver is None:
+                # pylint: disable=import-outside-toplevel,redefined-outer-name
+                import dns.asyncresolver
+
+                resolver = dns.asyncresolver.Resolver()
+            super().__init__(*args, **kwargs)
+            self._pool._network_backend = _NetworkBackend(
+                resolver, local_port, bootstrap_address, family
+            )
+
+except ImportError:
+    _HTTPTransport = dns._asyncbackend.NullTransport  # type: ignore
+
+
 class Backend(dns._asyncbackend.Backend):
     def name(self):
@ -104,8 +202,14 @@ class Backend(dns._asyncbackend.Backend):
             if source:
                 await s.bind(_lltuple(source, af))
             if socktype == socket.SOCK_STREAM:
+                connected = False
                 with _maybe_timeout(timeout):
                     await s.connect(_lltuple(destination, af))
+                    connected = True
+                if not connected:
+                    raise dns.exception.Timeout(
+                        timeout=timeout
+                    )  # lgtm[py/unreachable-statement]
         except Exception:  # pragma: no cover
             s.close()
             raise
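`trio.move_on_after` cancels its block silently instead of raising, so the code cannot distinguish a completed `connect()` from a cancelled one without help; the `connected` flag is the standard idiom. Distilled into a standalone sketch:

import trio
import trio.socket

async def connect_with_deadline(addr, timeout):
    s = trio.socket.socket()
    connected = False
    with trio.move_on_after(timeout):  # cancels the body on expiry, no exception
        await s.connect(addr)
        connected = True  # only reached if connect() finished in time
    if not connected:
        s.close()
        raise TimeoutError(f'connect to {addr!r} timed out')
    return s

# trio.run(connect_with_deadline, ('192.0.2.1', 80), 5)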
@ -130,3 +234,13 @@ class Backend(dns._asyncbackend.Backend):
|
||||||
|
|
||||||
async def sleep(self, interval):
|
async def sleep(self, interval):
|
||||||
await trio.sleep(interval)
|
await trio.sleep(interval)
|
||||||
|
|
||||||
|
def get_transport_class(self):
|
||||||
|
return _HTTPTransport
|
||||||
|
|
||||||
|
async def wait_for(self, awaitable, timeout):
|
||||||
|
with _maybe_timeout(timeout):
|
||||||
|
return await awaitable
|
||||||
|
raise dns.exception.Timeout(
|
||||||
|
timeout=timeout
|
||||||
|
) # pragma: no cover lgtm[py/unreachable-statement]
|
||||||
|
|
|
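The two methods added at the end of this hunk give every backend a uniform timeout contract: wait_for() races an awaitable against a deadline and normalizes expiry to dns.exception.Timeout. For comparison, a minimal sketch of the same contract on asyncio (illustrative only, not part of the diff; the trio code above gets the behavior from _maybe_timeout() instead):

import asyncio

import dns.exception


async def wait_for(awaitable, timeout):
    # Mirror of Backend.wait_for() above: bound any awaitable by a deadline
    # and convert the library-specific timeout into dns.exception.Timeout.
    try:
        return await asyncio.wait_for(awaitable, timeout)
    except asyncio.TimeoutError:
        raise dns.exception.Timeout(timeout=timeout)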
@@ -5,13 +5,12 @@ from typing import Dict
 import dns.exception

 # pylint: disable=unused-import
-from dns._asyncbackend import (  # noqa: F401  lgtm[py/unused-import]
-    Socket,
-    DatagramSocket,
-    StreamSocket,
+from dns._asyncbackend import (
     Backend,
-)  # noqa: F401  lgtm[py/unused-import]
+    DatagramSocket,
+    Socket,
+    StreamSocket,
+)

 # pylint: enable=unused-import

@@ -30,8 +29,8 @@ class AsyncLibraryNotFoundError(dns.exception.DNSException):
 def get_backend(name: str) -> Backend:
     """Get the specified asynchronous backend.

-    *name*, a ``str``, the name of the backend.  Currently the "trio",
-    "curio", and "asyncio" backends are available.
+    *name*, a ``str``, the name of the backend.  Currently the "trio"
+    and "asyncio" backends are available.

     Raises NotImplementedError if an unknown backend name is specified.
     """

@@ -43,10 +42,6 @@ def get_backend(name: str) -> Backend:
         import dns._trio_backend

         backend = dns._trio_backend.Backend()
-    elif name == "curio":
-        import dns._curio_backend
-
-        backend = dns._curio_backend.Backend()
     elif name == "asyncio":
         import dns._asyncio_backend

@@ -73,9 +68,7 @@ def sniff() -> str:
         try:
             return sniffio.current_async_library()
         except sniffio.AsyncLibraryNotFoundError:
-            raise AsyncLibraryNotFoundError(
-                "sniffio cannot determine " + "async library"
-            )
+            raise AsyncLibraryNotFoundError("sniffio cannot determine async library")
     except ImportError:
         import asyncio
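These hunks drop the curio backend, leaving trio and asyncio. Backend selection from user code is unchanged; a short usage sketch against the real dns.asyncbackend API:

import dns.asyncbackend

backend = dns.asyncbackend.get_backend("asyncio")  # or "trio"; "curio" is gone
print(backend.name())  # "asyncio"
# Inside a running event loop, sniff() and get_default_backend() can instead
# detect the async library automatically via sniffio.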
@@ -17,39 +17,38 @@

 """Talk to a DNS server."""

-from typing import Any, Dict, Optional, Tuple, Union
-
 import base64
 import contextlib
 import socket
 import struct
 import time
+from typing import Any, Dict, Optional, Tuple, Union

 import dns.asyncbackend
 import dns.exception
 import dns.inet
-import dns.name
 import dns.message
+import dns.name
 import dns.quic
 import dns.rcode
 import dns.rdataclass
 import dns.rdatatype
 import dns.transaction

 from dns._asyncbackend import NullContext
 from dns.query import (
-    _compute_times,
-    _matches_destination,
     BadResponse,
-    ssl,
-    UDPMode,
-    _have_httpx,
-    _have_http2,
     NoDOH,
     NoDOQ,
+    UDPMode,
+    _compute_times,
+    _have_http2,
+    _matches_destination,
+    _remaining,
+    have_doh,
+    ssl,
 )

-if _have_httpx:
+if have_doh:
     import httpx

 # for brevity
@@ -73,7 +72,7 @@ def _source_tuple(af, address, port):


 def _timeout(expiration, now=None):
-    if expiration:
+    if expiration is not None:
         if not now:
             now = time.time()
         return max(expiration - now, 0)
@@ -445,9 +444,6 @@ async def tls(
         ssl_context.minimum_version = ssl.TLSVersion.TLSv1_2
         if server_hostname is None:
             ssl_context.check_hostname = False
-    else:
-        ssl_context = None
-        server_hostname = None
     af = dns.inet.af_for_address(where)
     stuple = _source_tuple(af, source, source_port)
     dtuple = (where, port)
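The _timeout() change swaps a truthiness test for an explicit None check, so an expiration of exactly 0 (a deadline already in the past) is still enforced instead of being treated as "no timeout". An illustrative before/after, with hypothetical helper names:

import time


def _timeout_old(expiration, now=None):
    if expiration:  # 0 is falsy, so a spent deadline silently became "no limit"
        if not now:
            now = time.time()
        return max(expiration - now, 0)
    return None


def _timeout_new(expiration, now=None):
    if expiration is not None:
        if not now:
            now = time.time()
        return max(expiration - now, 0)
    return None


print(_timeout_old(0, now=100.0))  # None: the deadline is dropped
print(_timeout_new(0, now=100.0))  # 0: an expired deadline stays enforced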
@@ -495,6 +491,9 @@ async def https(
     path: str = "/dns-query",
     post: bool = True,
     verify: Union[bool, str] = True,
+    bootstrap_address: Optional[str] = None,
+    resolver: Optional["dns.asyncresolver.Resolver"] = None,
+    family: Optional[int] = socket.AF_UNSPEC,
 ) -> dns.message.Message:
     """Return the response obtained after sending a query via DNS-over-HTTPS.

@@ -508,8 +507,10 @@ async def https(
     parameters, exceptions, and return type of this method.
     """

-    if not _have_httpx:
-        raise NoDOH("httpx is not available.")  # pragma: no cover
+    if not have_doh:
+        raise NoDOH  # pragma: no cover
+    if client and not isinstance(client, httpx.AsyncClient):
+        raise ValueError("session parameter must be an httpx.AsyncClient")

     wire = q.to_wire()
     try:
@@ -518,15 +519,32 @@ async def https(
     af = None
     transport = None
     headers = {"accept": "application/dns-message"}
-    if af is not None:
+    if af is not None and dns.inet.is_address(where):
         if af == socket.AF_INET:
             url = "https://{}:{}{}".format(where, port, path)
         elif af == socket.AF_INET6:
             url = "https://[{}]:{}{}".format(where, port, path)
     else:
         url = where
-    if source is not None:
-        transport = httpx.AsyncHTTPTransport(local_address=source[0])
+    backend = dns.asyncbackend.get_default_backend()
+
+    if source is None:
+        local_address = None
+        local_port = 0
+    else:
+        local_address = source
+        local_port = source_port
+    transport = backend.get_transport_class()(
+        local_address=local_address,
+        http1=True,
+        http2=_have_http2,
+        verify=verify,
+        local_port=local_port,
+        bootstrap_address=bootstrap_address,
+        resolver=resolver,
+        family=family,
+    )

     if client:
         cm: contextlib.AbstractAsyncContextManager = NullContext(client)
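With the transport now built from the backend, https() can pin the server's address via bootstrap_address while TLS still verifies the URL's hostname. A hedged usage sketch (the resolver URL and address below are illustrative, and the keyword arguments assume the post-change signature shown in this hunk):

import asyncio

import dns.asyncquery
import dns.message
import dns.rcode


async def main():
    q = dns.message.make_query("example.com", "A")
    r = await dns.asyncquery.https(
        q,
        "https://dns.google/dns-query",
        bootstrap_address="8.8.8.8",  # skip re-resolving the server name
        timeout=5.0,
    )
    print(dns.rcode.to_text(r.rcode()))


asyncio.run(main())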
@@ -545,14 +563,14 @@ async def https(
                 "content-length": str(len(wire)),
             }
         )
-        response = await the_client.post(
-            url, headers=headers, content=wire, timeout=timeout
-        )
+        response = await backend.wait_for(
+            the_client.post(url, headers=headers, content=wire), timeout
+        )
     else:
         wire = base64.urlsafe_b64encode(wire).rstrip(b"=")
         twire = wire.decode()  # httpx does a repr() if we give it bytes
-        response = await the_client.get(
-            url, headers=headers, timeout=timeout, params={"dns": twire}
-        )
+        response = await backend.wait_for(
+            the_client.get(url, headers=headers, params={"dns": twire}), timeout
+        )

     # see https://tools.ietf.org/html/rfc8484#section-4.2.1 for info about DoH
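Two details worth noting in this hunk: timeout enforcement moves out of httpx and into backend.wait_for(), so slow requests now surface as dns.exception.Timeout on every backend; and the GET path keeps RFC 8484's unpadded base64url encoding of the query. The encoding step in isolation, with an illustrative query:

import base64

import dns.message

wire = dns.message.make_query("example.com", "A").to_wire()
twire = base64.urlsafe_b64encode(wire).rstrip(b"=").decode()
print(twire)  # value of the "dns" query parameter on the GET URL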
@@ -690,6 +708,7 @@ async def quic(
     connection: Optional[dns.quic.AsyncQuicConnection] = None,
     verify: Union[bool, str] = True,
     backend: Optional[dns.asyncbackend.Backend] = None,
+    server_hostname: Optional[str] = None,
 ) -> dns.message.Message:
     """Return the response obtained after sending an asynchronous query via
     DNS-over-QUIC.

@@ -715,14 +734,16 @@ async def quic(
     (cfactory, mfactory) = dns.quic.factories_for_backend(backend)

     async with cfactory() as context:
-        async with mfactory(context, verify_mode=verify) as the_manager:
+        async with mfactory(
+            context, verify_mode=verify, server_name=server_hostname
+        ) as the_manager:
             if not connection:
                 the_connection = the_manager.connect(where, port, source, source_port)
-            start = time.time()
-            stream = await the_connection.make_stream()
+            (start, expiration) = _compute_times(timeout)
+            stream = await the_connection.make_stream(timeout)
             async with stream:
                 await stream.send(wire, True)
-                wire = await stream.receive(timeout)
+                wire = await stream.receive(_remaining(expiration))
             finish = time.time()
     r = dns.message.from_wire(
         wire,
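The quic() changes add SNI control via server_hostname and compute a real expiration with _compute_times(), so the stream receive honors the remaining time budget rather than the full timeout. A hedged usage sketch (endpoint and hostname are illustrative; DNS-over-QUIC needs the aioquic extra installed):

import asyncio

import dns.asyncquery
import dns.message


async def main():
    q = dns.message.make_query("example.com", "A")
    r = await dns.asyncquery.quic(
        q, "94.140.14.14", timeout=5.0, server_hostname="dns.adguard-dns.com"
    )
    print(r.answer)


asyncio.run(main())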
"""Asynchronous DNS stub resolver."""
|
"""Asynchronous DNS stub resolver."""
|
||||||
|
|
||||||
from typing import Any, Dict, Optional, Union
|
import socket
|
||||||
|
|
||||||
import time
|
import time
|
||||||
|
from typing import Any, Dict, List, Optional, Union
|
||||||
|
|
||||||
|
import dns._ddr
|
||||||
import dns.asyncbackend
|
import dns.asyncbackend
|
||||||
import dns.asyncquery
|
import dns.asyncquery
|
||||||
import dns.exception
|
import dns.exception
|
||||||
|
@ -31,8 +32,7 @@ import dns.rdatatype
|
||||||
import dns.resolver # lgtm[py/import-and-import-from]
|
import dns.resolver # lgtm[py/import-and-import-from]
|
||||||
|
|
||||||
# import some resolver symbols for brevity
|
# import some resolver symbols for brevity
|
||||||
from dns.resolver import NXDOMAIN, NoAnswer, NotAbsolute, NoRootSOA
|
from dns.resolver import NXDOMAIN, NoAnswer, NoRootSOA, NotAbsolute
|
||||||
|
|
||||||
|
|
||||||
# for indentation purposes below
|
# for indentation purposes below
|
||||||
_udp = dns.asyncquery.udp
|
_udp = dns.asyncquery.udp
|
||||||
|
@@ -83,37 +83,19 @@ class Resolver(dns.resolver.BaseResolver):
         assert request is not None  # needed for type checking
         done = False
         while not done:
-            (nameserver, port, tcp, backoff) = resolution.next_nameserver()
+            (nameserver, tcp, backoff) = resolution.next_nameserver()
             if backoff:
                 await backend.sleep(backoff)
             timeout = self._compute_timeout(start, lifetime, resolution.errors)
             try:
-                if dns.inet.is_address(nameserver):
-                    if tcp:
-                        response = await _tcp(
-                            request,
-                            nameserver,
-                            timeout,
-                            port,
-                            source,
-                            source_port,
-                            backend=backend,
-                        )
-                    else:
-                        response = await _udp(
-                            request,
-                            nameserver,
-                            timeout,
-                            port,
-                            source,
-                            source_port,
-                            raise_on_truncation=True,
-                            backend=backend,
-                        )
-                else:
-                    response = await dns.asyncquery.https(
-                        request, nameserver, timeout=timeout
-                    )
+                response = await nameserver.async_query(
+                    request,
+                    timeout=timeout,
+                    source=source,
+                    source_port=source_port,
+                    max_size=tcp,
+                    backend=backend,
+                )
             except Exception as ex:
                 (_, done) = resolution.query_result(None, ex)
                 continue
@@ -153,6 +135,73 @@ class Resolver(dns.resolver.BaseResolver):
             dns.reversename.from_address(ipaddr), *args, **modified_kwargs
         )

+    async def resolve_name(
+        self,
+        name: Union[dns.name.Name, str],
+        family: int = socket.AF_UNSPEC,
+        **kwargs: Any,
+    ) -> dns.resolver.HostAnswers:
+        """Use an asynchronous resolver to query for address records.
+
+        This utilizes the resolve() method to perform A and/or AAAA lookups on
+        the specified name.
+
+        *qname*, a ``dns.name.Name`` or ``str``, the name to resolve.
+
+        *family*, an ``int``, the address family.  If socket.AF_UNSPEC
+        (the default), both A and AAAA records will be retrieved.
+
+        All other arguments that can be passed to the resolve() function
+        except for rdtype and rdclass are also supported by this
+        function.
+        """
+        # We make a modified kwargs for type checking happiness, as otherwise
+        # we get a legit warning about possibly having rdtype and rdclass
+        # in the kwargs more than once.
+        modified_kwargs: Dict[str, Any] = {}
+        modified_kwargs.update(kwargs)
+        modified_kwargs.pop("rdtype", None)
+        modified_kwargs["rdclass"] = dns.rdataclass.IN
+
+        if family == socket.AF_INET:
+            v4 = await self.resolve(name, dns.rdatatype.A, **modified_kwargs)
+            return dns.resolver.HostAnswers.make(v4=v4)
+        elif family == socket.AF_INET6:
+            v6 = await self.resolve(name, dns.rdatatype.AAAA, **modified_kwargs)
+            return dns.resolver.HostAnswers.make(v6=v6)
+        elif family != socket.AF_UNSPEC:
+            raise NotImplementedError(f"unknown address family {family}")
+
+        raise_on_no_answer = modified_kwargs.pop("raise_on_no_answer", True)
+        lifetime = modified_kwargs.pop("lifetime", None)
+        start = time.time()
+        v6 = await self.resolve(
+            name,
+            dns.rdatatype.AAAA,
+            raise_on_no_answer=False,
+            lifetime=self._compute_timeout(start, lifetime),
+            **modified_kwargs,
+        )
+        # Note that setting name ensures we query the same name
+        # for A as we did for AAAA.  (This is just in case search lists
+        # are active by default in the resolver configuration and
+        # we might be talking to a server that says NXDOMAIN when it
+        # wants to say NOERROR no data.)
+        name = v6.qname
+        v4 = await self.resolve(
+            name,
+            dns.rdatatype.A,
+            raise_on_no_answer=False,
+            lifetime=self._compute_timeout(start, lifetime),
+            **modified_kwargs,
+        )
+        answers = dns.resolver.HostAnswers.make(
+            v6=v6, v4=v4, add_empty=not raise_on_no_answer
+        )
+        if not answers:
+            raise NoAnswer(response=v6.response)
+        return answers
+
     # pylint: disable=redefined-outer-name

     async def canonical_name(self, name: Union[dns.name.Name, str]) -> dns.name.Name:
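resolve_name() is the async counterpart of a getaddrinfo-style lookup: AAAA first, then A against the same qname, merged into a HostAnswers. Usage sketch (hostname illustrative):

import asyncio

import dns.asyncresolver


async def main():
    resolver = dns.asyncresolver.Resolver()
    answers = await resolver.resolve_name("www.example.com")
    for address in answers.addresses():  # v6 first, then v4, as strings
        print(address)


asyncio.run(main())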
@@ -176,6 +225,37 @@ class Resolver(dns.resolver.BaseResolver):
             canonical_name = e.canonical_name
         return canonical_name

+    async def try_ddr(self, lifetime: float = 5.0) -> None:
+        """Try to update the resolver's nameservers using Discovery of Designated
+        Resolvers (DDR).  If successful, the resolver will subsequently use
+        DNS-over-HTTPS or DNS-over-TLS for future queries.
+
+        *lifetime*, a float, is the maximum time to spend attempting DDR.  The
+        default is 5 seconds.
+
+        If the SVCB query is successful and results in a non-empty list of
+        nameservers, then the resolver's nameservers are set to the returned
+        servers in priority order.
+
+        The current implementation does not use any address hints from the SVCB
+        record, nor does it resolve addresses for the SVCB target name, rather it
+        assumes that the bootstrap nameserver will always be one of the addresses
+        and uses it.  A future revision to the code may offer fuller support.  The
+        code verifies that the bootstrap nameserver is in the Subject Alternative
+        Name field of the TLS certificate.
+        """
+        try:
+            expiration = time.time() + lifetime
+            answer = await self.resolve(
+                dns._ddr._local_resolver_name, "svcb", lifetime=lifetime
+            )
+            timeout = dns.query._remaining(expiration)
+            nameservers = await dns._ddr._get_nameservers_async(answer, timeout)
+            if len(nameservers) > 0:
+                self.nameservers = nameservers
+        except Exception:
+            pass
+

 default_resolver = None
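try_ddr() is deliberately best effort: any failure is swallowed and the nameserver list is left alone. Usage sketch:

import asyncio

import dns.asyncresolver


async def main():
    resolver = dns.asyncresolver.Resolver()
    await resolver.try_ddr()  # upgrades to DoH/DoT nameservers when offered
    print(resolver.nameservers)


asyncio.run(main())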
@@ -246,6 +326,18 @@ async def resolve_address(
     return await get_default_resolver().resolve_address(ipaddr, *args, **kwargs)


+async def resolve_name(
+    name: Union[dns.name.Name, str], family: int = socket.AF_UNSPEC, **kwargs: Any
+) -> dns.resolver.HostAnswers:
+    """Use a resolver to asynchronously query for address records.
+
+    See :py:func:`dns.asyncresolver.Resolver.resolve_name` for more
+    information on the parameters.
+    """
+
+    return await get_default_resolver().resolve_name(name, family, **kwargs)
+
+
 async def canonical_name(name: Union[dns.name.Name, str]) -> dns.name.Name:
     """Determine the canonical name of *name*.
@@ -256,6 +348,16 @@ async def canonical_name(name: Union[dns.name.Name, str]) -> dns.name.Name:
     return await get_default_resolver().canonical_name(name)


+async def try_ddr(timeout: float = 5.0) -> None:
+    """Try to update the default resolver's nameservers using Discovery of
+    Designated Resolvers (DDR).  If successful, the resolver will subsequently
+    use DNS-over-HTTPS or DNS-over-TLS for future queries.
+
+    See :py:func:`dns.resolver.Resolver.try_ddr` for more information.
+    """
+    return await get_default_resolver().try_ddr(timeout)
+
+
 async def zone_for_name(
     name: Union[dns.name.Name, str],
     rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN,
@@ -290,3 +392,84 @@ async def zone_for_name(
             name = name.parent()
         except dns.name.NoParent:  # pragma: no cover
             raise NoRootSOA
+
+
+async def make_resolver_at(
+    where: Union[dns.name.Name, str],
+    port: int = 53,
+    family: int = socket.AF_UNSPEC,
+    resolver: Optional[Resolver] = None,
+) -> Resolver:
+    """Make a stub resolver using the specified destination as the full resolver.
+
+    *where*, a ``dns.name.Name`` or ``str``, the domain name or IP address of the
+    full resolver.
+
+    *port*, an ``int``, the port to use.  If not specified, the default is 53.
+
+    *family*, an ``int``, the address family to use.  This parameter is used if
+    *where* is not an address.  The default is ``socket.AF_UNSPEC`` in which case
+    the first address returned by ``resolve_name()`` will be used, otherwise the
+    first address of the specified family will be used.
+
+    *resolver*, a ``dns.asyncresolver.Resolver`` or ``None``, the resolver to use
+    for resolution of hostnames.  If not specified, the default resolver will be
+    used.
+
+    Returns a ``dns.resolver.Resolver`` or raises an exception.
+    """
+    if resolver is None:
+        resolver = get_default_resolver()
+    nameservers: List[Union[str, dns.nameserver.Nameserver]] = []
+    if isinstance(where, str) and dns.inet.is_address(where):
+        nameservers.append(dns.nameserver.Do53Nameserver(where, port))
+    else:
+        answers = await resolver.resolve_name(where, family)
+        for address in answers.addresses():
+            nameservers.append(dns.nameserver.Do53Nameserver(address, port))
+    res = dns.asyncresolver.Resolver(configure=False)
+    res.nameservers = nameservers
+    return res
+
+
+async def resolve_at(
+    where: Union[dns.name.Name, str],
+    qname: Union[dns.name.Name, str],
+    rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A,
+    rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN,
+    tcp: bool = False,
+    source: Optional[str] = None,
+    raise_on_no_answer: bool = True,
+    source_port: int = 0,
+    lifetime: Optional[float] = None,
+    search: Optional[bool] = None,
+    backend: Optional[dns.asyncbackend.Backend] = None,
+    port: int = 53,
+    family: int = socket.AF_UNSPEC,
+    resolver: Optional[Resolver] = None,
+) -> dns.resolver.Answer:
+    """Query nameservers to find the answer to the question.
+
+    This is a convenience function that calls
+    ``dns.asyncresolver.make_resolver_at()`` to make a resolver, and then uses
+    it to resolve the query.
+
+    See ``dns.asyncresolver.Resolver.resolve`` for more information on the
+    resolution parameters, and ``dns.asyncresolver.make_resolver_at`` for
+    information about the resolver parameters *where*, *port*, *family*, and
+    *resolver*.
+
+    If making more than one query, it is more efficient to call
+    ``dns.asyncresolver.make_resolver_at()`` and then use that resolver for the
+    queries instead of calling ``resolve_at()`` multiple times.
+    """
+    res = await make_resolver_at(where, port, family, resolver)
+    return await res.resolve(
+        qname,
+        rdtype,
+        rdclass,
+        tcp,
+        source,
+        raise_on_no_answer,
+        source_port,
+        lifetime,
+        search,
+        backend,
+    )
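resolve_at() wraps make_resolver_at() for one-shot queries against a chosen server; as the docstring notes, build the resolver once with make_resolver_at() if you will query repeatedly. Usage sketch (server and name illustrative):

import asyncio

import dns.asyncresolver


async def main():
    answer = await dns.asyncresolver.resolve_at("8.8.8.8", "example.org", "MX")
    for rr in answer:
        print(rr)


asyncio.run(main())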
@@ -17,50 +17,44 @@

 """Common DNSSEC-related functions and constants."""

-from typing import Any, cast, Dict, List, Optional, Set, Tuple, Union
-
+import base64
+import contextlib
+import functools
 import hashlib
-import math
 import struct
 import time
-import base64
 from datetime import datetime
+from typing import Callable, Dict, List, Optional, Set, Tuple, Union, cast

-from dns.dnssectypes import Algorithm, DSDigest, NSEC3Hash
-
 import dns.exception
 import dns.name
 import dns.node
-import dns.rdataset
 import dns.rdata
-import dns.rdatatype
 import dns.rdataclass
+import dns.rdataset
+import dns.rdatatype
 import dns.rrset
+import dns.transaction
+import dns.zone
+from dns.dnssectypes import Algorithm, DSDigest, NSEC3Hash
+from dns.exception import (  # pylint: disable=W0611
+    AlgorithmKeyMismatch,
+    DeniedByPolicy,
+    UnsupportedAlgorithm,
+    ValidationFailure,
+)
 from dns.rdtypes.ANY.CDNSKEY import CDNSKEY
 from dns.rdtypes.ANY.CDS import CDS
 from dns.rdtypes.ANY.DNSKEY import DNSKEY
 from dns.rdtypes.ANY.DS import DS
+from dns.rdtypes.ANY.NSEC import NSEC, Bitmap
+from dns.rdtypes.ANY.NSEC3PARAM import NSEC3PARAM
 from dns.rdtypes.ANY.RRSIG import RRSIG, sigtime_to_posixtime
 from dns.rdtypes.dnskeybase import Flag


-class UnsupportedAlgorithm(dns.exception.DNSException):
-    """The DNSSEC algorithm is not supported."""
-
-
-class AlgorithmKeyMismatch(UnsupportedAlgorithm):
-    """The DNSSEC algorithm is not supported for the given key type."""
-
-
-class ValidationFailure(dns.exception.DNSException):
-    """The DNSSEC signature is invalid."""
-
-
-class DeniedByPolicy(dns.exception.DNSException):
-    """Denied by DNSSEC policy."""
-
-
 PublicKey = Union[
+    "GenericPublicKey",
     "rsa.RSAPublicKey",
     "ec.EllipticCurvePublicKey",
     "ed25519.Ed25519PublicKey",
@@ -68,12 +62,15 @@ PublicKey = Union[
 ]

 PrivateKey = Union[
+    "GenericPrivateKey",
     "rsa.RSAPrivateKey",
     "ec.EllipticCurvePrivateKey",
     "ed25519.Ed25519PrivateKey",
     "ed448.Ed448PrivateKey",
 ]

+RRsetSigner = Callable[[dns.transaction.Transaction, dns.rrset.RRset], None]
+

 def algorithm_from_text(text: str) -> Algorithm:
     """Convert text into a DNSSEC algorithm value.
@@ -308,113 +305,13 @@ def _find_candidate_keys(
     return [
         cast(DNSKEY, rd)
         for rd in rdataset
-        if rd.algorithm == rrsig.algorithm and key_id(rd) == rrsig.key_tag
+        if rd.algorithm == rrsig.algorithm
+        and key_id(rd) == rrsig.key_tag
+        and (rd.flags & Flag.ZONE) == Flag.ZONE  # RFC 4034 2.1.1
+        and rd.protocol == 3  # RFC 4034 2.1.2
     ]


-def _is_rsa(algorithm: int) -> bool:
-    return algorithm in (
-        Algorithm.RSAMD5,
-        Algorithm.RSASHA1,
-        Algorithm.RSASHA1NSEC3SHA1,
-        Algorithm.RSASHA256,
-        Algorithm.RSASHA512,
-    )
-
-
-def _is_dsa(algorithm: int) -> bool:
-    return algorithm in (Algorithm.DSA, Algorithm.DSANSEC3SHA1)
-
-
-def _is_ecdsa(algorithm: int) -> bool:
-    return algorithm in (Algorithm.ECDSAP256SHA256, Algorithm.ECDSAP384SHA384)
-
-
-def _is_eddsa(algorithm: int) -> bool:
-    return algorithm in (Algorithm.ED25519, Algorithm.ED448)
-
-
-def _is_gost(algorithm: int) -> bool:
-    return algorithm == Algorithm.ECCGOST
-
-
-def _is_md5(algorithm: int) -> bool:
-    return algorithm == Algorithm.RSAMD5
-
-
-def _is_sha1(algorithm: int) -> bool:
-    return algorithm in (
-        Algorithm.DSA,
-        Algorithm.RSASHA1,
-        Algorithm.DSANSEC3SHA1,
-        Algorithm.RSASHA1NSEC3SHA1,
-    )
-
-
-def _is_sha256(algorithm: int) -> bool:
-    return algorithm in (Algorithm.RSASHA256, Algorithm.ECDSAP256SHA256)
-
-
-def _is_sha384(algorithm: int) -> bool:
-    return algorithm == Algorithm.ECDSAP384SHA384
-
-
-def _is_sha512(algorithm: int) -> bool:
-    return algorithm == Algorithm.RSASHA512
-
-
-def _ensure_algorithm_key_combination(algorithm: int, key: PublicKey) -> None:
-    """Ensure algorithm is valid for key type, throwing an exception on
-    mismatch."""
-    if isinstance(key, rsa.RSAPublicKey):
-        if _is_rsa(algorithm):
-            return
-        raise AlgorithmKeyMismatch('algorithm "%s" not valid for RSA key' % algorithm)
-    if isinstance(key, dsa.DSAPublicKey):
-        if _is_dsa(algorithm):
-            return
-        raise AlgorithmKeyMismatch('algorithm "%s" not valid for DSA key' % algorithm)
-    if isinstance(key, ec.EllipticCurvePublicKey):
-        if _is_ecdsa(algorithm):
-            return
-        raise AlgorithmKeyMismatch('algorithm "%s" not valid for ECDSA key' % algorithm)
-    if isinstance(key, ed25519.Ed25519PublicKey):
-        if algorithm == Algorithm.ED25519:
-            return
-        raise AlgorithmKeyMismatch(
-            'algorithm "%s" not valid for ED25519 key' % algorithm
-        )
-    if isinstance(key, ed448.Ed448PublicKey):
-        if algorithm == Algorithm.ED448:
-            return
-        raise AlgorithmKeyMismatch('algorithm "%s" not valid for ED448 key' % algorithm)
-
-    raise TypeError("unsupported key type")
-
-
-def _make_hash(algorithm: int) -> Any:
-    if _is_md5(algorithm):
-        return hashes.MD5()
-    if _is_sha1(algorithm):
-        return hashes.SHA1()
-    if _is_sha256(algorithm):
-        return hashes.SHA256()
-    if _is_sha384(algorithm):
-        return hashes.SHA384()
-    if _is_sha512(algorithm):
-        return hashes.SHA512()
-    if algorithm == Algorithm.ED25519:
-        return hashes.SHA512()
-    if algorithm == Algorithm.ED448:
-        return hashes.SHAKE256(114)
-
-    raise ValidationFailure("unknown hash for algorithm %u" % algorithm)
-
-
-def _bytes_to_long(b: bytes) -> int:
-    return int.from_bytes(b, "big")
-
-
 def _get_rrname_rdataset(
     rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]],
 ) -> Tuple[dns.name.Name, dns.rdataset.Rdataset]:
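The tightened comprehension encodes two RFC 4034 requirements that were previously unchecked: a validating DNSKEY must have the ZONE flag set (section 2.1.1) and protocol 3 (section 2.1.2). The same filter as a standalone predicate, for illustration (key_id is the public dns.dnssec helper):

import dns.dnssec
from dns.rdtypes.dnskeybase import Flag


def is_candidate(rd, rrsig):
    # Same logic as the comprehension above, as a plain predicate.
    return (
        rd.algorithm == rrsig.algorithm
        and dns.dnssec.key_id(rd) == rrsig.key_tag
        and (rd.flags & Flag.ZONE) == Flag.ZONE  # RFC 4034 2.1.1
        and rd.protocol == 3  # RFC 4034 2.1.2
    )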
@@ -424,85 +321,13 @@ def _get_rrname_rdataset(
         return rrset.name, rrset


-def _validate_signature(sig: bytes, data: bytes, key: DNSKEY, chosen_hash: Any) -> None:
-    keyptr: bytes
-    if _is_rsa(key.algorithm):
-        # we ignore because mypy is confused and thinks key.key is a str for unknown
-        # reasons.
-        keyptr = key.key
-        (bytes_,) = struct.unpack("!B", keyptr[0:1])
-        keyptr = keyptr[1:]
-        if bytes_ == 0:
-            (bytes_,) = struct.unpack("!H", keyptr[0:2])
-            keyptr = keyptr[2:]
-        rsa_e = keyptr[0:bytes_]
-        rsa_n = keyptr[bytes_:]
-        try:
-            rsa_public_key = rsa.RSAPublicNumbers(
-                _bytes_to_long(rsa_e), _bytes_to_long(rsa_n)
-            ).public_key(default_backend())
-        except ValueError:
-            raise ValidationFailure("invalid public key")
-        rsa_public_key.verify(sig, data, padding.PKCS1v15(), chosen_hash)
-    elif _is_dsa(key.algorithm):
-        keyptr = key.key
-        (t,) = struct.unpack("!B", keyptr[0:1])
-        keyptr = keyptr[1:]
-        octets = 64 + t * 8
-        dsa_q = keyptr[0:20]
-        keyptr = keyptr[20:]
-        dsa_p = keyptr[0:octets]
-        keyptr = keyptr[octets:]
-        dsa_g = keyptr[0:octets]
-        keyptr = keyptr[octets:]
-        dsa_y = keyptr[0:octets]
-        try:
-            dsa_public_key = dsa.DSAPublicNumbers(  # type: ignore
-                _bytes_to_long(dsa_y),
-                dsa.DSAParameterNumbers(
-                    _bytes_to_long(dsa_p), _bytes_to_long(dsa_q), _bytes_to_long(dsa_g)
-                ),
-            ).public_key(default_backend())
-        except ValueError:
-            raise ValidationFailure("invalid public key")
-        dsa_public_key.verify(sig, data, chosen_hash)
-    elif _is_ecdsa(key.algorithm):
-        keyptr = key.key
-        curve: Any
-        if key.algorithm == Algorithm.ECDSAP256SHA256:
-            curve = ec.SECP256R1()
-            octets = 32
-        else:
-            curve = ec.SECP384R1()
-            octets = 48
-        ecdsa_x = keyptr[0:octets]
-        ecdsa_y = keyptr[octets : octets * 2]
-        try:
-            ecdsa_public_key = ec.EllipticCurvePublicNumbers(
-                curve=curve, x=_bytes_to_long(ecdsa_x), y=_bytes_to_long(ecdsa_y)
-            ).public_key(default_backend())
-        except ValueError:
-            raise ValidationFailure("invalid public key")
-        ecdsa_public_key.verify(sig, data, ec.ECDSA(chosen_hash))
-    elif _is_eddsa(key.algorithm):
-        keyptr = key.key
-        loader: Any
-        if key.algorithm == Algorithm.ED25519:
-            loader = ed25519.Ed25519PublicKey
-        else:
-            loader = ed448.Ed448PublicKey
-        try:
-            eddsa_public_key = loader.from_public_bytes(keyptr)
-        except ValueError:
-            raise ValidationFailure("invalid public key")
-        eddsa_public_key.verify(sig, data)
-    elif _is_gost(key.algorithm):
-        raise UnsupportedAlgorithm(
-            'algorithm "%s" not supported by dnspython'
-            % algorithm_to_text(key.algorithm)
-        )
-    else:
-        raise ValidationFailure("unknown algorithm %u" % key.algorithm)
+def _validate_signature(sig: bytes, data: bytes, key: DNSKEY) -> None:
+    public_cls = get_algorithm_cls_from_dnskey(key).public_cls
+    try:
+        public_key = public_cls.from_dnskey(key)
+    except ValueError:
+        raise ValidationFailure("invalid public key")
+    public_key.verify(sig, data)


 def _validate_rrsig(
|
@ -559,29 +384,13 @@ def _validate_rrsig(
|
||||||
if rrsig.inception > now:
|
if rrsig.inception > now:
|
||||||
raise ValidationFailure("not yet valid")
|
raise ValidationFailure("not yet valid")
|
||||||
|
|
||||||
if _is_dsa(rrsig.algorithm):
|
|
||||||
sig_r = rrsig.signature[1:21]
|
|
||||||
sig_s = rrsig.signature[21:]
|
|
||||||
sig = utils.encode_dss_signature(_bytes_to_long(sig_r), _bytes_to_long(sig_s))
|
|
||||||
elif _is_ecdsa(rrsig.algorithm):
|
|
||||||
if rrsig.algorithm == Algorithm.ECDSAP256SHA256:
|
|
||||||
octets = 32
|
|
||||||
else:
|
|
||||||
octets = 48
|
|
||||||
sig_r = rrsig.signature[0:octets]
|
|
||||||
sig_s = rrsig.signature[octets:]
|
|
||||||
sig = utils.encode_dss_signature(_bytes_to_long(sig_r), _bytes_to_long(sig_s))
|
|
||||||
else:
|
|
||||||
sig = rrsig.signature
|
|
||||||
|
|
||||||
data = _make_rrsig_signature_data(rrset, rrsig, origin)
|
data = _make_rrsig_signature_data(rrset, rrsig, origin)
|
||||||
chosen_hash = _make_hash(rrsig.algorithm)
|
|
||||||
|
|
||||||
for candidate_key in candidate_keys:
|
for candidate_key in candidate_keys:
|
||||||
if not policy.ok_to_validate(candidate_key):
|
if not policy.ok_to_validate(candidate_key):
|
||||||
continue
|
continue
|
||||||
try:
|
try:
|
||||||
_validate_signature(sig, data, candidate_key, chosen_hash)
|
_validate_signature(rrsig.signature, data, candidate_key)
|
||||||
return
|
return
|
||||||
except (InvalidSignature, ValidationFailure):
|
except (InvalidSignature, ValidationFailure):
|
||||||
# this happens on an individual validation failure
|
# this happens on an individual validation failure
|
||||||
|
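All the per-algorithm DER juggling above collapses into the key classes, and the public signing and validation API is unchanged. A runnable sign-then-validate round trip under the new plumbing (zone data illustrative; requires the cryptography package):

import dns.dnssec
import dns.name
import dns.rdataset
import dns.rrset
from cryptography.hazmat.primitives.asymmetric import ed25519

signer = dns.name.from_text("example.")
private_key = ed25519.Ed25519PrivateKey.generate()
dnskey = dns.dnssec.make_dnskey(private_key.public_key(), dns.dnssec.Algorithm.ED25519)
rrset = dns.rrset.from_text("example.", 3600, "IN", "A", "192.0.2.1")
rrsig = dns.dnssec.sign(rrset, private_key, signer, dnskey, lifetime=300)
rrsigset = dns.rrset.from_rdata("example.", 3600, rrsig)
keys = {signer: dns.rdataset.from_rdata(3600, dnskey)}
dns.dnssec.validate(rrset, rrsigset, keys)  # raises ValidationFailure if bogus
print("validated")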
@@ -673,6 +482,7 @@ def _sign(
     lifetime: Optional[int] = None,
     verify: bool = False,
     policy: Optional[Policy] = None,
+    origin: Optional[dns.name.Name] = None,
 ) -> RRSIG:
     """Sign RRset using private key.

@@ -708,6 +518,10 @@ def _sign(
     *policy*, a ``dns.dnssec.Policy`` or ``None``.  If ``None``, the default policy,
     ``dns.dnssec.default_policy`` is used; this policy defaults to that of RFC 8624.

+    *origin*, a ``dns.name.Name`` or ``None``.  If ``None``, the default, then all
+    names in the rrset (including its owner name) must be absolute; otherwise the
+    specified origin will be used to make names absolute when signing.
+
     Raises ``DeniedByPolicy`` if the signature is denied by policy.
     """

@@ -735,16 +549,26 @@ def _sign(
     if expiration is not None:
         rrsig_expiration = to_timestamp(expiration)
     elif lifetime is not None:
-        rrsig_expiration = int(time.time()) + lifetime
+        rrsig_expiration = rrsig_inception + lifetime
     else:
         raise ValueError("expiration or lifetime must be specified")

+    # Derelativize now because we need a correct labels length for the
+    # rrsig_template.
+    if origin is not None:
+        rrname = rrname.derelativize(origin)
+    labels = len(rrname) - 1
+
+    # Adjust labels appropriately for wildcards.
+    if rrname.is_wild():
+        labels -= 1
+
     rrsig_template = RRSIG(
         rdclass=rdclass,
         rdtype=dns.rdatatype.RRSIG,
         type_covered=rdtype,
         algorithm=dnskey.algorithm,
-        labels=len(rrname) - 1,
+        labels=labels,
         original_ttl=original_ttl,
         expiration=rrsig_expiration,
         inception=rrsig_inception,
@@ -753,64 +577,19 @@ def _sign(
         signature=b"",
     )

-    data = dns.dnssec._make_rrsig_signature_data(rrset, rrsig_template)
-    chosen_hash = _make_hash(rrsig_template.algorithm)
-    signature = None
-
-    if isinstance(private_key, rsa.RSAPrivateKey):
-        if not _is_rsa(dnskey.algorithm):
-            raise ValueError("Invalid DNSKEY algorithm for RSA key")
-        signature = private_key.sign(data, padding.PKCS1v15(), chosen_hash)
-        if verify:
-            private_key.public_key().verify(
-                signature, data, padding.PKCS1v15(), chosen_hash
-            )
-    elif isinstance(private_key, dsa.DSAPrivateKey):
-        if not _is_dsa(dnskey.algorithm):
-            raise ValueError("Invalid DNSKEY algorithm for DSA key")
-        public_dsa_key = private_key.public_key()
-        if public_dsa_key.key_size > 1024:
-            raise ValueError("DSA key size overflow")
-        der_signature = private_key.sign(data, chosen_hash)
-        if verify:
-            public_dsa_key.verify(der_signature, data, chosen_hash)
-        dsa_r, dsa_s = utils.decode_dss_signature(der_signature)
-        dsa_t = (public_dsa_key.key_size // 8 - 64) // 8
-        octets = 20
-        signature = (
-            struct.pack("!B", dsa_t)
-            + int.to_bytes(dsa_r, length=octets, byteorder="big")
-            + int.to_bytes(dsa_s, length=octets, byteorder="big")
-        )
-    elif isinstance(private_key, ec.EllipticCurvePrivateKey):
-        if not _is_ecdsa(dnskey.algorithm):
-            raise ValueError("Invalid DNSKEY algorithm for EC key")
-        der_signature = private_key.sign(data, ec.ECDSA(chosen_hash))
-        if verify:
-            private_key.public_key().verify(der_signature, data, ec.ECDSA(chosen_hash))
-        if dnskey.algorithm == Algorithm.ECDSAP256SHA256:
-            octets = 32
-        else:
-            octets = 48
-        dsa_r, dsa_s = utils.decode_dss_signature(der_signature)
-        signature = int.to_bytes(dsa_r, length=octets, byteorder="big") + int.to_bytes(
-            dsa_s, length=octets, byteorder="big"
-        )
-    elif isinstance(private_key, ed25519.Ed25519PrivateKey):
-        if dnskey.algorithm != Algorithm.ED25519:
-            raise ValueError("Invalid DNSKEY algorithm for ED25519 key")
-        signature = private_key.sign(data)
-        if verify:
-            private_key.public_key().verify(signature, data)
-    elif isinstance(private_key, ed448.Ed448PrivateKey):
-        if dnskey.algorithm != Algorithm.ED448:
-            raise ValueError("Invalid DNSKEY algorithm for ED448 key")
-        signature = private_key.sign(data)
-        if verify:
-            private_key.public_key().verify(signature, data)
-    else:
-        raise TypeError("Unsupported key algorithm")
+    data = dns.dnssec._make_rrsig_signature_data(rrset, rrsig_template, origin)
+
+    if isinstance(private_key, GenericPrivateKey):
+        signing_key = private_key
+    else:
+        try:
+            private_cls = get_algorithm_cls_from_dnskey(dnskey)
+            signing_key = private_cls(key=private_key)
+        except UnsupportedAlgorithm:
+            raise TypeError("Unsupported key algorithm")
+
+    signature = signing_key.sign(data, verify)

     return cast(RRSIG, rrsig_template.replace(signature=signature))
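Note the semantic fix in the middle hunk: with only a lifetime given, expiration is now inception + lifetime instead of now + lifetime, so a deliberately backdated inception no longer stretches the effective validity window. Illustrative arithmetic:

inception = 1_700_000_000       # e.g. deliberately backdated by an hour
lifetime = 86_400
now = inception + 3_600

old_expiration = now + lifetime        # drifted with the signing moment
new_expiration = inception + lifetime  # anchored to the stated inception
print(old_expiration - new_expiration)  # 3600 seconds of drift removed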
@@ -858,9 +637,12 @@ def _make_rrsig_signature_data(
         raise ValidationFailure("relative RR name without an origin specified")
     rrname = rrname.derelativize(origin)

-    if len(rrname) - 1 < rrsig.labels:
+    name_len = len(rrname)
+    if rrname.is_wild() and rrsig.labels != name_len - 2:
+        raise ValidationFailure("wild owner name has wrong label length")
+    if name_len - 1 < rrsig.labels:
         raise ValidationFailure("owner name longer than RRSIG labels")
-    elif rrsig.labels < len(rrname) - 1:
+    elif rrsig.labels < name_len - 1:
         suffix = rrname.split(rrsig.labels + 1)[1]
         rrname = dns.name.from_text("*", suffix)
     rrnamebuf = rrname.to_digestable()
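For reference, RRSIG's labels field counts owner-name labels without the root label and without a leading "*", which is exactly what the new wildcard check enforces. A quick check with dns.name:

import dns.name

plain = dns.name.from_text("www.example.com.")
wild = dns.name.from_text("*.example.com.")
print(len(plain) - 1)                 # 3, the labels value for a plain owner
print(wild.is_wild(), len(wild) - 2)  # True 2, wildcards drop the "*" label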
@@ -884,9 +666,8 @@ def _make_dnskey(
 ) -> DNSKEY:
     """Convert a public key to DNSKEY Rdata

-    *public_key*, the public key to convert, a
-    ``cryptography.hazmat.primitives.asymmetric`` public key class applicable
-    for DNSSEC.
+    *public_key*, a ``PublicKey`` (``GenericPublicKey`` or
+    ``cryptography.hazmat.primitives.asymmetric``) to convert.

     *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm.

@@ -902,72 +683,13 @@ def _make_dnskey(
     Return DNSKEY ``Rdata``.
     """

-    def encode_rsa_public_key(public_key: "rsa.RSAPublicKey") -> bytes:
-        """Encode a public key per RFC 3110, section 2."""
-        pn = public_key.public_numbers()
-        _exp_len = math.ceil(int.bit_length(pn.e) / 8)
-        exp = int.to_bytes(pn.e, length=_exp_len, byteorder="big")
-        if _exp_len > 255:
-            exp_header = b"\0" + struct.pack("!H", _exp_len)
-        else:
-            exp_header = struct.pack("!B", _exp_len)
-        if pn.n.bit_length() < 512 or pn.n.bit_length() > 4096:
-            raise ValueError("unsupported RSA key length")
-        return exp_header + exp + pn.n.to_bytes((pn.n.bit_length() + 7) // 8, "big")
-
-    def encode_dsa_public_key(public_key: "dsa.DSAPublicKey") -> bytes:
-        """Encode a public key per RFC 2536, section 2."""
-        pn = public_key.public_numbers()
-        dsa_t = (public_key.key_size // 8 - 64) // 8
-        if dsa_t > 8:
-            raise ValueError("unsupported DSA key size")
-        octets = 64 + dsa_t * 8
-        res = struct.pack("!B", dsa_t)
-        res += pn.parameter_numbers.q.to_bytes(20, "big")
-        res += pn.parameter_numbers.p.to_bytes(octets, "big")
-        res += pn.parameter_numbers.g.to_bytes(octets, "big")
-        res += pn.y.to_bytes(octets, "big")
-        return res
-
-    def encode_ecdsa_public_key(public_key: "ec.EllipticCurvePublicKey") -> bytes:
-        """Encode a public key per RFC 6605, section 4."""
-        pn = public_key.public_numbers()
-        if isinstance(public_key.curve, ec.SECP256R1):
-            return pn.x.to_bytes(32, "big") + pn.y.to_bytes(32, "big")
-        elif isinstance(public_key.curve, ec.SECP384R1):
-            return pn.x.to_bytes(48, "big") + pn.y.to_bytes(48, "big")
-        else:
-            raise ValueError("unsupported ECDSA curve")
-
-    the_algorithm = Algorithm.make(algorithm)
-
-    _ensure_algorithm_key_combination(the_algorithm, public_key)
-
-    if isinstance(public_key, rsa.RSAPublicKey):
-        key_bytes = encode_rsa_public_key(public_key)
-    elif isinstance(public_key, dsa.DSAPublicKey):
-        key_bytes = encode_dsa_public_key(public_key)
-    elif isinstance(public_key, ec.EllipticCurvePublicKey):
-        key_bytes = encode_ecdsa_public_key(public_key)
-    elif isinstance(public_key, ed25519.Ed25519PublicKey):
-        key_bytes = public_key.public_bytes(
-            encoding=serialization.Encoding.Raw, format=serialization.PublicFormat.Raw
-        )
-    elif isinstance(public_key, ed448.Ed448PublicKey):
-        key_bytes = public_key.public_bytes(
-            encoding=serialization.Encoding.Raw, format=serialization.PublicFormat.Raw
-        )
-    else:
-        raise TypeError("unsupported key algorithm")
-
-    return DNSKEY(
-        rdclass=dns.rdataclass.IN,
-        rdtype=dns.rdatatype.DNSKEY,
-        flags=flags,
-        protocol=protocol,
-        algorithm=the_algorithm,
-        key=key_bytes,
-    )
+    algorithm = Algorithm.make(algorithm)
+
+    if isinstance(public_key, GenericPublicKey):
+        return public_key.to_dnskey(flags=flags, protocol=protocol)
+    else:
+        public_cls = get_algorithm_cls(algorithm).public_cls
+        return public_cls(key=public_key).to_dnskey(flags=flags, protocol=protocol)


 def _make_cdnskey(
@ -1216,23 +938,252 @@ def dnskey_rdataset_to_cdnskey_rdataset(
|
||||||
return dns.rdataset.from_rdata_list(rdataset.ttl, res)
|
return dns.rdataset.from_rdata_list(rdataset.ttl, res)
|
||||||
|
|
||||||
|
|
||||||
|
def default_rrset_signer(
|
||||||
|
txn: dns.transaction.Transaction,
|
||||||
|
rrset: dns.rrset.RRset,
|
||||||
|
signer: dns.name.Name,
|
||||||
|
ksks: List[Tuple[PrivateKey, DNSKEY]],
|
||||||
|
zsks: List[Tuple[PrivateKey, DNSKEY]],
|
||||||
|
inception: Optional[Union[datetime, str, int, float]] = None,
|
||||||
|
expiration: Optional[Union[datetime, str, int, float]] = None,
|
||||||
|
lifetime: Optional[int] = None,
|
||||||
|
policy: Optional[Policy] = None,
|
||||||
|
origin: Optional[dns.name.Name] = None,
|
||||||
|
) -> None:
|
||||||
|
"""Default RRset signer"""
|
||||||
|
|
||||||
|
if rrset.rdtype in set(
|
||||||
|
[
|
||||||
|
dns.rdatatype.RdataType.DNSKEY,
|
||||||
|
dns.rdatatype.RdataType.CDS,
|
||||||
|
dns.rdatatype.RdataType.CDNSKEY,
|
||||||
|
]
|
||||||
|
):
|
||||||
|
keys = ksks
|
||||||
|
else:
|
||||||
|
keys = zsks
|
||||||
|
|
||||||
|
for private_key, dnskey in keys:
|
||||||
|
rrsig = dns.dnssec.sign(
|
||||||
|
rrset=rrset,
|
||||||
|
private_key=private_key,
|
||||||
|
dnskey=dnskey,
|
||||||
|
inception=inception,
|
||||||
|
expiration=expiration,
|
||||||
|
lifetime=lifetime,
|
||||||
|
signer=signer,
|
||||||
|
policy=policy,
|
||||||
|
origin=origin,
|
||||||
|
)
|
||||||
|
txn.add(rrset.name, rrset.ttl, rrsig)
|
||||||
|
|
||||||
|
|
||||||
|
def sign_zone(
|
||||||
|
zone: dns.zone.Zone,
|
||||||
|
txn: Optional[dns.transaction.Transaction] = None,
|
||||||
|
keys: Optional[List[Tuple[PrivateKey, DNSKEY]]] = None,
|
||||||
|
add_dnskey: bool = True,
|
||||||
|
dnskey_ttl: Optional[int] = None,
|
||||||
|
inception: Optional[Union[datetime, str, int, float]] = None,
|
||||||
|
expiration: Optional[Union[datetime, str, int, float]] = None,
|
||||||
|
lifetime: Optional[int] = None,
|
||||||
|
nsec3: Optional[NSEC3PARAM] = None,
|
||||||
|
rrset_signer: Optional[RRsetSigner] = None,
|
||||||
|
policy: Optional[Policy] = None,
|
||||||
|
) -> None:
|
||||||
|
"""Sign zone.
|
||||||
|
|
||||||
|
*zone*, a ``dns.zone.Zone``, the zone to sign.
|
||||||
|
|
||||||
|
*txn*, a ``dns.transaction.Transaction``, an optional transaction to use for
|
||||||
|
signing.
|
||||||
|
|
||||||
|
*keys*, a list of (``PrivateKey``, ``DNSKEY``) tuples, to use for signing. KSK/ZSK
|
||||||
|
roles are assigned automatically if the SEP flag is used, otherwise all RRsets are
|
||||||
|
signed by all keys.
|
||||||
|
|
||||||
|
*add_dnskey*, a ``bool``. If ``True``, the default, all specified DNSKEYs are
|
||||||
|
automatically added to the zone on signing.
|
||||||
|
|
||||||
|
*dnskey_ttl*, a``int``, specifies the TTL for DNSKEY RRs. If not specified the TTL
|
||||||
|
of the existing DNSKEY RRset used or the TTL of the SOA RRset.
|
||||||
|
|
||||||
|
*inception*, a ``datetime``, ``str``, ``int``, ``float`` or ``None``, the signature
|
||||||
|
inception time. If ``None``, the current time is used. If a ``str``, the format is
|
||||||
|
"YYYYMMDDHHMMSS" or alternatively the number of seconds since the UNIX epoch in text
|
||||||
|
form; this is the same the RRSIG rdata's text form. Values of type `int` or `float`
|
||||||
|
are interpreted as seconds since the UNIX epoch.
|
||||||
|
|
||||||
|
*expiration*, a ``datetime``, ``str``, ``int``, ``float`` or ``None``, the signature
|
||||||
|
expiration time. If ``None``, the expiration time will be the inception time plus
|
||||||
|
the value of the *lifetime* parameter. See the description of *inception* above for
|
||||||
|
how the various parameter types are interpreted.
|
||||||
|
|
||||||
|
*lifetime*, an ``int`` or ``None``, the signature lifetime in seconds. This
|
||||||
|
parameter is only meaningful if *expiration* is ``None``.
|
||||||
|
|
||||||
|
*nsec3*, a ``NSEC3PARAM`` Rdata, configures signing using NSEC3. Not yet
|
||||||
|
implemented.
|
||||||
|
|
||||||
|
*rrset_signer*, a ``Callable``, an optional function for signing RRsets. The
|
||||||
|
function requires two arguments: transaction and RRset. If the not specified,
|
||||||
|
``dns.dnssec.default_rrset_signer`` will be used.
|
||||||
|
|
||||||
|
Returns ``None``.
|
||||||
|
"""

    ksks = []
    zsks = []

    # if we have both KSKs and ZSKs, split by SEP flag. if not, sign all
    # records with all keys
    if keys:
        for key in keys:
            if key[1].flags & Flag.SEP:
                ksks.append(key)
            else:
                zsks.append(key)
        if not ksks:
            ksks = keys
        if not zsks:
            zsks = keys
    else:
        keys = []

    if txn:
        cm: contextlib.AbstractContextManager = contextlib.nullcontext(txn)
    else:
        cm = zone.writer()

    with cm as _txn:
        if add_dnskey:
            if dnskey_ttl is None:
                dnskey = _txn.get(zone.origin, dns.rdatatype.DNSKEY)
                if dnskey:
                    dnskey_ttl = dnskey.ttl
                else:
                    soa = _txn.get(zone.origin, dns.rdatatype.SOA)
                    dnskey_ttl = soa.ttl
            for _, dnskey in keys:
                _txn.add(zone.origin, dnskey_ttl, dnskey)

        if nsec3:
            raise NotImplementedError("Signing with NSEC3 not yet implemented")
        else:
            _rrset_signer = rrset_signer or functools.partial(
                default_rrset_signer,
                signer=zone.origin,
                ksks=ksks,
                zsks=zsks,
                inception=inception,
                expiration=expiration,
                lifetime=lifetime,
                policy=policy,
                origin=zone.origin,
            )
            return _sign_zone_nsec(zone, _txn, _rrset_signer)


def _sign_zone_nsec(
    zone: dns.zone.Zone,
    txn: dns.transaction.Transaction,
    rrset_signer: Optional[RRsetSigner] = None,
) -> None:
    """NSEC zone signer"""

    def _txn_add_nsec(
        txn: dns.transaction.Transaction,
        name: dns.name.Name,
        next_secure: Optional[dns.name.Name],
        rdclass: dns.rdataclass.RdataClass,
        ttl: int,
        rrset_signer: Optional[RRsetSigner] = None,
    ) -> None:
        """NSEC zone signer helper"""
        mandatory_types = set(
            [dns.rdatatype.RdataType.RRSIG, dns.rdatatype.RdataType.NSEC]
        )
        node = txn.get_node(name)
        if node and next_secure:
            types = (
                set([rdataset.rdtype for rdataset in node.rdatasets]) | mandatory_types
            )
            windows = Bitmap.from_rdtypes(list(types))
            rrset = dns.rrset.from_rdata(
                name,
                ttl,
                NSEC(
                    rdclass=rdclass,
                    rdtype=dns.rdatatype.RdataType.NSEC,
                    next=next_secure,
                    windows=windows,
                ),
            )
            txn.add(rrset)
            if rrset_signer:
                rrset_signer(txn, rrset)

    rrsig_ttl = zone.get_soa().minimum
    delegation = None
    last_secure = None

    for name in sorted(txn.iterate_names()):
        if delegation and name.is_subdomain(delegation):
            # names below delegations are not secure
            continue
        elif txn.get(name, dns.rdatatype.NS) and name != zone.origin:
            # inside delegation
            delegation = name
        else:
            # outside delegation
            delegation = None

        if rrset_signer:
            node = txn.get_node(name)
            if node:
                for rdataset in node.rdatasets:
                    if rdataset.rdtype == dns.rdatatype.RRSIG:
                        # do not sign RRSIGs
                        continue
                    elif delegation and rdataset.rdtype != dns.rdatatype.DS:
                        # do not sign delegations except DS records
                        continue
                    else:
                        rrset = dns.rrset.from_rdata(name, rdataset.ttl, *rdataset)
                        rrset_signer(txn, rrset)

        # We need "is not None" as the empty name is False because its length is 0.
        if last_secure is not None:
            _txn_add_nsec(txn, last_secure, name, zone.rdclass, rrsig_ttl, rrset_signer)
        last_secure = name

    if last_secure:
        _txn_add_nsec(
            txn, last_secure, zone.origin, zone.rdclass, rrsig_ttl, rrset_signer
        )
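The new signing entry point is easiest to see end to end. The sketch below is illustrative only; the zone file name, origin, and lifetime are assumed values, not part of this change:

import dns.dnssec
import dns.zone
from dns.dnssecalgs.ecdsa import PrivateECDSAP256SHA256
from dns.rdtypes.dnskeybase import Flag

# hypothetical zone file and origin
zone = dns.zone.from_file("example.test.zone", origin="example.test.", relativize=False)

# generate a key and derive its DNSKEY (ZONE|SEP marks it as KSK-style)
private_key = PrivateECDSAP256SHA256.generate()
dnskey = private_key.public_key().to_dnskey(flags=Flag.ZONE | Flag.SEP)

# sign in place: adds the DNSKEY, then RRSIGs and an NSEC chain
dns.dnssec.sign_zone(zone, keys=[(private_key, dnskey)], lifetime=3600, add_dnskey=True)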
 def _need_pyca(*args, **kwargs):
     raise ImportError(
-        "DNSSEC validation requires " + "python cryptography"
+        "DNSSEC validation requires python cryptography"
     )  # pragma: no cover


 try:
     from cryptography.exceptions import InvalidSignature
-    from cryptography.hazmat.backends import default_backend
-    from cryptography.hazmat.primitives import hashes, serialization
-    from cryptography.hazmat.primitives.asymmetric import padding
-    from cryptography.hazmat.primitives.asymmetric import utils
-    from cryptography.hazmat.primitives.asymmetric import dsa
-    from cryptography.hazmat.primitives.asymmetric import ec
-    from cryptography.hazmat.primitives.asymmetric import ed25519
-    from cryptography.hazmat.primitives.asymmetric import ed448
-    from cryptography.hazmat.primitives.asymmetric import rsa
+    from cryptography.hazmat.primitives.asymmetric import dsa  # pylint: disable=W0611
+    from cryptography.hazmat.primitives.asymmetric import ec  # pylint: disable=W0611
+    from cryptography.hazmat.primitives.asymmetric import ed448  # pylint: disable=W0611
+    from cryptography.hazmat.primitives.asymmetric import rsa  # pylint: disable=W0611
+    from cryptography.hazmat.primitives.asymmetric import (  # pylint: disable=W0611
+        ed25519,
+    )
+
+    from dns.dnssecalgs import (  # pylint: disable=C0412
+        get_algorithm_cls,
+        get_algorithm_cls_from_dnskey,
+    )
+    from dns.dnssecalgs.base import GenericPrivateKey, GenericPublicKey
 except ImportError:  # pragma: no cover
     validate = _need_pyca
     validate_rrsig = _need_pyca
121
lib/dns/dnssecalgs/__init__.py
Normal file
@@ -0,0 +1,121 @@
from typing import Dict, Optional, Tuple, Type, Union

import dns.name

try:
    from dns.dnssecalgs.base import GenericPrivateKey
    from dns.dnssecalgs.dsa import PrivateDSA, PrivateDSANSEC3SHA1
    from dns.dnssecalgs.ecdsa import PrivateECDSAP256SHA256, PrivateECDSAP384SHA384
    from dns.dnssecalgs.eddsa import PrivateED448, PrivateED25519
    from dns.dnssecalgs.rsa import (
        PrivateRSAMD5,
        PrivateRSASHA1,
        PrivateRSASHA1NSEC3SHA1,
        PrivateRSASHA256,
        PrivateRSASHA512,
    )

    _have_cryptography = True
except ImportError:
    _have_cryptography = False

from dns.dnssectypes import Algorithm
from dns.exception import UnsupportedAlgorithm
from dns.rdtypes.ANY.DNSKEY import DNSKEY

AlgorithmPrefix = Optional[Union[bytes, dns.name.Name]]

algorithms: Dict[Tuple[Algorithm, AlgorithmPrefix], Type[GenericPrivateKey]] = {}
if _have_cryptography:
    algorithms.update(
        {
            (Algorithm.RSAMD5, None): PrivateRSAMD5,
            (Algorithm.DSA, None): PrivateDSA,
            (Algorithm.RSASHA1, None): PrivateRSASHA1,
            (Algorithm.DSANSEC3SHA1, None): PrivateDSANSEC3SHA1,
            (Algorithm.RSASHA1NSEC3SHA1, None): PrivateRSASHA1NSEC3SHA1,
            (Algorithm.RSASHA256, None): PrivateRSASHA256,
            (Algorithm.RSASHA512, None): PrivateRSASHA512,
            (Algorithm.ECDSAP256SHA256, None): PrivateECDSAP256SHA256,
            (Algorithm.ECDSAP384SHA384, None): PrivateECDSAP384SHA384,
            (Algorithm.ED25519, None): PrivateED25519,
            (Algorithm.ED448, None): PrivateED448,
        }
    )


def get_algorithm_cls(
    algorithm: Union[int, str], prefix: AlgorithmPrefix = None
) -> Type[GenericPrivateKey]:
    """Get Private Key class from Algorithm.

    *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm.

    Raises ``UnsupportedAlgorithm`` if the algorithm is unknown.

    Returns a ``dns.dnssecalgs.GenericPrivateKey``
    """
    algorithm = Algorithm.make(algorithm)
    cls = algorithms.get((algorithm, prefix))
    if cls:
        return cls
    raise UnsupportedAlgorithm(
        'algorithm "%s" not supported by dnspython' % Algorithm.to_text(algorithm)
    )


def get_algorithm_cls_from_dnskey(dnskey: DNSKEY) -> Type[GenericPrivateKey]:
    """Get Private Key class from DNSKEY.

    *dnskey*, a ``DNSKEY`` to get Algorithm class for.

    Raises ``UnsupportedAlgorithm`` if the algorithm is unknown.

    Returns a ``dns.dnssecalgs.GenericPrivateKey``
    """
    prefix: AlgorithmPrefix = None
    if dnskey.algorithm == Algorithm.PRIVATEDNS:
        prefix, _ = dns.name.from_wire(dnskey.key, 0)
    elif dnskey.algorithm == Algorithm.PRIVATEOID:
        length = int(dnskey.key[0])
        prefix = dnskey.key[0 : length + 1]
    return get_algorithm_cls(dnskey.algorithm, prefix)


def register_algorithm_cls(
    algorithm: Union[int, str],
    algorithm_cls: Type[GenericPrivateKey],
    name: Optional[Union[dns.name.Name, str]] = None,
    oid: Optional[bytes] = None,
) -> None:
    """Register Algorithm Private Key class.

    *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm.

    *algorithm_cls*: A `GenericPrivateKey` class.

    *name*, an optional ``dns.name.Name`` or ``str``, for PRIVATEDNS algorithms.

    *oid*: an optional BER-encoded `bytes` for PRIVATEOID algorithms.

    Raises ``ValueError`` if a name or oid is specified incorrectly.
    """
    if not issubclass(algorithm_cls, GenericPrivateKey):
        raise TypeError("Invalid algorithm class")
    algorithm = Algorithm.make(algorithm)
    prefix: AlgorithmPrefix = None
    if algorithm == Algorithm.PRIVATEDNS:
        if name is None:
            raise ValueError("Name required for PRIVATEDNS algorithms")
        if isinstance(name, str):
            name = dns.name.from_text(name)
        prefix = name
    elif algorithm == Algorithm.PRIVATEOID:
        if oid is None:
            raise ValueError("OID required for PRIVATEOID algorithms")
        prefix = bytes([len(oid)]) + oid
    elif name:
        raise ValueError("Name only supported for PRIVATEDNS algorithm")
    elif oid:
        raise ValueError("OID only supported for PRIVATEOID algorithm")
    algorithms[(algorithm, prefix)] = algorithm_cls
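A brief sketch of how this registry is used; the PRIVATEDNS name below is a made-up example:

from dns.dnssecalgs import get_algorithm_cls, register_algorithm_cls
from dns.dnssecalgs.ecdsa import PrivateECDSAP256SHA256
from dns.dnssectypes import Algorithm

# resolve the private-key class for a standard algorithm
cls = get_algorithm_cls(Algorithm.ECDSAP256SHA256)
assert cls is PrivateECDSAP256SHA256

# register a class for a PRIVATEDNS algorithm, keyed by a hypothetical name
register_algorithm_cls(Algorithm.PRIVATEDNS, PrivateECDSAP256SHA256, name="alg.example.")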
84
lib/dns/dnssecalgs/base.py
Normal file
@@ -0,0 +1,84 @@
from abc import ABC, abstractmethod  # pylint: disable=no-name-in-module
from typing import Any, Optional, Type

import dns.rdataclass
import dns.rdatatype
from dns.dnssectypes import Algorithm
from dns.exception import AlgorithmKeyMismatch
from dns.rdtypes.ANY.DNSKEY import DNSKEY
from dns.rdtypes.dnskeybase import Flag


class GenericPublicKey(ABC):
    algorithm: Algorithm

    @abstractmethod
    def __init__(self, key: Any) -> None:
        pass

    @abstractmethod
    def verify(self, signature: bytes, data: bytes) -> None:
        """Verify signed DNSSEC data"""

    @abstractmethod
    def encode_key_bytes(self) -> bytes:
        """Encode key as bytes for DNSKEY"""

    @classmethod
    def _ensure_algorithm_key_combination(cls, key: DNSKEY) -> None:
        if key.algorithm != cls.algorithm:
            raise AlgorithmKeyMismatch

    def to_dnskey(self, flags: int = Flag.ZONE, protocol: int = 3) -> DNSKEY:
        """Return public key as DNSKEY"""
        return DNSKEY(
            rdclass=dns.rdataclass.IN,
            rdtype=dns.rdatatype.DNSKEY,
            flags=flags,
            protocol=protocol,
            algorithm=self.algorithm,
            key=self.encode_key_bytes(),
        )

    @classmethod
    @abstractmethod
    def from_dnskey(cls, key: DNSKEY) -> "GenericPublicKey":
        """Create public key from DNSKEY"""

    @classmethod
    @abstractmethod
    def from_pem(cls, public_pem: bytes) -> "GenericPublicKey":
        """Create public key from PEM-encoded SubjectPublicKeyInfo as specified
        in RFC 5280"""

    @abstractmethod
    def to_pem(self) -> bytes:
        """Return public-key as PEM-encoded SubjectPublicKeyInfo as specified
        in RFC 5280"""


class GenericPrivateKey(ABC):
    public_cls: Type[GenericPublicKey]

    @abstractmethod
    def __init__(self, key: Any) -> None:
        pass

    @abstractmethod
    def sign(self, data: bytes, verify: bool = False) -> bytes:
        """Sign DNSSEC data"""

    @abstractmethod
    def public_key(self) -> "GenericPublicKey":
        """Return public key instance"""

    @classmethod
    @abstractmethod
    def from_pem(
        cls, private_pem: bytes, password: Optional[bytes] = None
    ) -> "GenericPrivateKey":
        """Create private key from PEM-encoded PKCS#8"""

    @abstractmethod
    def to_pem(self, password: Optional[bytes] = None) -> bytes:
        """Return private key as PEM-encoded PKCS#8"""
68
lib/dns/dnssecalgs/cryptography.py
Normal file
@@ -0,0 +1,68 @@
from typing import Any, Optional, Type

from cryptography.hazmat.primitives import serialization

from dns.dnssecalgs.base import GenericPrivateKey, GenericPublicKey
from dns.exception import AlgorithmKeyMismatch


class CryptographyPublicKey(GenericPublicKey):
    key: Any = None
    key_cls: Any = None

    def __init__(self, key: Any) -> None:  # pylint: disable=super-init-not-called
        if self.key_cls is None:
            raise TypeError("Undefined private key class")
        if not isinstance(  # pylint: disable=isinstance-second-argument-not-valid-type
            key, self.key_cls
        ):
            raise AlgorithmKeyMismatch
        self.key = key

    @classmethod
    def from_pem(cls, public_pem: bytes) -> "GenericPublicKey":
        key = serialization.load_pem_public_key(public_pem)
        return cls(key=key)

    def to_pem(self) -> bytes:
        return self.key.public_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PublicFormat.SubjectPublicKeyInfo,
        )


class CryptographyPrivateKey(GenericPrivateKey):
    key: Any = None
    key_cls: Any = None
    public_cls: Type[CryptographyPublicKey]

    def __init__(self, key: Any) -> None:  # pylint: disable=super-init-not-called
        if self.key_cls is None:
            raise TypeError("Undefined private key class")
        if not isinstance(  # pylint: disable=isinstance-second-argument-not-valid-type
            key, self.key_cls
        ):
            raise AlgorithmKeyMismatch
        self.key = key

    def public_key(self) -> "CryptographyPublicKey":
        return self.public_cls(key=self.key.public_key())

    @classmethod
    def from_pem(
        cls, private_pem: bytes, password: Optional[bytes] = None
    ) -> "GenericPrivateKey":
        key = serialization.load_pem_private_key(private_pem, password=password)
        return cls(key=key)

    def to_pem(self, password: Optional[bytes] = None) -> bytes:
        encryption_algorithm: serialization.KeySerializationEncryption
        if password:
            encryption_algorithm = serialization.BestAvailableEncryption(password)
        else:
            encryption_algorithm = serialization.NoEncryption()
        return self.key.private_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PrivateFormat.PKCS8,
            encryption_algorithm=encryption_algorithm,
        )
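Since all concrete classes inherit these PEM helpers, private keys round-trip through encrypted PKCS#8; a minimal sketch (the passphrase is an example value):

from dns.dnssecalgs.ecdsa import PrivateECDSAP256SHA256

key = PrivateECDSAP256SHA256.generate()
pem = key.to_pem(password=b"example-passphrase")  # encrypted PKCS#8
restored = PrivateECDSAP256SHA256.from_pem(pem, password=b"example-passphrase")
print(restored.public_key().to_pem().decode())    # SubjectPublicKeyInfo PEM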
101
lib/dns/dnssecalgs/dsa.py
Normal file
@@ -0,0 +1,101 @@
import struct

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import dsa, utils

from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey
from dns.dnssectypes import Algorithm
from dns.rdtypes.ANY.DNSKEY import DNSKEY


class PublicDSA(CryptographyPublicKey):
    key: dsa.DSAPublicKey
    key_cls = dsa.DSAPublicKey
    algorithm = Algorithm.DSA
    chosen_hash = hashes.SHA1()

    def verify(self, signature: bytes, data: bytes) -> None:
        sig_r = signature[1:21]
        sig_s = signature[21:]
        sig = utils.encode_dss_signature(
            int.from_bytes(sig_r, "big"), int.from_bytes(sig_s, "big")
        )
        self.key.verify(sig, data, self.chosen_hash)

    def encode_key_bytes(self) -> bytes:
        """Encode a public key per RFC 2536, section 2."""
        pn = self.key.public_numbers()
        dsa_t = (self.key.key_size // 8 - 64) // 8
        if dsa_t > 8:
            raise ValueError("unsupported DSA key size")
        octets = 64 + dsa_t * 8
        res = struct.pack("!B", dsa_t)
        res += pn.parameter_numbers.q.to_bytes(20, "big")
        res += pn.parameter_numbers.p.to_bytes(octets, "big")
        res += pn.parameter_numbers.g.to_bytes(octets, "big")
        res += pn.y.to_bytes(octets, "big")
        return res

    @classmethod
    def from_dnskey(cls, key: DNSKEY) -> "PublicDSA":
        cls._ensure_algorithm_key_combination(key)
        keyptr = key.key
        (t,) = struct.unpack("!B", keyptr[0:1])
        keyptr = keyptr[1:]
        octets = 64 + t * 8
        dsa_q = keyptr[0:20]
        keyptr = keyptr[20:]
        dsa_p = keyptr[0:octets]
        keyptr = keyptr[octets:]
        dsa_g = keyptr[0:octets]
        keyptr = keyptr[octets:]
        dsa_y = keyptr[0:octets]
        return cls(
            key=dsa.DSAPublicNumbers(  # type: ignore
                int.from_bytes(dsa_y, "big"),
                dsa.DSAParameterNumbers(
                    int.from_bytes(dsa_p, "big"),
                    int.from_bytes(dsa_q, "big"),
                    int.from_bytes(dsa_g, "big"),
                ),
            ).public_key(default_backend()),
        )


class PrivateDSA(CryptographyPrivateKey):
    key: dsa.DSAPrivateKey
    key_cls = dsa.DSAPrivateKey
    public_cls = PublicDSA

    def sign(self, data: bytes, verify: bool = False) -> bytes:
        """Sign using a private key per RFC 2536, section 3."""
        public_dsa_key = self.key.public_key()
        if public_dsa_key.key_size > 1024:
            raise ValueError("DSA key size overflow")
        der_signature = self.key.sign(data, self.public_cls.chosen_hash)
        dsa_r, dsa_s = utils.decode_dss_signature(der_signature)
        dsa_t = (public_dsa_key.key_size // 8 - 64) // 8
        octets = 20
        signature = (
            struct.pack("!B", dsa_t)
            + int.to_bytes(dsa_r, length=octets, byteorder="big")
            + int.to_bytes(dsa_s, length=octets, byteorder="big")
        )
        if verify:
            self.public_key().verify(signature, data)
        return signature

    @classmethod
    def generate(cls, key_size: int) -> "PrivateDSA":
        return cls(
            key=dsa.generate_private_key(key_size=key_size),
        )


class PublicDSANSEC3SHA1(PublicDSA):
    algorithm = Algorithm.DSANSEC3SHA1


class PrivateDSANSEC3SHA1(PrivateDSA):
    public_cls = PublicDSANSEC3SHA1
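The RFC 2536 size arithmetic above is easy to sanity-check by hand: the leading T octet satisfies octets = 64 + T * 8, so a 1024-bit key uses T = 8:

key_size = 1024
dsa_t = (key_size // 8 - 64) // 8  # (128 - 64) // 8 == 8
octets = 64 + dsa_t * 8            # 128 octets each for p, g, and y
assert octets == key_size // 8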
89
lib/dns/dnssecalgs/ecdsa.py
Normal file
@@ -0,0 +1,89 @@
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec, utils

from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey
from dns.dnssectypes import Algorithm
from dns.rdtypes.ANY.DNSKEY import DNSKEY


class PublicECDSA(CryptographyPublicKey):
    key: ec.EllipticCurvePublicKey
    key_cls = ec.EllipticCurvePublicKey
    algorithm: Algorithm
    chosen_hash: hashes.HashAlgorithm
    curve: ec.EllipticCurve
    octets: int

    def verify(self, signature: bytes, data: bytes) -> None:
        sig_r = signature[0 : self.octets]
        sig_s = signature[self.octets :]
        sig = utils.encode_dss_signature(
            int.from_bytes(sig_r, "big"), int.from_bytes(sig_s, "big")
        )
        self.key.verify(sig, data, ec.ECDSA(self.chosen_hash))

    def encode_key_bytes(self) -> bytes:
        """Encode a public key per RFC 6605, section 4."""
        pn = self.key.public_numbers()
        return pn.x.to_bytes(self.octets, "big") + pn.y.to_bytes(self.octets, "big")

    @classmethod
    def from_dnskey(cls, key: DNSKEY) -> "PublicECDSA":
        cls._ensure_algorithm_key_combination(key)
        ecdsa_x = key.key[0 : cls.octets]
        ecdsa_y = key.key[cls.octets : cls.octets * 2]
        return cls(
            key=ec.EllipticCurvePublicNumbers(
                curve=cls.curve,
                x=int.from_bytes(ecdsa_x, "big"),
                y=int.from_bytes(ecdsa_y, "big"),
            ).public_key(default_backend()),
        )


class PrivateECDSA(CryptographyPrivateKey):
    key: ec.EllipticCurvePrivateKey
    key_cls = ec.EllipticCurvePrivateKey
    public_cls = PublicECDSA

    def sign(self, data: bytes, verify: bool = False) -> bytes:
        """Sign using a private key per RFC 6605, section 4."""
        der_signature = self.key.sign(data, ec.ECDSA(self.public_cls.chosen_hash))
        dsa_r, dsa_s = utils.decode_dss_signature(der_signature)
        signature = int.to_bytes(
            dsa_r, length=self.public_cls.octets, byteorder="big"
        ) + int.to_bytes(dsa_s, length=self.public_cls.octets, byteorder="big")
        if verify:
            self.public_key().verify(signature, data)
        return signature

    @classmethod
    def generate(cls) -> "PrivateECDSA":
        return cls(
            key=ec.generate_private_key(
                curve=cls.public_cls.curve, backend=default_backend()
            ),
        )


class PublicECDSAP256SHA256(PublicECDSA):
    algorithm = Algorithm.ECDSAP256SHA256
    chosen_hash = hashes.SHA256()
    curve = ec.SECP256R1()
    octets = 32


class PrivateECDSAP256SHA256(PrivateECDSA):
    public_cls = PublicECDSAP256SHA256


class PublicECDSAP384SHA384(PublicECDSA):
    algorithm = Algorithm.ECDSAP384SHA384
    chosen_hash = hashes.SHA384()
    curve = ec.SECP384R1()
    octets = 48


class PrivateECDSAP384SHA384(PrivateECDSA):
    public_cls = PublicECDSAP384SHA384
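A short sign/verify round trip with the P-256 class, as a sketch (the message bytes are arbitrary):

from dns.dnssecalgs.ecdsa import PrivateECDSAP256SHA256

key = PrivateECDSAP256SHA256.generate()
signature = key.sign(b"example data", verify=True)  # verify=True re-checks with the public key
assert len(signature) == 64                         # r and s, 32 octets each (RFC 6605)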
65
lib/dns/dnssecalgs/eddsa.py
Normal file
@@ -0,0 +1,65 @@
from typing import Type

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ed448, ed25519

from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey
from dns.dnssectypes import Algorithm
from dns.rdtypes.ANY.DNSKEY import DNSKEY


class PublicEDDSA(CryptographyPublicKey):
    def verify(self, signature: bytes, data: bytes) -> None:
        self.key.verify(signature, data)

    def encode_key_bytes(self) -> bytes:
        """Encode a public key per RFC 8080, section 3."""
        return self.key.public_bytes(
            encoding=serialization.Encoding.Raw, format=serialization.PublicFormat.Raw
        )

    @classmethod
    def from_dnskey(cls, key: DNSKEY) -> "PublicEDDSA":
        cls._ensure_algorithm_key_combination(key)
        return cls(
            key=cls.key_cls.from_public_bytes(key.key),
        )


class PrivateEDDSA(CryptographyPrivateKey):
    public_cls: Type[PublicEDDSA]

    def sign(self, data: bytes, verify: bool = False) -> bytes:
        """Sign using a private key per RFC 8080, section 4."""
        signature = self.key.sign(data)
        if verify:
            self.public_key().verify(signature, data)
        return signature

    @classmethod
    def generate(cls) -> "PrivateEDDSA":
        return cls(key=cls.key_cls.generate())


class PublicED25519(PublicEDDSA):
    key: ed25519.Ed25519PublicKey
    key_cls = ed25519.Ed25519PublicKey
    algorithm = Algorithm.ED25519


class PrivateED25519(PrivateEDDSA):
    key: ed25519.Ed25519PrivateKey
    key_cls = ed25519.Ed25519PrivateKey
    public_cls = PublicED25519


class PublicED448(PublicEDDSA):
    key: ed448.Ed448PublicKey
    key_cls = ed448.Ed448PublicKey
    algorithm = Algorithm.ED448


class PrivateED448(PrivateEDDSA):
    key: ed448.Ed448PrivateKey
    key_cls = ed448.Ed448PrivateKey
    public_cls = PublicED448
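Because RFC 8080 uses the raw public bytes, EdDSA keys round-trip through DNSKEY without re-encoding; a sketch:

from dns.dnssecalgs.eddsa import PrivateED25519, PublicED25519

key = PrivateED25519.generate()
dnskey = key.public_key().to_dnskey()
assert len(dnskey.key) == 32  # raw Ed25519 public key
PublicED25519.from_dnskey(dnskey).verify(key.sign(b"example data"), b"example data")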
119
lib/dns/dnssecalgs/rsa.py
Normal file
@@ -0,0 +1,119 @@
import math
import struct

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa

from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey
from dns.dnssectypes import Algorithm
from dns.rdtypes.ANY.DNSKEY import DNSKEY


class PublicRSA(CryptographyPublicKey):
    key: rsa.RSAPublicKey
    key_cls = rsa.RSAPublicKey
    algorithm: Algorithm
    chosen_hash: hashes.HashAlgorithm

    def verify(self, signature: bytes, data: bytes) -> None:
        self.key.verify(signature, data, padding.PKCS1v15(), self.chosen_hash)

    def encode_key_bytes(self) -> bytes:
        """Encode a public key per RFC 3110, section 2."""
        pn = self.key.public_numbers()
        _exp_len = math.ceil(int.bit_length(pn.e) / 8)
        exp = int.to_bytes(pn.e, length=_exp_len, byteorder="big")
        if _exp_len > 255:
            exp_header = b"\0" + struct.pack("!H", _exp_len)
        else:
            exp_header = struct.pack("!B", _exp_len)
        if pn.n.bit_length() < 512 or pn.n.bit_length() > 4096:
            raise ValueError("unsupported RSA key length")
        return exp_header + exp + pn.n.to_bytes((pn.n.bit_length() + 7) // 8, "big")

    @classmethod
    def from_dnskey(cls, key: DNSKEY) -> "PublicRSA":
        cls._ensure_algorithm_key_combination(key)
        keyptr = key.key
        (bytes_,) = struct.unpack("!B", keyptr[0:1])
        keyptr = keyptr[1:]
        if bytes_ == 0:
            (bytes_,) = struct.unpack("!H", keyptr[0:2])
            keyptr = keyptr[2:]
        rsa_e = keyptr[0:bytes_]
        rsa_n = keyptr[bytes_:]
        return cls(
            key=rsa.RSAPublicNumbers(
                int.from_bytes(rsa_e, "big"), int.from_bytes(rsa_n, "big")
            ).public_key(default_backend())
        )


class PrivateRSA(CryptographyPrivateKey):
    key: rsa.RSAPrivateKey
    key_cls = rsa.RSAPrivateKey
    public_cls = PublicRSA
    default_public_exponent = 65537

    def sign(self, data: bytes, verify: bool = False) -> bytes:
        """Sign using a private key per RFC 3110, section 3."""
        signature = self.key.sign(data, padding.PKCS1v15(), self.public_cls.chosen_hash)
        if verify:
            self.public_key().verify(signature, data)
        return signature

    @classmethod
    def generate(cls, key_size: int) -> "PrivateRSA":
        return cls(
            key=rsa.generate_private_key(
                public_exponent=cls.default_public_exponent,
                key_size=key_size,
                backend=default_backend(),
            )
        )


class PublicRSAMD5(PublicRSA):
    algorithm = Algorithm.RSAMD5
    chosen_hash = hashes.MD5()


class PrivateRSAMD5(PrivateRSA):
    public_cls = PublicRSAMD5


class PublicRSASHA1(PublicRSA):
    algorithm = Algorithm.RSASHA1
    chosen_hash = hashes.SHA1()


class PrivateRSASHA1(PrivateRSA):
    public_cls = PublicRSASHA1


class PublicRSASHA1NSEC3SHA1(PublicRSA):
    algorithm = Algorithm.RSASHA1NSEC3SHA1
    chosen_hash = hashes.SHA1()


class PrivateRSASHA1NSEC3SHA1(PrivateRSA):
    public_cls = PublicRSASHA1NSEC3SHA1


class PublicRSASHA256(PublicRSA):
    algorithm = Algorithm.RSASHA256
    chosen_hash = hashes.SHA256()


class PrivateRSASHA256(PrivateRSA):
    public_cls = PublicRSASHA256


class PublicRSASHA512(PublicRSA):
    algorithm = Algorithm.RSASHA512
    chosen_hash = hashes.SHA512()


class PrivateRSASHA512(PrivateRSA):
    public_cls = PublicRSASHA512
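The RFC 3110 exponent header above is one length octet when the exponent needs at most 255 octets, otherwise a zero octet plus a two-octet length; for the default exponent 65537 the encoding works out as follows:

import math
import struct

e = 65537
exp_len = math.ceil(e.bit_length() / 8)  # 17 bits -> 3 octets
exp_header = struct.pack("!B", exp_len)  # single length octet, since 3 <= 255
assert exp_header + e.to_bytes(exp_len, "big") == b"\x03\x01\x00\x01"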
@@ -17,11 +17,10 @@

 """EDNS Options"""

-from typing import Any, Dict, Optional, Union
-
 import math
 import socket
 import struct
+from typing import Any, Dict, Optional, Union

 import dns.enum
 import dns.inet
@@ -380,7 +379,7 @@ class EDEOption(Option):  # lgtm[py/missing-equals]
     def from_wire_parser(
         cls, otype: Union[OptionType, str], parser: "dns.wire.Parser"
     ) -> Option:
-        the_code = EDECode.make(parser.get_uint16())
+        code = EDECode.make(parser.get_uint16())
         text = parser.get_remaining()

         if text:
@@ -390,7 +389,7 @@ class EDEOption(Option):  # lgtm[py/missing-equals]
         else:
             btext = None

-        return cls(the_code, btext)
+        return cls(code, btext)


 _type_to_class: Dict[OptionType, Any] = {
@@ -424,8 +423,8 @@ def option_from_wire_parser(

     Returns an instance of a subclass of ``dns.edns.Option``.
     """
-    the_otype = OptionType.make(otype)
-    cls = get_option_class(the_otype)
+    otype = OptionType.make(otype)
+    cls = get_option_class(otype)
     return cls.from_wire_parser(otype, parser)
@@ -15,17 +15,15 @@
 # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
 # OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

-from typing import Any, Optional
-
-import os
 import hashlib
+import os
 import random
 import threading
 import time
+from typing import Any, Optional


 class EntropyPool:

     # This is an entropy pool for Python implementations that do not
     # have a working SystemRandom.  I'm not sure there are any, but
     # leaving this code doesn't hurt anything as the library code
@@ -16,18 +16,31 @@
 # OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

 import enum
+from typing import Type, TypeVar, Union
+
+TIntEnum = TypeVar("TIntEnum", bound="IntEnum")


 class IntEnum(enum.IntEnum):
     @classmethod
-    def _check_value(cls, value):
-        max = cls._maximum()
-        if value < 0 or value > max:
-            name = cls._short_name()
-            raise ValueError(f"{name} must be between >= 0 and <= {max}")
+    def _missing_(cls, value):
+        cls._check_value(value)
+        val = int.__new__(cls, value)
+        val._name_ = cls._extra_to_text(value, None) or f"{cls._prefix()}{value}"
+        val._value_ = value
+        return val

     @classmethod
-    def from_text(cls, text):
+    def _check_value(cls, value):
+        max = cls._maximum()
+        if not isinstance(value, int):
+            raise TypeError
+        if value < 0 or value > max:
+            name = cls._short_name()
+            raise ValueError(f"{name} must be an int between >= 0 and <= {max}")
+
+    @classmethod
+    def from_text(cls: Type[TIntEnum], text: str) -> TIntEnum:
         text = text.upper()
         try:
             return cls[text]
@@ -47,7 +60,7 @@ class IntEnum(enum.IntEnum):
         raise cls._unknown_exception_class()

     @classmethod
-    def to_text(cls, value):
+    def to_text(cls: Type[TIntEnum], value: int) -> str:
         cls._check_value(value)
         try:
             text = cls(value).name
@@ -59,7 +72,7 @@ class IntEnum(enum.IntEnum):
         return text

     @classmethod
-    def make(cls, value):
+    def make(cls: Type[TIntEnum], value: Union[int, str]) -> TIntEnum:
         """Convert text or a value into an enumerated type, if possible.

         *value*, the ``int`` or ``str`` to convert.

@@ -76,10 +89,7 @@ class IntEnum(enum.IntEnum):
         if isinstance(value, str):
             return cls.from_text(value)
         cls._check_value(value)
-        try:
-            return cls(value)
-        except ValueError:
-            return value
+        return cls(value)

     @classmethod
     def _maximum(cls):
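The new _missing_ hook is what lets make() drop its try/except: unknown but in-range values now materialize as synthetic members instead of falling back to plain ints. A quick illustration with an existing subclass (the exact rendered name is an assumption based on the TYPE prefix convention):

import dns.rdatatype

rdtype = dns.rdatatype.RdataType.make(65280)  # private-use type code
print(rdtype.name, int(rdtype))               # expected: "TYPE65280" 65280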
@@ -140,6 +140,22 @@ class Timeout(DNSException):
         super().__init__(*args, **kwargs)


+class UnsupportedAlgorithm(DNSException):
+    """The DNSSEC algorithm is not supported."""
+
+
+class AlgorithmKeyMismatch(UnsupportedAlgorithm):
+    """The DNSSEC algorithm is not supported for the given key type."""
+
+
+class ValidationFailure(DNSException):
+    """The DNSSEC signature is invalid."""
+
+
+class DeniedByPolicy(DNSException):
+    """Denied by DNSSEC policy."""
+
+
 class ExceptionWrapper:
     def __init__(self, exception_class):
         self.exception_class = exception_class
@@ -17,9 +17,8 @@

 """DNS Message Flags."""

-from typing import Any
-
 import enum
+from typing import Any

 # Standard DNS flags
@@ -1,8 +1,7 @@
 # Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license

-from typing import Any
-
 import collections.abc
+from typing import Any

 from dns._immutable_ctx import immutable
@@ -17,14 +17,12 @@

 """Generic Internet address helper functions."""

-from typing import Any, Optional, Tuple
-
 import socket
+from typing import Any, Optional, Tuple

 import dns.ipv4
 import dns.ipv6

 # We assume that AF_INET and AF_INET6 are always defined.  We keep
 # these here for the benefit of any old code (unlikely though that
 # is!).
@@ -171,3 +169,12 @@ def low_level_address_tuple(
         return tup
     else:
         raise NotImplementedError(f"unknown address family {af}")
+
+
+def any_for_af(af):
+    """Return the 'any' address for the specified address family."""
+    if af == socket.AF_INET:
+        return "0.0.0.0"
+    elif af == socket.AF_INET6:
+        return "::"
+    raise NotImplementedError(f"unknown address family {af}")
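Usage of the new helper is a one-liner per address family:

import socket

import dns.inet

assert dns.inet.any_for_af(socket.AF_INET) == "0.0.0.0"
assert dns.inet.any_for_af(socket.AF_INET6) == "::"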
@@ -17,9 +17,8 @@

 """IPv4 helper functions."""

-from typing import Union
-
 import struct
+from typing import Union

 import dns.exception
@@ -17,10 +17,9 @@

 """IPv6 helper functions."""

-from typing import List, Union
-
-import re
 import binascii
+import re
+from typing import List, Union

 import dns.exception
 import dns.ipv4
@@ -17,30 +17,29 @@

 """DNS Messages"""

-from typing import Any, Dict, List, Optional, Tuple, Union
-
 import contextlib
 import io
 import time
+from typing import Any, Dict, List, Optional, Tuple, Union

-import dns.wire
 import dns.edns
+import dns.entropy
 import dns.enum
 import dns.exception
 import dns.flags
 import dns.name
 import dns.opcode
-import dns.entropy
 import dns.rcode
 import dns.rdata
 import dns.rdataclass
 import dns.rdatatype
-import dns.rrset
-import dns.renderer
-import dns.ttl
-import dns.tsig
 import dns.rdtypes.ANY.OPT
 import dns.rdtypes.ANY.TSIG
+import dns.renderer
+import dns.rrset
+import dns.tsig
+import dns.ttl
+import dns.wire


 class ShortHeader(dns.exception.FormError):
@@ -135,7 +134,7 @@ IndexKeyType = Tuple[
     Optional[dns.rdataclass.RdataClass],
 ]
 IndexType = Dict[IndexKeyType, dns.rrset.RRset]
-SectionType = Union[int, List[dns.rrset.RRset]]
+SectionType = Union[int, str, List[dns.rrset.RRset]]


 class Message:
@@ -231,7 +230,7 @@ class Message:
         s.write("payload %d\n" % self.payload)
         for opt in self.options:
             s.write("option %s\n" % opt.to_text())
-        for (name, which) in self._section_enum.__members__.items():
+        for name, which in self._section_enum.__members__.items():
             s.write(f";{name}\n")
             for rrset in self.section_from_number(which):
                 s.write(rrset.to_text(origin, relativize, **kw))
@@ -348,27 +347,29 @@ class Message:
         deleting: Optional[dns.rdataclass.RdataClass] = None,
         create: bool = False,
         force_unique: bool = False,
+        idna_codec: Optional[dns.name.IDNACodec] = None,
     ) -> dns.rrset.RRset:
         """Find the RRset with the given attributes in the specified section.

-        *section*, an ``int`` section number, or one of the section
-        attributes of this message.  This specifies the
+        *section*, an ``int`` section number, a ``str`` section name, or one of
+        the section attributes of this message.  This specifies the
         the section of the message to search.  For example::

             my_message.find_rrset(my_message.answer, name, rdclass, rdtype)
             my_message.find_rrset(dns.message.ANSWER, name, rdclass, rdtype)
+            my_message.find_rrset("ANSWER", name, rdclass, rdtype)

-        *name*, a ``dns.name.Name``, the name of the RRset.
+        *name*, a ``dns.name.Name`` or ``str``, the name of the RRset.

-        *rdclass*, an ``int``, the class of the RRset.
+        *rdclass*, an ``int`` or ``str``, the class of the RRset.

-        *rdtype*, an ``int``, the type of the RRset.
+        *rdtype*, an ``int`` or ``str``, the type of the RRset.

-        *covers*, an ``int`` or ``None``, the covers value of the RRset.
-        The default is ``None``.
+        *covers*, an ``int`` or ``str``, the covers value of the RRset.
+        The default is ``dns.rdatatype.NONE``.

-        *deleting*, an ``int`` or ``None``, the deleting value of the RRset.
-        The default is ``None``.
+        *deleting*, an ``int``, ``str``, or ``None``, the deleting value of the
+        RRset.  The default is ``None``.

         *create*, a ``bool``.  If ``True``, create the RRset if it is not found.
         The created RRset is appended to *section*.
@@ -378,6 +379,10 @@ class Message:
         already.  The default is ``False``.  This is useful when creating
         DDNS Update messages, as order matters for them.

+        *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA
+        encoder/decoder.  If ``None``, the default IDNA 2003 encoder/decoder
+        is used.
+
         Raises ``KeyError`` if the RRset was not found and create was
         ``False``.

@@ -386,10 +391,19 @@ class Message:

         if isinstance(section, int):
             section_number = section
-            the_section = self.section_from_number(section_number)
+            section = self.section_from_number(section_number)
+        elif isinstance(section, str):
+            section_number = MessageSection.from_text(section)
+            section = self.section_from_number(section_number)
         else:
             section_number = self.section_number(section)
-            the_section = section
+        if isinstance(name, str):
+            name = dns.name.from_text(name, idna_codec=idna_codec)
+        rdtype = dns.rdatatype.RdataType.make(rdtype)
+        rdclass = dns.rdataclass.RdataClass.make(rdclass)
+        covers = dns.rdatatype.RdataType.make(covers)
+        if deleting is not None:
+            deleting = dns.rdataclass.RdataClass.make(deleting)
         key = (section_number, name, rdclass, rdtype, covers, deleting)
         if not force_unique:
             if self.index is not None:
@@ -397,13 +411,13 @@ class Message:
                 if rrset is not None:
                     return rrset
             else:
-                for rrset in the_section:
+                for rrset in section:
                     if rrset.full_match(name, rdclass, rdtype, covers, deleting):
                         return rrset
         if not create:
             raise KeyError
         rrset = dns.rrset.RRset(name, rdclass, rdtype, covers, deleting)
-        the_section.append(rrset)
+        section.append(rrset)
         if self.index is not None:
             self.index[key] = rrset
         return rrset
@@ -418,29 +432,31 @@ class Message:
         deleting: Optional[dns.rdataclass.RdataClass] = None,
         create: bool = False,
         force_unique: bool = False,
+        idna_codec: Optional[dns.name.IDNACodec] = None,
     ) -> Optional[dns.rrset.RRset]:
         """Get the RRset with the given attributes in the specified section.

         If the RRset is not found, None is returned.

-        *section*, an ``int`` section number, or one of the section
-        attributes of this message.  This specifies the
+        *section*, an ``int`` section number, a ``str`` section name, or one of
+        the section attributes of this message.  This specifies the
         the section of the message to search.  For example::

             my_message.get_rrset(my_message.answer, name, rdclass, rdtype)
             my_message.get_rrset(dns.message.ANSWER, name, rdclass, rdtype)
+            my_message.get_rrset("ANSWER", name, rdclass, rdtype)

-        *name*, a ``dns.name.Name``, the name of the RRset.
+        *name*, a ``dns.name.Name`` or ``str``, the name of the RRset.

-        *rdclass*, an ``int``, the class of the RRset.
+        *rdclass*, an ``int`` or ``str``, the class of the RRset.

-        *rdtype*, an ``int``, the type of the RRset.
+        *rdtype*, an ``int`` or ``str``, the type of the RRset.

-        *covers*, an ``int`` or ``None``, the covers value of the RRset.
-        The default is ``None``.
+        *covers*, an ``int`` or ``str``, the covers value of the RRset.
+        The default is ``dns.rdatatype.NONE``.

-        *deleting*, an ``int`` or ``None``, the deleting value of the RRset.
-        The default is ``None``.
+        *deleting*, an ``int``, ``str``, or ``None``, the deleting value of the
+        RRset.  The default is ``None``.

         *create*, a ``bool``.  If ``True``, create the RRset if it is not found.
         The created RRset is appended to *section*.
@@ -450,12 +466,24 @@ class Message:
         already.  The default is ``False``.  This is useful when creating
         DDNS Update messages, as order matters for them.

+        *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA
+        encoder/decoder.  If ``None``, the default IDNA 2003 encoder/decoder
+        is used.
+
         Returns a ``dns.rrset.RRset object`` or ``None``.
         """

         try:
             rrset = self.find_rrset(
-                section, name, rdclass, rdtype, covers, deleting, create, force_unique
+                section,
+                name,
+                rdclass,
+                rdtype,
+                covers,
+                deleting,
+                create,
+                force_unique,
+                idna_codec,
             )
         except KeyError:
             rrset = None
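With string sections and text arguments accepted, RRset lookups become one-liners; a sketch, where the resolver address is a placeholder from the documentation range:

import dns.message
import dns.query

query = dns.message.make_query("www.example.", "A")
response = dns.query.udp(query, "192.0.2.53", timeout=2.0)  # placeholder resolver
rrset = response.get_rrset("ANSWER", "www.example.", "IN", "A")
if rrset is not None:
    print(rrset.to_text())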
@@ -1708,13 +1736,11 @@ def make_query(

     if isinstance(qname, str):
         qname = dns.name.from_text(qname, idna_codec=idna_codec)
-    the_rdtype = dns.rdatatype.RdataType.make(rdtype)
-    the_rdclass = dns.rdataclass.RdataClass.make(rdclass)
+    rdtype = dns.rdatatype.RdataType.make(rdtype)
+    rdclass = dns.rdataclass.RdataClass.make(rdclass)
     m = QueryMessage(id=id)
     m.flags = dns.flags.Flag(flags)
-    m.find_rrset(
-        m.question, qname, the_rdclass, the_rdtype, create=True, force_unique=True
-    )
+    m.find_rrset(m.question, qname, rdclass, rdtype, create=True, force_unique=True)
     # only pass keywords on to use_edns if they have been set to a
     # non-None value.  Setting a field will turn EDNS on if it hasn't
     # been configured.
@@ -18,12 +18,10 @@
 """DNS Names.
 """

-from typing import Any, Dict, Iterable, Optional, Tuple, Union
-
 import copy
-import struct
-
 import encodings.idna  # type: ignore
+import struct
+from typing import Any, Dict, Iterable, Optional, Tuple, Union

 try:
     import idna  # type: ignore
@@ -33,10 +31,9 @@ except ImportError:  # pragma: no cover
     have_idna_2008 = False

 import dns.enum
-import dns.wire
 import dns.exception
 import dns.immutable
+import dns.wire

 CompressType = Dict["Name", int]
329
lib/dns/nameserver.py
Normal file
@@ -0,0 +1,329 @@
from typing import Optional, Union
from urllib.parse import urlparse

import dns.asyncbackend
import dns.asyncquery
import dns.inet
import dns.message
import dns.query


class Nameserver:
    def __init__(self):
        pass

    def __str__(self):
        raise NotImplementedError

    def kind(self) -> str:
        raise NotImplementedError

    def is_always_max_size(self) -> bool:
        raise NotImplementedError

    def answer_nameserver(self) -> str:
        raise NotImplementedError

    def answer_port(self) -> int:
        raise NotImplementedError

    def query(
        self,
        request: dns.message.QueryMessage,
        timeout: float,
        source: Optional[str],
        source_port: int,
        max_size: bool,
        one_rr_per_rrset: bool = False,
        ignore_trailing: bool = False,
    ) -> dns.message.Message:
        raise NotImplementedError

    async def async_query(
        self,
        request: dns.message.QueryMessage,
        timeout: float,
        source: Optional[str],
        source_port: int,
        max_size: bool,
        backend: dns.asyncbackend.Backend,
        one_rr_per_rrset: bool = False,
        ignore_trailing: bool = False,
    ) -> dns.message.Message:
        raise NotImplementedError


class AddressAndPortNameserver(Nameserver):
    def __init__(self, address: str, port: int):
        super().__init__()
        self.address = address
        self.port = port

    def kind(self) -> str:
        raise NotImplementedError

    def is_always_max_size(self) -> bool:
        return False

    def __str__(self):
        ns_kind = self.kind()
        return f"{ns_kind}:{self.address}@{self.port}"

    def answer_nameserver(self) -> str:
        return self.address

    def answer_port(self) -> int:
        return self.port


class Do53Nameserver(AddressAndPortNameserver):
    def __init__(self, address: str, port: int = 53):
        super().__init__(address, port)

    def kind(self):
        return "Do53"

    def query(
        self,
        request: dns.message.QueryMessage,
        timeout: float,
        source: Optional[str],
        source_port: int,
        max_size: bool,
        one_rr_per_rrset: bool = False,
        ignore_trailing: bool = False,
    ) -> dns.message.Message:
        if max_size:
            response = dns.query.tcp(
                request,
                self.address,
                timeout=timeout,
                port=self.port,
                source=source,
                source_port=source_port,
                one_rr_per_rrset=one_rr_per_rrset,
                ignore_trailing=ignore_trailing,
            )
        else:
            response = dns.query.udp(
                request,
                self.address,
                timeout=timeout,
                port=self.port,
                source=source,
                source_port=source_port,
                raise_on_truncation=True,
                one_rr_per_rrset=one_rr_per_rrset,
                ignore_trailing=ignore_trailing,
            )
        return response

    async def async_query(
        self,
        request: dns.message.QueryMessage,
        timeout: float,
        source: Optional[str],
        source_port: int,
        max_size: bool,
        backend: dns.asyncbackend.Backend,
        one_rr_per_rrset: bool = False,
        ignore_trailing: bool = False,
    ) -> dns.message.Message:
        if max_size:
            response = await dns.asyncquery.tcp(
                request,
                self.address,
                timeout=timeout,
                port=self.port,
                source=source,
                source_port=source_port,
                backend=backend,
                one_rr_per_rrset=one_rr_per_rrset,
                ignore_trailing=ignore_trailing,
            )
        else:
            response = await dns.asyncquery.udp(
                request,
                self.address,
                timeout=timeout,
                port=self.port,
                source=source,
                source_port=source_port,
                raise_on_truncation=True,
                backend=backend,
                one_rr_per_rrset=one_rr_per_rrset,
                ignore_trailing=ignore_trailing,
            )
        return response


class DoHNameserver(Nameserver):
    def __init__(self, url: str, bootstrap_address: Optional[str] = None):
        super().__init__()
        self.url = url
        self.bootstrap_address = bootstrap_address

    def kind(self):
        return "DoH"

    def is_always_max_size(self) -> bool:
        return True

    def __str__(self):
        return self.url

    def answer_nameserver(self) -> str:
        return self.url

    def answer_port(self) -> int:
        port = urlparse(self.url).port
        if port is None:
            port = 443
        return port

    def query(
        self,
        request: dns.message.QueryMessage,
        timeout: float,
        source: Optional[str],
        source_port: int,
        max_size: bool = False,
        one_rr_per_rrset: bool = False,
        ignore_trailing: bool = False,
    ) -> dns.message.Message:
        return dns.query.https(
            request,
            self.url,
            timeout=timeout,
            bootstrap_address=self.bootstrap_address,
            one_rr_per_rrset=one_rr_per_rrset,
            ignore_trailing=ignore_trailing,
        )

    async def async_query(
        self,
        request: dns.message.QueryMessage,
        timeout: float,
        source: Optional[str],
        source_port: int,
        max_size: bool,
        backend: dns.asyncbackend.Backend,
        one_rr_per_rrset: bool = False,
        ignore_trailing: bool = False,
    ) -> dns.message.Message:
        return await dns.asyncquery.https(
            request,
            self.url,
            timeout=timeout,
            one_rr_per_rrset=one_rr_per_rrset,
            ignore_trailing=ignore_trailing,
        )


class DoTNameserver(AddressAndPortNameserver):
    def __init__(self, address: str, port: int = 853, hostname: Optional[str] = None):
        super().__init__(address, port)
        self.hostname = hostname

    def kind(self):
        return "DoT"

    def query(
        self,
        request: dns.message.QueryMessage,
        timeout: float,
        source: Optional[str],
        source_port: int,
        max_size: bool = False,
        one_rr_per_rrset: bool = False,
        ignore_trailing: bool = False,
    ) -> dns.message.Message:
        return dns.query.tls(
            request,
            self.address,
            port=self.port,
            timeout=timeout,
            one_rr_per_rrset=one_rr_per_rrset,
            ignore_trailing=ignore_trailing,
            server_hostname=self.hostname,
        )

    async def async_query(
|
||||||
|
self,
|
||||||
|
request: dns.message.QueryMessage,
|
||||||
|
timeout: float,
|
||||||
|
source: Optional[str],
|
||||||
|
source_port: int,
|
||||||
|
max_size: bool,
|
||||||
|
backend: dns.asyncbackend.Backend,
|
||||||
|
one_rr_per_rrset: bool = False,
|
||||||
|
ignore_trailing: bool = False,
|
||||||
|
) -> dns.message.Message:
|
||||||
|
return await dns.asyncquery.tls(
|
||||||
|
request,
|
||||||
|
self.address,
|
||||||
|
port=self.port,
|
||||||
|
timeout=timeout,
|
||||||
|
one_rr_per_rrset=one_rr_per_rrset,
|
||||||
|
ignore_trailing=ignore_trailing,
|
||||||
|
server_hostname=self.hostname,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class DoQNameserver(AddressAndPortNameserver):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
address: str,
|
||||||
|
port: int = 853,
|
||||||
|
verify: Union[bool, str] = True,
|
||||||
|
server_hostname: Optional[str] = None,
|
||||||
|
):
|
||||||
|
super().__init__(address, port)
|
||||||
|
self.verify = verify
|
||||||
|
self.server_hostname = server_hostname
|
||||||
|
|
||||||
|
def kind(self):
|
||||||
|
return "DoQ"
|
||||||
|
|
||||||
|
def query(
|
||||||
|
self,
|
||||||
|
request: dns.message.QueryMessage,
|
||||||
|
timeout: float,
|
||||||
|
source: Optional[str],
|
||||||
|
source_port: int,
|
||||||
|
max_size: bool = False,
|
||||||
|
one_rr_per_rrset: bool = False,
|
||||||
|
ignore_trailing: bool = False,
|
||||||
|
) -> dns.message.Message:
|
||||||
|
return dns.query.quic(
|
||||||
|
request,
|
||||||
|
self.address,
|
||||||
|
port=self.port,
|
||||||
|
timeout=timeout,
|
||||||
|
one_rr_per_rrset=one_rr_per_rrset,
|
||||||
|
ignore_trailing=ignore_trailing,
|
||||||
|
verify=self.verify,
|
||||||
|
server_hostname=self.server_hostname,
|
||||||
|
)
|
||||||
|
|
||||||
|
async def async_query(
|
||||||
|
self,
|
||||||
|
request: dns.message.QueryMessage,
|
||||||
|
timeout: float,
|
||||||
|
source: Optional[str],
|
||||||
|
source_port: int,
|
||||||
|
max_size: bool,
|
||||||
|
backend: dns.asyncbackend.Backend,
|
||||||
|
one_rr_per_rrset: bool = False,
|
||||||
|
ignore_trailing: bool = False,
|
||||||
|
) -> dns.message.Message:
|
||||||
|
return await dns.asyncquery.quic(
|
||||||
|
request,
|
||||||
|
self.address,
|
||||||
|
port=self.port,
|
||||||
|
timeout=timeout,
|
||||||
|
one_rr_per_rrset=one_rr_per_rrset,
|
||||||
|
ignore_trailing=ignore_trailing,
|
||||||
|
verify=self.verify,
|
||||||
|
server_hostname=self.server_hostname,
|
||||||
|
)
|
|
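The classes above share one calling convention, which makes the transports interchangeable. A minimal sketch of driving one directly (not part of the diff; the module path dns.nameserver and the server address are illustrative assumptions):

import dns.message
import dns.nameserver  # module path assumed from the classes above

# Build a query and send it through a plain-DNS nameserver object.
ns = dns.nameserver.Do53Nameserver("8.8.8.8")
request = dns.message.make_query("example.com", "A")
# Same signature as query() above; max_size=False selects the UDP path.
response = ns.query(request, timeout=5.0, source=None, source_port=0, max_size=False)
print(response.answer)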
lib/dns/node.py

@@ -17,19 +17,17 @@
 """DNS nodes. A node is a set of rdatasets."""

-from typing import Any, Dict, Optional
-
 import enum
 import io
+from typing import Any, Dict, Optional

 import dns.immutable
 import dns.name
 import dns.rdataclass
 import dns.rdataset
 import dns.rdatatype
-import dns.rrset
 import dns.renderer
+import dns.rrset

 _cname_types = {
     dns.rdatatype.CNAME,
248  lib/dns/query.py
@@ -17,8 +17,6 @@
 """Talk to a DNS server."""

-from typing import Any, Dict, Optional, Tuple, Union
-
 import base64
 import contextlib
 import enum

@@ -28,12 +26,12 @@ import selectors
 import socket
 import struct
 import time
-import urllib.parse
+from typing import Any, Dict, Optional, Tuple, Union

 import dns.exception
 import dns.inet
-import dns.name
 import dns.message
+import dns.name
 import dns.quic
 import dns.rcode
 import dns.rdataclass

@@ -43,20 +41,32 @@ import dns.transaction
 import dns.tsig
 import dns.xfr

-try:
-    import requests
-    from requests_toolbelt.adapters.source import SourceAddressAdapter
-    from requests_toolbelt.adapters.host_header_ssl import HostHeaderSSLAdapter
-
-    _have_requests = True
-except ImportError:  # pragma: no cover
-    _have_requests = False
+
+def _remaining(expiration):
+    if expiration is None:
+        return None
+    timeout = expiration - time.time()
+    if timeout <= 0.0:
+        raise dns.exception.Timeout
+    return timeout
+
+
+def _expiration_for_this_attempt(timeout, expiration):
+    if expiration is None:
+        return None
+    return min(time.time() + timeout, expiration)
+

 _have_httpx = False
 _have_http2 = False
 try:
+    import httpcore
+    import httpcore._backends.sync
     import httpx
+
+    _CoreNetworkBackend = httpcore.NetworkBackend
+    _CoreSyncStream = httpcore._backends.sync.SyncStream

     _have_httpx = True
     try:
         # See if http2 support is available.
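A small worked example of the two helpers added above: _remaining converts an absolute deadline back into a per-call timeout, and _expiration_for_this_attempt caps a single attempt without overshooting the overall deadline. The sketch restates them standalone so it runs on its own:

import time

def remaining(expiration):
    # Standalone restatement for illustration only.
    if expiration is None:
        return None
    timeout = expiration - time.time()
    if timeout <= 0.0:
        raise TimeoutError  # the real code raises dns.exception.Timeout
    return timeout

def expiration_for_this_attempt(timeout, expiration):
    if expiration is None:
        return None
    return min(time.time() + timeout, expiration)

overall = time.time() + 5.0                          # 5 second total budget
attempt = expiration_for_this_attempt(2.0, overall)  # at most 2s for this try
print(remaining(attempt))                            # roughly 2.0 seconds left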
@@ -64,10 +74,87 @@ try:
         _have_http2 = True
     except Exception:
         pass
-except ImportError:  # pragma: no cover
-    pass

-have_doh = _have_requests or _have_httpx
+    class _NetworkBackend(_CoreNetworkBackend):
+        def __init__(self, resolver, local_port, bootstrap_address, family):
+            super().__init__()
+            self._local_port = local_port
+            self._resolver = resolver
+            self._bootstrap_address = bootstrap_address
+            self._family = family
+
+        def connect_tcp(
+            self, host, port, timeout, local_address, socket_options=None
+        ):  # pylint: disable=signature-differs
+            addresses = []
+            _, expiration = _compute_times(timeout)
+            if dns.inet.is_address(host):
+                addresses.append(host)
+            elif self._bootstrap_address is not None:
+                addresses.append(self._bootstrap_address)
+            else:
+                timeout = _remaining(expiration)
+                family = self._family
+                if local_address:
+                    family = dns.inet.af_for_address(local_address)
+                answers = self._resolver.resolve_name(
+                    host, family=family, lifetime=timeout
+                )
+                addresses = answers.addresses()
+            for address in addresses:
+                af = dns.inet.af_for_address(address)
+                if local_address is not None or self._local_port != 0:
+                    source = dns.inet.low_level_address_tuple(
+                        (local_address, self._local_port), af
+                    )
+                else:
+                    source = None
+                sock = _make_socket(af, socket.SOCK_STREAM, source)
+                attempt_expiration = _expiration_for_this_attempt(2.0, expiration)
+                try:
+                    _connect(
+                        sock,
+                        dns.inet.low_level_address_tuple((address, port), af),
+                        attempt_expiration,
+                    )
+                    return _CoreSyncStream(sock)
+                except Exception:
+                    pass
+            raise httpcore.ConnectError
+
+        def connect_unix_socket(
+            self, path, timeout, socket_options=None
+        ):  # pylint: disable=signature-differs
+            raise NotImplementedError
+
+    class _HTTPTransport(httpx.HTTPTransport):
+        def __init__(
+            self,
+            *args,
+            local_port=0,
+            bootstrap_address=None,
+            resolver=None,
+            family=socket.AF_UNSPEC,
+            **kwargs,
+        ):
+            if resolver is None:
+                # pylint: disable=import-outside-toplevel,redefined-outer-name
+                import dns.resolver
+
+                resolver = dns.resolver.Resolver()
+            super().__init__(*args, **kwargs)
+            self._pool._network_backend = _NetworkBackend(
+                resolver, local_port, bootstrap_address, family
+            )
+
+except ImportError:  # pragma: no cover
+
+    class _HTTPTransport:  # type: ignore
+        def connect_tcp(self, host, port, timeout, local_address):
+            raise NotImplementedError
+
+
+have_doh = _have_httpx

 try:
     import ssl
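For context, this is roughly how https() (changed further below) wires the transport up, so that httpx's TCP connects go through dnspython's own resolver instead of getaddrinfo(). A sketch only; it pokes at private dns.query internals and uses an illustrative bootstrap IP:

import socket
import httpx  # assumed importable, as in the guarded block above
from dns.query import _HTTPTransport, _have_http2  # private internals, shown for illustration

transport = _HTTPTransport(
    local_address=None,
    http1=True,
    http2=_have_http2,
    verify=True,
    local_port=0,
    bootstrap_address="203.0.113.53",  # illustrative IP; skips name resolution
    resolver=None,                     # None means a fresh dns.resolver.Resolver()
    family=socket.AF_UNSPEC,
)
client = httpx.Client(http1=True, http2=_have_http2, verify=True, transport=transport)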
@@ -88,7 +175,7 @@ except ImportError:  # pragma: no cover

     @classmethod
     def create_default_context(cls, *args, **kwargs):
-        raise Exception("no ssl support")
+        raise Exception("no ssl support")  # pylint: disable=broad-exception-raised


 # Function used to create a socket. Can be overridden if needed in special
@@ -105,7 +192,7 @@ class BadResponse(dns.exception.FormError):


 class NoDOH(dns.exception.DNSException):
-    """DNS over HTTPS (DOH) was requested but the requests module is not
+    """DNS over HTTPS (DOH) was requested but the httpx module is not
     available."""
@@ -230,7 +317,7 @@ def _destination_and_source(
             # We know the destination af, so source had better agree!
             if saf != af:
                 raise ValueError(
-                    "different address families for source " + "and destination"
+                    "different address families for source and destination"
                 )
     else:
         # We didn't know the destination af, but we know the source,
@@ -240,11 +327,10 @@ def _destination_and_source(
             # Caller has specified a source_port but not an address, so we
             # need to return a source, and we need to use the appropriate
            # wildcard address as the address.
-            if af == socket.AF_INET:
-                source = "0.0.0.0"
-            elif af == socket.AF_INET6:
-                source = "::"
-            else:
+            try:
+                source = dns.inet.any_for_af(af)
+            except Exception:
+                # we catch this and raise ValueError for backwards compatibility
                 raise ValueError("source_port specified but address family is unknown")
     # Convert high-level (address, port) tuples into low-level address
     # tuples.
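The replaced branch hard-coded the wildcard addresses; dns.inet.any_for_af centralizes that mapping. A hedged sketch of the equivalence, which should hold given the literals removed above:

import socket
import dns.inet

# Expected per the removed branch: the wildcard ("any") address per family.
assert dns.inet.any_for_af(socket.AF_INET) == "0.0.0.0"
assert dns.inet.any_for_af(socket.AF_INET6) == "::"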
@@ -289,6 +375,8 @@ def https(
     post: bool = True,
     bootstrap_address: Optional[str] = None,
     verify: Union[bool, str] = True,
+    resolver: Optional["dns.resolver.Resolver"] = None,
+    family: Optional[int] = socket.AF_UNSPEC,
 ) -> dns.message.Message:
     """Return the response obtained after sending a query via DNS-over-HTTPS.
@@ -314,91 +402,78 @@ def https(
     *ignore_trailing*, a ``bool``. If ``True``, ignore trailing junk at end of the
     received message.

-    *session*, an ``httpx.Client`` or ``requests.session.Session``. If provided, the
-    client/session to use to send the queries.
+    *session*, an ``httpx.Client``. If provided, the client session to use to send the
+    queries.

     *path*, a ``str``. If *where* is an IP address, then *path* will be used to
     construct the URL to send the DNS query to.

     *post*, a ``bool``. If ``True``, the default, POST method will be used.

-    *bootstrap_address*, a ``str``, the IP address to use to bypass the system's DNS
-    resolver.
+    *bootstrap_address*, a ``str``, the IP address to use to bypass resolution.

     *verify*, a ``bool`` or ``str``. If a ``True``, then TLS certificate verification
     of the server is done using the default CA bundle; if ``False``, then no
     verification is done; if a `str` then it specifies the path to a certificate file or
     directory which will be used for verification.

+    *resolver*, a ``dns.resolver.Resolver`` or ``None``, the resolver to use for
+    resolution of hostnames in URLs. If not specified, a new resolver with a default
+    configuration will be used; note this is *not* the default resolver as that resolver
+    might have been configured to use DoH causing a chicken-and-egg problem. This
+    parameter only has an effect if the HTTP library is httpx.
+
+    *family*, an ``int``, the address family. If socket.AF_UNSPEC (the default), both A
+    and AAAA records will be retrieved.
+
     Returns a ``dns.message.Message``.
     """

     if not have_doh:
-        raise NoDOH("Neither httpx nor requests is available.")  # pragma: no cover
+        raise NoDOH  # pragma: no cover
+    if session and not isinstance(session, httpx.Client):
+        raise ValueError("session parameter must be an httpx.Client")

-    _httpx_ok = _have_httpx
     wire = q.to_wire()
-    (af, _, source) = _destination_and_source(where, port, source, source_port, False)
-    transport_adapter = None
+    (af, _, the_source) = _destination_and_source(
+        where, port, source, source_port, False
+    )
     transport = None
     headers = {"accept": "application/dns-message"}
-    if af is not None:
+    if af is not None and dns.inet.is_address(where):
         if af == socket.AF_INET:
             url = "https://{}:{}{}".format(where, port, path)
         elif af == socket.AF_INET6:
             url = "https://[{}]:{}{}".format(where, port, path)
-    elif bootstrap_address is not None:
-        _httpx_ok = False
-        split_url = urllib.parse.urlsplit(where)
-        if split_url.hostname is None:
-            raise ValueError("DoH URL has no hostname")
-        headers["Host"] = split_url.hostname
-        url = where.replace(split_url.hostname, bootstrap_address)
-        if _have_requests:
-            transport_adapter = HostHeaderSSLAdapter()
     else:
         url = where
-    if source is not None:
-        # set source port and source address
-        if _have_httpx:
-            if source_port == 0:
-                transport = httpx.HTTPTransport(local_address=source[0], verify=verify)
-            else:
-                _httpx_ok = False
-        if _have_requests:
-            transport_adapter = SourceAddressAdapter(source)
-
-    if session:
-        if _have_httpx:
-            _is_httpx = isinstance(session, httpx.Client)
-        else:
-            _is_httpx = False
-        if _is_httpx and not _httpx_ok:
-            raise NoDOH(
-                "Session is httpx, but httpx cannot be used for "
-                "the requested operation."
-            )
-    else:
-        _is_httpx = _httpx_ok
-
-    if not _httpx_ok and not _have_requests:
-        raise NoDOH(
-            "Cannot use httpx for this operation, and requests is not available."
-        )
+    if the_source is None:
+        local_address = None
+        local_port = 0
+    else:
+        local_address = the_source[0]
+        local_port = the_source[1]
+    transport = _HTTPTransport(
+        local_address=local_address,
+        http1=True,
+        http2=_have_http2,
+        verify=verify,
+        local_port=local_port,
+        bootstrap_address=bootstrap_address,
+        resolver=resolver,
+        family=family,
+    )

     if session:
         cm: contextlib.AbstractContextManager = contextlib.nullcontext(session)
-    elif _is_httpx:
+    else:
         cm = httpx.Client(
             http1=True, http2=_have_http2, verify=verify, transport=transport
         )
-    else:
-        cm = requests.sessions.Session()
     with cm as session:
-        if transport_adapter and not _is_httpx:
-            session.mount(url, transport_adapter)
-
         # see https://tools.ietf.org/html/rfc8484#section-4.1.1 for DoH
         # GET and POST examples
         if post:
@@ -408,29 +483,13 @@ def https(
                     "content-length": str(len(wire)),
                 }
             )
-            if _is_httpx:
-                response = session.post(
-                    url, headers=headers, content=wire, timeout=timeout
-                )
-            else:
-                response = session.post(
-                    url, headers=headers, data=wire, timeout=timeout, verify=verify
-                )
+            response = session.post(url, headers=headers, content=wire, timeout=timeout)
         else:
             wire = base64.urlsafe_b64encode(wire).rstrip(b"=")
-            if _is_httpx:
-                twire = wire.decode()  # httpx does a repr() if we give it bytes
-                response = session.get(
-                    url, headers=headers, timeout=timeout, params={"dns": twire}
-                )
-            else:
-                response = session.get(
-                    url,
-                    headers=headers,
-                    timeout=timeout,
-                    verify=verify,
-                    params={"dns": wire},
-                )
+            twire = wire.decode()  # httpx does a repr() if we give it bytes
+            response = session.get(
+                url, headers=headers, timeout=timeout, params={"dns": twire}
+            )

     # see https://tools.ietf.org/html/rfc8484#section-4.2.1 for info about DoH
     # status codes
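A worked example of the GET encoding kept above (RFC 8484 section 4.1.1): the request wire format is base64url-encoded, padding is stripped, and the result is passed as the "dns" query parameter.

import base64
import dns.message

wire = dns.message.make_query("example.com", "A").to_wire()
dns_param = base64.urlsafe_b64encode(wire).rstrip(b"=").decode()
# then, as in the new code path above:
#     session.get(url, headers=headers, timeout=timeout, params={"dns": dns_param})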
@@ -1070,6 +1129,7 @@ def quic(
     ignore_trailing: bool = False,
     connection: Optional[dns.quic.SyncQuicConnection] = None,
     verify: Union[bool, str] = True,
+    server_hostname: Optional[str] = None,
 ) -> dns.message.Message:
     """Return the response obtained after sending a query via DNS-over-QUIC.
@@ -1101,6 +1161,10 @@ def quic(
     verification is done; if a `str` then it specifies the path to a certificate file or
     directory which will be used for verification.

+    *server_hostname*, a ``str`` containing the server's hostname. The
+    default is ``None``, which means that no hostname is known, and if an
+    SSL context is created, hostname checking will be disabled.
+
     Returns a ``dns.message.Message``.
     """
@@ -1115,16 +1179,18 @@ def quic(
         manager: contextlib.AbstractContextManager = contextlib.nullcontext(None)
         the_connection = connection
     else:
-        manager = dns.quic.SyncQuicManager(verify_mode=verify)
+        manager = dns.quic.SyncQuicManager(
+            verify_mode=verify, server_name=server_hostname
+        )
         the_manager = manager  # for type checking happiness

     with manager:
         if not connection:
             the_connection = the_manager.connect(where, port, source, source_port)
-        start = time.time()
-        with the_connection.make_stream() as stream:
+        (start, expiration) = _compute_times(timeout)
+        with the_connection.make_stream(timeout) as stream:
             stream.send(wire, True)
-            wire = stream.receive(timeout)
+            wire = stream.receive(_remaining(expiration))
         finish = time.time()
     r = dns.message.from_wire(
         wire,
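An illustrative call (not from the diff) exercising the new server_hostname parameter; the address and hostname below are assumed to be a public DoQ endpoint and serve only as placeholders:

import dns.message
import dns.query

q = dns.message.make_query("example.com", "A")
r = dns.query.quic(
    q,
    "94.140.14.14",                         # example DoQ server address
    timeout=5.0,
    server_hostname="dns.adguard-dns.com",  # enables TLS hostname checking
)
print(r.answer)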
lib/dns/quic/__init__.py
@@ -5,13 +5,13 @@ try:

     import dns.asyncbackend
     from dns._asyncbackend import NullContext
-    from dns.quic._sync import SyncQuicManager, SyncQuicConnection, SyncQuicStream
     from dns.quic._asyncio import (
-        AsyncioQuicManager,
         AsyncioQuicConnection,
+        AsyncioQuicManager,
         AsyncioQuicStream,
     )
     from dns.quic._common import AsyncQuicConnection, AsyncQuicManager
+    from dns.quic._sync import SyncQuicConnection, SyncQuicManager, SyncQuicStream

     have_quic = True

@@ -33,9 +33,10 @@ try:
     try:
         import trio

         from dns.quic._trio import (  # pylint: disable=ungrouped-imports
-            TrioQuicManager,
             TrioQuicConnection,
+            TrioQuicManager,
             TrioQuicStream,
         )
lib/dns/quic/_asyncio.py
@@ -9,14 +9,16 @@ import time
 import aioquic.quic.configuration  # type: ignore
 import aioquic.quic.connection  # type: ignore
 import aioquic.quic.events  # type: ignore
-import dns.inet
-import dns.asyncbackend

+import dns.asyncbackend
+import dns.exception
+import dns.inet
 from dns.quic._common import (
-    BaseQuicStream,
+    QUIC_MAX_DATAGRAM,
     AsyncQuicConnection,
     AsyncQuicManager,
-    QUIC_MAX_DATAGRAM,
+    BaseQuicStream,
+    UnexpectedEOF,
 )
@@ -30,15 +32,15 @@ class AsyncioQuicStream(BaseQuicStream):
         await self._wake_up.wait()

     async def wait_for(self, amount, expiration):
-        timeout = self._timeout_from_expiration(expiration)
         while True:
+            timeout = self._timeout_from_expiration(expiration)
             if self._buffer.have(amount):
                 return
             self._expecting = amount
             try:
                 await asyncio.wait_for(self._wait_for_wake_up(), timeout)
-            except Exception:
-                pass
+            except TimeoutError:
+                raise dns.exception.Timeout
             self._expecting = 0

     async def receive(self, timeout=None):
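The same pattern recurs in make_stream() below: bound the wait with asyncio.wait_for() and translate the timeout into the library's exception. A standalone sketch of the idiom (note that before Python 3.11 asyncio raises asyncio.TimeoutError, which this code assumes is catchable as TimeoutError):

import asyncio
import dns.exception

async def wait_with_deadline(event: asyncio.Event, timeout=None):
    # Illustrative helper, not part of the diff.
    try:
        await asyncio.wait_for(event.wait(), timeout)
    except TimeoutError:
        raise dns.exception.Timeout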
@@ -86,8 +88,10 @@ class AsyncioQuicConnection(AsyncQuicConnection):
         try:
             af = dns.inet.af_for_address(self._address)
             backend = dns.asyncbackend.get_backend("asyncio")
+            # Note that peer is a low-level address tuple, but make_socket() wants
+            # a high-level address tuple, so we convert.
             self._socket = await backend.make_socket(
-                af, socket.SOCK_DGRAM, 0, self._source, self._peer
+                af, socket.SOCK_DGRAM, 0, self._source, (self._peer[0], self._peer[1])
             )
             self._socket_created.set()
             async with self._socket:
@@ -106,6 +110,11 @@ class AsyncioQuicConnection(AsyncQuicConnection):
                 self._wake_timer.notify_all()
         except Exception:
             pass
+        finally:
+            self._done = True
+            async with self._wake_timer:
+                self._wake_timer.notify_all()
+            self._handshake_complete.set()

     async def _wait_for_wake_timer(self):
         async with self._wake_timer:
@@ -115,7 +124,7 @@ class AsyncioQuicConnection(AsyncQuicConnection):
         await self._socket_created.wait()
         while not self._done:
             datagrams = self._connection.datagrams_to_send(time.time())
-            for (datagram, address) in datagrams:
+            for datagram, address in datagrams:
                 assert address == self._peer[0]
                 await self._socket.sendto(datagram, self._peer, None)
             (expiration, interval) = self._get_timer_values()
@@ -160,8 +169,13 @@ class AsyncioQuicConnection(AsyncQuicConnection):
         self._receiver_task = asyncio.Task(self._receiver())
         self._sender_task = asyncio.Task(self._sender())

-    async def make_stream(self):
-        await self._handshake_complete.wait()
+    async def make_stream(self, timeout=None):
+        try:
+            await asyncio.wait_for(self._handshake_complete.wait(), timeout)
+        except TimeoutError:
+            raise dns.exception.Timeout
+        if self._done:
+            raise UnexpectedEOF
         stream_id = self._connection.get_next_available_stream_id(False)
         stream = AsyncioQuicStream(self, stream_id)
         self._streams[stream_id] = stream
@@ -172,6 +186,9 @@ class AsyncioQuicConnection(AsyncQuicConnection):
             self._manager.closed(self._peer[0], self._peer[1])
             self._closed = True
             self._connection.close()
+            # sender might be blocked on this, so set it
+            self._socket_created.set()
+            await self._socket.close()
             async with self._wake_timer:
                 self._wake_timer.notify_all()
             try:
@@ -185,8 +202,8 @@ class AsyncioQuicConnection(AsyncQuicConnection):


 class AsyncioQuicManager(AsyncQuicManager):
-    def __init__(self, conf=None, verify_mode=ssl.CERT_REQUIRED):
-        super().__init__(conf, verify_mode, AsyncioQuicConnection)
+    def __init__(self, conf=None, verify_mode=ssl.CERT_REQUIRED, server_name=None):
+        super().__init__(conf, verify_mode, AsyncioQuicConnection, server_name)

     def connect(self, address, port=853, source=None, source_port=0):
         (connection, start) = self._connect(address, port, source, source_port)
@@ -198,7 +215,7 @@ class AsyncioQuicManager(AsyncQuicManager):
         return self

     async def __aexit__(self, exc_type, exc_val, exc_tb):
-        # Copy the itertor into a list as exiting things will mutate the connections
+        # Copy the iterator into a list as exiting things will mutate the connections
         # table.
         connections = list(self._connections.values())
         for connection in connections:
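Putting the async pieces together, an illustrative top-level call that mirrors the dns.asyncquery.quic() usage seen in DoQNameserver.async_query earlier; the server address is an example placeholder:

import asyncio
import dns.asyncquery
import dns.message

async def main():
    q = dns.message.make_query("example.com", "A")
    r = await dns.asyncquery.quic(q, "94.140.14.14", timeout=5.0)
    print(r.answer)

asyncio.run(main())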
Some files were not shown because too many files have changed in this diff.