mirror of https://github.com/Tautulli/Tautulli.git (synced 2025-08-22 06:13:25 -07:00)

Commit 0f2de84494: Merge branch 'nightly' into dependabot/pip/nightly/beautifulsoup4-4.12.3

898 changed files with 166350 additions and 15235 deletions
.github/workflows/publish-docker.yml (vendored, 2 changes)

@@ -47,7 +47,7 @@ jobs:
           version: latest

       - name: Cache Docker Layers
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         with:
           path: /tmp/.buildx-cache
           key: ${{ runner.os }}-buildx-${{ github.sha }}
.github/workflows/publish-installers.yml (vendored, 2 changes)

@@ -129,7 +129,7 @@ jobs:
           echo "$EOF" >> $GITHUB_OUTPUT

       - name: Create Release
-        uses: softprops/action-gh-release@v1
+        uses: softprops/action-gh-release@v2
         id: create_release
         env:
           GITHUB_TOKEN: ${{ secrets.GHACTIONS_TOKEN }}
lib/PyWin32.chm (new binary file; binary file not shown)
lib/adodbapi/__init__.py (new file, 74 lines)

@@ -0,0 +1,74 @@

"""adodbapi - A python DB API 2.0 (PEP 249) interface to Microsoft ADO

Copyright (C) 2002 Henrik Ekelund, version 2.1 by Vernon Cole
* http://sourceforge.net/projects/adodbapi
"""
import sys
import time

from .adodbapi import Connection, Cursor, __version__, connect, dateconverter
from .apibase import (
    BINARY,
    DATETIME,
    NUMBER,
    ROWID,
    STRING,
    DatabaseError,
    DataError,
    Error,
    FetchFailedError,
    IntegrityError,
    InterfaceError,
    InternalError,
    NotSupportedError,
    OperationalError,
    ProgrammingError,
    Warning,
    apilevel,
    paramstyle,
    threadsafety,
)


def Binary(aString):
    """This function constructs an object capable of holding a binary (long) string value."""
    return bytes(aString)


def Date(year, month, day):
    "This function constructs an object holding a date value."
    return dateconverter.Date(year, month, day)


def Time(hour, minute, second):
    "This function constructs an object holding a time value."
    return dateconverter.Time(hour, minute, second)


def Timestamp(year, month, day, hour, minute, second):
    "This function constructs an object holding a time stamp value."
    return dateconverter.Timestamp(year, month, day, hour, minute, second)


def DateFromTicks(ticks):
    """This function constructs an object holding a date value from the given ticks value
    (number of seconds since the epoch; see the documentation of the standard Python time module for details).
    """
    return Date(*time.gmtime(ticks)[:3])


def TimeFromTicks(ticks):
    """This function constructs an object holding a time value from the given ticks value
    (number of seconds since the epoch; see the documentation of the standard Python time module for details).
    """
    return Time(*time.gmtime(ticks)[3:6])


def TimestampFromTicks(ticks):
    """This function constructs an object holding a time stamp value from the given
    ticks value (number of seconds since the epoch;
    see the documentation of the standard Python time module for details)."""
    return Timestamp(*time.gmtime(ticks)[:6])


version = "adodbapi v" + __version__
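
A minimal sketch of what the *FromTicks constructors above return, assuming the default datetime-based dateconverter; it uses only the stdlib, so no ADO or Windows is required to check it:

# Minimal sketch (stdlib only): how the *FromTicks helpers slice time.gmtime(),
# assuming the default datetime-based dateconverter.
import datetime
import time

ticks = 0  # 1970-01-01 00:00:00 UTC
tt = time.gmtime(ticks)
print(datetime.date(*tt[:3]))      # DateFromTicks(0)      -> 1970-01-01
print(datetime.time(*tt[3:6]))     # TimeFromTicks(0)      -> 00:00:00
print(datetime.datetime(*tt[:6]))  # TimestampFromTicks(0) -> 1970-01-01 00:00:00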
lib/adodbapi/ado_consts.py (new file, 281 lines)

@@ -0,0 +1,281 @@

# ADO enumerated constants documented on MSDN:
# http://msdn.microsoft.com/en-us/library/ms678353(VS.85).aspx

# IsolationLevelEnum
adXactUnspecified = -1
adXactBrowse = 0x100
adXactChaos = 0x10
adXactCursorStability = 0x1000
adXactIsolated = 0x100000
adXactReadCommitted = 0x1000
adXactReadUncommitted = 0x100
adXactRepeatableRead = 0x10000
adXactSerializable = 0x100000

# CursorLocationEnum
adUseClient = 3
adUseServer = 2

# CursorTypeEnum
adOpenDynamic = 2
adOpenForwardOnly = 0
adOpenKeyset = 1
adOpenStatic = 3
adOpenUnspecified = -1

# CommandTypeEnum
adCmdText = 1
adCmdStoredProc = 4
adSchemaTables = 20

# ParameterDirectionEnum
adParamInput = 1
adParamInputOutput = 3
adParamOutput = 2
adParamReturnValue = 4
adParamUnknown = 0
directions = {
    0: "Unknown",
    1: "Input",
    2: "Output",
    3: "InputOutput",
    4: "Return",
}


def ado_direction_name(ado_dir):
    try:
        return "adParam" + directions[ado_dir]
    except:
        return "unknown direction (" + str(ado_dir) + ")"


# ObjectStateEnum
adStateClosed = 0
adStateOpen = 1
adStateConnecting = 2
adStateExecuting = 4
adStateFetching = 8

# FieldAttributeEnum
adFldMayBeNull = 0x40

# ConnectModeEnum
adModeUnknown = 0
adModeRead = 1
adModeWrite = 2
adModeReadWrite = 3
adModeShareDenyRead = 4
adModeShareDenyWrite = 8
adModeShareExclusive = 12
adModeShareDenyNone = 16
adModeRecursive = 0x400000

# XactAttributeEnum
adXactCommitRetaining = 131072
adXactAbortRetaining = 262144

ado_error_TIMEOUT = -2147217871

# DataTypeEnum - ADO Data types documented at:
# http://msdn2.microsoft.com/en-us/library/ms675318.aspx
adArray = 0x2000
adEmpty = 0x0
adBSTR = 0x8
adBigInt = 0x14
adBinary = 0x80
adBoolean = 0xB
adChapter = 0x88
adChar = 0x81
adCurrency = 0x6
adDBDate = 0x85
adDBTime = 0x86
adDBTimeStamp = 0x87
adDate = 0x7
adDecimal = 0xE
adDouble = 0x5
adError = 0xA
adFileTime = 0x40
adGUID = 0x48
adIDispatch = 0x9
adIUnknown = 0xD
adInteger = 0x3
adLongVarBinary = 0xCD
adLongVarChar = 0xC9
adLongVarWChar = 0xCB
adNumeric = 0x83
adPropVariant = 0x8A
adSingle = 0x4
adSmallInt = 0x2
adTinyInt = 0x10
adUnsignedBigInt = 0x15
adUnsignedInt = 0x13
adUnsignedSmallInt = 0x12
adUnsignedTinyInt = 0x11
adUserDefined = 0x84
adVarBinary = 0xCC
adVarChar = 0xC8
adVarNumeric = 0x8B
adVarWChar = 0xCA
adVariant = 0xC
adWChar = 0x82
# Additional constants used by introspection but not ADO itself
AUTO_FIELD_MARKER = -1000

adTypeNames = {
    adBSTR: "adBSTR",
    adBigInt: "adBigInt",
    adBinary: "adBinary",
    adBoolean: "adBoolean",
    adChapter: "adChapter",
    adChar: "adChar",
    adCurrency: "adCurrency",
    adDBDate: "adDBDate",
    adDBTime: "adDBTime",
    adDBTimeStamp: "adDBTimeStamp",
    adDate: "adDate",
    adDecimal: "adDecimal",
    adDouble: "adDouble",
    adEmpty: "adEmpty",
    adError: "adError",
    adFileTime: "adFileTime",
    adGUID: "adGUID",
    adIDispatch: "adIDispatch",
    adIUnknown: "adIUnknown",
    adInteger: "adInteger",
    adLongVarBinary: "adLongVarBinary",
    adLongVarChar: "adLongVarChar",
    adLongVarWChar: "adLongVarWChar",
    adNumeric: "adNumeric",
    adPropVariant: "adPropVariant",
    adSingle: "adSingle",
    adSmallInt: "adSmallInt",
    adTinyInt: "adTinyInt",
    adUnsignedBigInt: "adUnsignedBigInt",
    adUnsignedInt: "adUnsignedInt",
    adUnsignedSmallInt: "adUnsignedSmallInt",
    adUnsignedTinyInt: "adUnsignedTinyInt",
    adUserDefined: "adUserDefined",
    adVarBinary: "adVarBinary",
    adVarChar: "adVarChar",
    adVarNumeric: "adVarNumeric",
    adVarWChar: "adVarWChar",
    adVariant: "adVariant",
    adWChar: "adWChar",
}


def ado_type_name(ado_type):
    return adTypeNames.get(ado_type, "unknown type (" + str(ado_type) + ")")


# here in decimal, sorted by value
# adEmpty 0 Specifies no value (DBTYPE_EMPTY).
# adSmallInt 2 Indicates a two-byte signed integer (DBTYPE_I2).
# adInteger 3 Indicates a four-byte signed integer (DBTYPE_I4).
# adSingle 4 Indicates a single-precision floating-point value (DBTYPE_R4).
# adDouble 5 Indicates a double-precision floating-point value (DBTYPE_R8).
# adCurrency 6 Indicates a currency value (DBTYPE_CY). Currency is a fixed-point number
#   with four digits to the right of the decimal point. It is stored in an eight-byte signed integer scaled by 10,000.
# adDate 7 Indicates a date value (DBTYPE_DATE). A date is stored as a double, the whole part of which is
#   the number of days since December 30, 1899, and the fractional part of which is the fraction of a day.
# adBSTR 8 Indicates a null-terminated character string (Unicode) (DBTYPE_BSTR).
# adIDispatch 9 Indicates a pointer to an IDispatch interface on a COM object (DBTYPE_IDISPATCH).
# adError 10 Indicates a 32-bit error code (DBTYPE_ERROR).
# adBoolean 11 Indicates a boolean value (DBTYPE_BOOL).
# adVariant 12 Indicates an Automation Variant (DBTYPE_VARIANT).
# adIUnknown 13 Indicates a pointer to an IUnknown interface on a COM object (DBTYPE_IUNKNOWN).
# adDecimal 14 Indicates an exact numeric value with a fixed precision and scale (DBTYPE_DECIMAL).
# adTinyInt 16 Indicates a one-byte signed integer (DBTYPE_I1).
# adUnsignedTinyInt 17 Indicates a one-byte unsigned integer (DBTYPE_UI1).
# adUnsignedSmallInt 18 Indicates a two-byte unsigned integer (DBTYPE_UI2).
# adUnsignedInt 19 Indicates a four-byte unsigned integer (DBTYPE_UI4).
# adBigInt 20 Indicates an eight-byte signed integer (DBTYPE_I8).
# adUnsignedBigInt 21 Indicates an eight-byte unsigned integer (DBTYPE_UI8).
# adFileTime 64 Indicates a 64-bit value representing the number of 100-nanosecond intervals since
#   January 1, 1601 (DBTYPE_FILETIME).
# adGUID 72 Indicates a globally unique identifier (GUID) (DBTYPE_GUID).
# adBinary 128 Indicates a binary value (DBTYPE_BYTES).
# adChar 129 Indicates a string value (DBTYPE_STR).
# adWChar 130 Indicates a null-terminated Unicode character string (DBTYPE_WSTR).
# adNumeric 131 Indicates an exact numeric value with a fixed precision and scale (DBTYPE_NUMERIC).
# adUserDefined 132 Indicates a user-defined variable (DBTYPE_UDT).
# adDBDate 133 Indicates a date value (yyyymmdd) (DBTYPE_DBDATE).
# adDBTime 134 Indicates a time value (hhmmss) (DBTYPE_DBTIME).
# adDBTimeStamp 135 Indicates a date/time stamp (yyyymmddhhmmss plus a fraction in billionths) (DBTYPE_DBTIMESTAMP).
# adChapter 136 Indicates a four-byte chapter value that identifies rows in a child rowset (DBTYPE_HCHAPTER).
# adPropVariant 138 Indicates an Automation PROPVARIANT (DBTYPE_PROP_VARIANT).
# adVarNumeric 139 Indicates a numeric value (Parameter object only).
# adVarChar 200 Indicates a string value (Parameter object only).
# adLongVarChar 201 Indicates a long string value (Parameter object only).
# adVarWChar 202 Indicates a null-terminated Unicode character string (Parameter object only).
# adLongVarWChar 203 Indicates a long null-terminated Unicode string value (Parameter object only).
# adVarBinary 204 Indicates a binary value (Parameter object only).
# adLongVarBinary 205 Indicates a long binary value (Parameter object only).
# adArray (Does not apply to ADOX.) 0x2000 A flag value, always combined with another data type constant,
#   that indicates an array of that other data type.

# Error codes to names
adoErrors = {
    0xE7B: "adErrBoundToCommand",
    0xE94: "adErrCannotComplete",
    0xEA4: "adErrCantChangeConnection",
    0xC94: "adErrCantChangeProvider",
    0xE8C: "adErrCantConvertvalue",
    0xE8D: "adErrCantCreate",
    0xEA3: "adErrCatalogNotSet",
    0xE8E: "adErrColumnNotOnThisRow",
    0xD5D: "adErrDataConversion",
    0xE89: "adErrDataOverflow",
    0xE9A: "adErrDelResOutOfScope",
    0xEA6: "adErrDenyNotSupported",
    0xEA7: "adErrDenyTypeNotSupported",
    0xCB3: "adErrFeatureNotAvailable",
    0xEA5: "adErrFieldsUpdateFailed",
    0xC93: "adErrIllegalOperation",
    0xCAE: "adErrInTransaction",
    0xE87: "adErrIntegrityViolation",
    0xBB9: "adErrInvalidArgument",
    0xE7D: "adErrInvalidConnection",
    0xE7C: "adErrInvalidParamInfo",
    0xE82: "adErrInvalidTransaction",
    0xE91: "adErrInvalidURL",
    0xCC1: "adErrItemNotFound",
    0xBCD: "adErrNoCurrentRecord",
    0xE83: "adErrNotExecuting",
    0xE7E: "adErrNotReentrant",
    0xE78: "adErrObjectClosed",
    0xD27: "adErrObjectInCollection",
    0xD5C: "adErrObjectNotSet",
    0xE79: "adErrObjectOpen",
    0xBBA: "adErrOpeningFile",
    0xE80: "adErrOperationCancelled",
    0xE96: "adErrOutOfSpace",
    0xE88: "adErrPermissionDenied",
    0xE9E: "adErrPropConflicting",
    0xE9B: "adErrPropInvalidColumn",
    0xE9C: "adErrPropInvalidOption",
    0xE9D: "adErrPropInvalidValue",
    0xE9F: "adErrPropNotAllSettable",
    0xEA0: "adErrPropNotSet",
    0xEA1: "adErrPropNotSettable",
    0xEA2: "adErrPropNotSupported",
    0xBB8: "adErrProviderFailed",
    0xE7A: "adErrProviderNotFound",
    0xBBB: "adErrReadFile",
    0xE93: "adErrResourceExists",
    0xE92: "adErrResourceLocked",
    0xE97: "adErrResourceOutOfScope",
    0xE8A: "adErrSchemaViolation",
    0xE8B: "adErrSignMismatch",
    0xE81: "adErrStillConnecting",
    0xE7F: "adErrStillExecuting",
    0xE90: "adErrTreePermissionDenied",
    0xE8F: "adErrURLDoesNotExist",
    0xE99: "adErrURLNamedRowDoesNotExist",
    0xE98: "adErrUnavailable",
    0xE84: "adErrUnsafeOperation",
    0xE95: "adErrVolumeNotFound",
    0xBBC: "adErrWriteFile",
}
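
The two lookup helpers above degrade gracefully for unknown codes. A standalone sketch with trimmed copies of the dictionaries (the vendored module defines the full tables):

# Standalone sketch of the lookup helpers above, using trimmed copies of the
# adTypeNames and directions tables rather than importing the vendored module.
adTypeNames = {0x3: "adInteger", 0xC8: "adVarChar"}
directions = {0: "Unknown", 1: "Input", 2: "Output", 3: "InputOutput", 4: "Return"}


def ado_type_name(ado_type):
    return adTypeNames.get(ado_type, "unknown type (" + str(ado_type) + ")")


print(ado_type_name(0x3))         # adInteger
print(ado_type_name(9999))        # unknown type (9999)
print("adParam" + directions[1])  # adParamInput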
lib/adodbapi/adodbapi.py (new file, 1223 lines)

File diff suppressed because it is too large.
lib/adodbapi/apibase.py (new file, 794 lines)

@@ -0,0 +1,794 @@

"""adodbapi.apibase - A python DB API 2.0 (PEP 249) interface to Microsoft ADO

Copyright (C) 2002 Henrik Ekelund, version 2.1 by Vernon Cole
* http://sourceforge.net/projects/pywin32
* http://sourceforge.net/projects/adodbapi
"""

import datetime
import decimal
import numbers
import sys
import time

# noinspection PyUnresolvedReferences
from . import ado_consts as adc

verbose = False  # debugging flag

onIronPython = sys.platform == "cli"
if onIronPython:  # we need type definitions for odd data we may need to convert
    # noinspection PyUnresolvedReferences
    from System import DateTime, DBNull

    NullTypes = (type(None), DBNull)
else:
    DateTime = type(NotImplemented)  # should never be seen on win32
    NullTypes = type(None)

# --- define objects to smooth out Python3 <-> Python 2.x differences
unicodeType = str
longType = int
StringTypes = str
makeByteBuffer = bytes
memoryViewType = memoryview
_BaseException = Exception

try:  # jdhardy -- handle bytes under IronPython & Py3
    bytes
except NameError:
    bytes = str  # define it for old Pythons


# ------- Error handlers ------
def standardErrorHandler(connection, cursor, errorclass, errorvalue):
    err = (errorclass, errorvalue)
    try:
        connection.messages.append(err)
    except:
        pass
    if cursor is not None:
        try:
            cursor.messages.append(err)
        except:
            pass
    raise errorclass(errorvalue)


# Note: _BaseException is defined differently between Python 2.x and 3.x
class Error(_BaseException):
    pass  # Exception that is the base class of all other error
    # exceptions. You can use this to catch all errors with one
    # single 'except' statement. Warnings are not considered
    # errors and thus should not use this class as base. It must
    # be a subclass of the Python StandardError (defined in the
    # module exceptions).


class Warning(_BaseException):
    pass


class InterfaceError(Error):
    pass


class DatabaseError(Error):
    pass


class InternalError(DatabaseError):
    pass


class OperationalError(DatabaseError):
    pass


class ProgrammingError(DatabaseError):
    pass


class IntegrityError(DatabaseError):
    pass


class DataError(DatabaseError):
    pass


class NotSupportedError(DatabaseError):
    pass


class FetchFailedError(OperationalError):
    """
    Error is used by RawStoredProcedureQuerySet to determine when a fetch
    failed due to a connection being closed or there is no record set
    returned. (Non-standard, added especially for django)
    """

    pass


# # # # # ----- Type Objects and Constructors ----- # # # # #
# Many databases need to have the input in a particular format for binding to an operation's input parameters.
# For example, if an input is destined for a DATE column, then it must be bound to the database in a particular
# string format. Similar problems exist for "Row ID" columns or large binary items (e.g. blobs or RAW columns).
# This presents problems for Python since the parameters to the executeXXX() method are untyped.
# When the database module sees a Python string object, it doesn't know if it should be bound as a simple CHAR
# column, as a raw BINARY item, or as a DATE.
#
# To overcome this problem, a module must provide the constructors defined below to create objects that can
# hold special values. When passed to the cursor methods, the module can then detect the proper type of
# the input parameter and bind it accordingly.

# A Cursor Object's description attribute returns information about each of the result columns of a query.
# The type_code must compare equal to one of Type Objects defined below. Type Objects may be equal to more than
# one type code (e.g. DATETIME could be equal to the type codes for date, time and timestamp columns;
# see the Implementation Hints below for details).

# SQL NULL values are represented by the Python None singleton on input and output.

# Note: Usage of Unix ticks for database interfacing can cause troubles because of the limited date range they cover.


# def Date(year,month,day):
#     "This function constructs an object holding a date value. "
#     return dateconverter.date(year,month,day)  #dateconverter.Date(year,month,day)
#
# def Time(hour,minute,second):
#     "This function constructs an object holding a time value. "
#     return dateconverter.time(hour, minute, second)  # dateconverter.Time(hour,minute,second)
#
# def Timestamp(year,month,day,hour,minute,second):
#     "This function constructs an object holding a time stamp value. "
#     return dateconverter.datetime(year,month,day,hour,minute,second)
#
# def DateFromTicks(ticks):
#     """This function constructs an object holding a date value from the given ticks value
#     (number of seconds since the epoch; see the documentation of the standard Python time module for details). """
#     return Date(*time.gmtime(ticks)[:3])
#
# def TimeFromTicks(ticks):
#     """This function constructs an object holding a time value from the given ticks value
#     (number of seconds since the epoch; see the documentation of the standard Python time module for details). """
#     return Time(*time.gmtime(ticks)[3:6])
#
# def TimestampFromTicks(ticks):
#     """This function constructs an object holding a time stamp value from the given
#     ticks value (number of seconds since the epoch;
#     see the documentation of the standard Python time module for details). """
#     return Timestamp(*time.gmtime(ticks)[:6])
#
# def Binary(aString):
#     """This function constructs an object capable of holding a binary (long) string value. """
#     b = makeByteBuffer(aString)
#     return b


# ----- Time converters ----------------------------------------------
class TimeConverter(object):  # this is a generic time converter skeleton
    def __init__(self):  # the details will be filled in by instances
        self._ordinal_1899_12_31 = datetime.date(1899, 12, 31).toordinal() - 1
        # Use cls.types to compare if an input parameter is a datetime
        self.types = {
            type(self.Date(2000, 1, 1)),
            type(self.Time(12, 1, 1)),
            type(self.Timestamp(2000, 1, 1, 12, 1, 1)),
            datetime.datetime,
            datetime.time,
            datetime.date,
        }

    def COMDate(self, obj):
        """Returns a ComDate from a date-time"""
        try:  # most likely a datetime
            tt = obj.timetuple()

            try:
                ms = obj.microsecond
            except:
                ms = 0
            return self.ComDateFromTuple(tt, ms)
        except:  # might be a tuple
            try:
                return self.ComDateFromTuple(obj)
            except:  # try an mxdate
                try:
                    return obj.COMDate()
                except:
                    raise ValueError('Cannot convert "%s" to COMdate.' % repr(obj))

    def ComDateFromTuple(self, t, microseconds=0):
        d = datetime.date(t[0], t[1], t[2])
        integerPart = d.toordinal() - self._ordinal_1899_12_31
        ms = (t[3] * 3600 + t[4] * 60 + t[5]) * 1000000 + microseconds
        fractPart = float(ms) / 86400000000.0
        return integerPart + fractPart

    def DateObjectFromCOMDate(self, comDate):
        "Returns an object of the wanted type from a ComDate"
        raise NotImplementedError  # "Abstract class"

    def Date(self, year, month, day):
        "This function constructs an object holding a date value."
        raise NotImplementedError  # "Abstract class"

    def Time(self, hour, minute, second):
        "This function constructs an object holding a time value."
        raise NotImplementedError  # "Abstract class"

    def Timestamp(self, year, month, day, hour, minute, second):
        "This function constructs an object holding a time stamp value."
        raise NotImplementedError  # "Abstract class"

    # all purpose date to ISO format converter
    def DateObjectToIsoFormatString(self, obj):
        "This function should return a string in the format 'YYYY-MM-dd HH:MM:SS:ms' (ms optional)"
        try:  # most likely, a datetime.datetime
            s = obj.isoformat(" ")
        except (TypeError, AttributeError):
            if isinstance(obj, datetime.date):
                s = obj.isoformat() + " 00:00:00"  # return exact midnight
            else:
                try:  # maybe it has a strftime method, like mx
                    s = obj.strftime("%Y-%m-%d %H:%M:%S")
                except AttributeError:
                    try:  # but may be time.struct_time
                        s = time.strftime("%Y-%m-%d %H:%M:%S", obj)
                    except:
                        raise ValueError('Cannot convert "%s" to isoformat' % repr(obj))
        return s


# -- Optional: if mx extensions are installed you may use mxDateTime ----
try:
    import mx.DateTime

    mxDateTime = True
except:
    mxDateTime = False
if mxDateTime:

    class mxDateTimeConverter(TimeConverter):  # used optionally if installed
        def __init__(self):
            TimeConverter.__init__(self)
            self.types.add(type(mx.DateTime))

        def DateObjectFromCOMDate(self, comDate):
            return mx.DateTime.DateTimeFromCOMDate(comDate)

        def Date(self, year, month, day):
            return mx.DateTime.Date(year, month, day)

        def Time(self, hour, minute, second):
            return mx.DateTime.Time(hour, minute, second)

        def Timestamp(self, year, month, day, hour, minute, second):
            return mx.DateTime.Timestamp(year, month, day, hour, minute, second)

else:

    class mxDateTimeConverter(TimeConverter):
        pass  # if no mx is installed


class pythonDateTimeConverter(TimeConverter):  # standard since Python 2.3
    def __init__(self):
        TimeConverter.__init__(self)

    def DateObjectFromCOMDate(self, comDate):
        if isinstance(comDate, datetime.datetime):
            odn = comDate.toordinal()
            tim = comDate.time()
            new = datetime.datetime.combine(datetime.datetime.fromordinal(odn), tim)
            return new
            # return comDate.replace(tzinfo=None)  # make non aware
        elif isinstance(comDate, DateTime):
            fComDate = comDate.ToOADate()  # ironPython clr Date/Time
        else:
            fComDate = float(comDate)  # ComDate is number of days since 1899-12-31
        integerPart = int(fComDate)
        floatpart = fComDate - integerPart
        ##if floatpart == 0.0:
        ##    return datetime.date.fromordinal(integerPart + self._ordinal_1899_12_31)
        dte = datetime.datetime.fromordinal(
            integerPart + self._ordinal_1899_12_31
        ) + datetime.timedelta(milliseconds=floatpart * 86400000)
        # millisecondsperday=86400000 # 24*60*60*1000
        return dte

    def Date(self, year, month, day):
        return datetime.date(year, month, day)

    def Time(self, hour, minute, second):
        return datetime.time(hour, minute, second)

    def Timestamp(self, year, month, day, hour, minute, second):
        return datetime.datetime(year, month, day, hour, minute, second)


class pythonTimeConverter(TimeConverter):  # the old, ?nix type date and time
    def __init__(self):  # caution: this Class gets confused by timezones and DST
        TimeConverter.__init__(self)
        self.types.add(time.struct_time)

    def DateObjectFromCOMDate(self, comDate):
        "Returns ticks since 1970"
        if isinstance(comDate, datetime.datetime):
            return comDate.timetuple()
        elif isinstance(comDate, DateTime):  # ironPython clr date/time
            fcomDate = comDate.ToOADate()
        else:
            fcomDate = float(comDate)
        secondsperday = 86400  # 24*60*60
        # ComDate is number of days since 1899-12-31, gmtime epoch is 1970-1-1 = 25569 days
        t = time.gmtime(secondsperday * (fcomDate - 25569.0))
        return t  # year,month,day,hour,minute,second,weekday,julianday,daylightsaving=t

    def Date(self, year, month, day):
        return self.Timestamp(year, month, day, 0, 0, 0)

    def Time(self, hour, minute, second):
        return time.gmtime((hour * 60 + minute) * 60 + second)

    def Timestamp(self, year, month, day, hour, minute, second):
        return time.localtime(
            time.mktime((year, month, day, hour, minute, second, 0, 0, -1))
        )


base_dateconverter = pythonDateTimeConverter()

# ------ DB API required module attributes ---------------------
threadsafety = 1  # TODO -- find out whether this module is actually BETTER than 1.

apilevel = "2.0"  # String constant stating the supported DB API level.

paramstyle = "qmark"  # the default parameter style

# ------ control for an extension which may become part of DB API 3.0 ---
accepted_paramstyles = ("qmark", "named", "format", "pyformat", "dynamic")

# ------------------------------------------------------------------------------------------
# define similar types for generic conversion routines
adoIntegerTypes = (
    adc.adInteger,
    adc.adSmallInt,
    adc.adTinyInt,
    adc.adUnsignedInt,
    adc.adUnsignedSmallInt,
    adc.adUnsignedTinyInt,
    adc.adBoolean,
    adc.adError,
)  # max 32 bits
adoRowIdTypes = (adc.adChapter,)  # v2.1 Rose
adoLongTypes = (adc.adBigInt, adc.adFileTime, adc.adUnsignedBigInt)
adoExactNumericTypes = (
    adc.adDecimal,
    adc.adNumeric,
    adc.adVarNumeric,
    adc.adCurrency,
)  # v2.3 Cole
adoApproximateNumericTypes = (adc.adDouble, adc.adSingle)  # v2.1 Cole
adoStringTypes = (
    adc.adBSTR,
    adc.adChar,
    adc.adLongVarChar,
    adc.adLongVarWChar,
    adc.adVarChar,
    adc.adVarWChar,
    adc.adWChar,
)
adoBinaryTypes = (adc.adBinary, adc.adLongVarBinary, adc.adVarBinary)
adoDateTimeTypes = (adc.adDBTime, adc.adDBTimeStamp, adc.adDate, adc.adDBDate)
adoRemainingTypes = (
    adc.adEmpty,
    adc.adIDispatch,
    adc.adIUnknown,
    adc.adPropVariant,
    adc.adArray,
    adc.adUserDefined,
    adc.adVariant,
    adc.adGUID,
)


# this class is a trick to determine whether a type is a member of a related group of types. see PEP notes
class DBAPITypeObject(object):
    def __init__(self, valuesTuple):
        self.values = frozenset(valuesTuple)

    def __eq__(self, other):
        return other in self.values

    def __ne__(self, other):
        return other not in self.values


"""This type object is used to describe columns in a database that are string-based (e.g. CHAR). """
STRING = DBAPITypeObject(adoStringTypes)

"""This type object is used to describe (long) binary columns in a database (e.g. LONG, RAW, BLOBs). """
BINARY = DBAPITypeObject(adoBinaryTypes)

"""This type object is used to describe numeric columns in a database. """
NUMBER = DBAPITypeObject(
    adoIntegerTypes + adoLongTypes + adoExactNumericTypes + adoApproximateNumericTypes
)

"""This type object is used to describe date/time columns in a database. """

DATETIME = DBAPITypeObject(adoDateTimeTypes)
"""This type object is used to describe the "Row ID" column in a database. """
ROWID = DBAPITypeObject(adoRowIdTypes)

OTHER = DBAPITypeObject(adoRemainingTypes)

# ------- utilities for translating python data types to ADO data types ---------------------------------
typeMap = {
    memoryViewType: adc.adVarBinary,
    float: adc.adDouble,
    type(None): adc.adEmpty,
    str: adc.adBSTR,
    bool: adc.adBoolean,  # v2.1 Cole
    decimal.Decimal: adc.adDecimal,
    int: adc.adBigInt,
    bytes: adc.adVarBinary,
}


def pyTypeToADOType(d):
    tp = type(d)
    try:
        return typeMap[tp]
    except KeyError:  # The type was not defined in the pre-computed Type table
        from . import dateconverter

        if (
            tp in dateconverter.types
        ):  # maybe it is one of our supported Date/Time types
            return adc.adDate
        # otherwise, attempt to discern the type by probing the data object itself -- to handle duck typing
        if isinstance(d, StringTypes):
            return adc.adBSTR
        if isinstance(d, numbers.Integral):
            return adc.adBigInt
        if isinstance(d, numbers.Real):
            return adc.adDouble
        raise DataError('cannot convert "%s" (type=%s) to ADO' % (repr(d), tp))


# # # # # # # # # # # # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# functions to convert database values to Python objects
# ------------------------------------------------------------------------
# variant type : function converting variant to Python value
def variantConvertDate(v):
    from . import dateconverter  # this function only called when adodbapi is running

    return dateconverter.DateObjectFromCOMDate(v)


def cvtString(variant):  # use to get old action of adodbapi v1 if desired
    if onIronPython:
        try:
            return variant.ToString()
        except:
            pass
    return str(variant)


def cvtDecimal(variant):  # better name
    return _convertNumberWithCulture(variant, decimal.Decimal)


def cvtNumeric(variant):  # older name - don't break old code
    return cvtDecimal(variant)


def cvtFloat(variant):
    return _convertNumberWithCulture(variant, float)


def _convertNumberWithCulture(variant, f):
    try:
        return f(variant)
    except (ValueError, TypeError, decimal.InvalidOperation):
        try:
            europeVsUS = str(variant).replace(",", ".")
            return f(europeVsUS)
        except (ValueError, TypeError, decimal.InvalidOperation):
            pass


def cvtInt(variant):
    return int(variant)


def cvtLong(variant):  # only important in old versions where long and int differ
    return int(variant)


def cvtBuffer(variant):
    return bytes(variant)


def cvtUnicode(variant):
    return str(variant)


def identity(x):
    return x


def cvtUnusual(variant):
    if verbose > 1:
        sys.stderr.write("Conversion called for Unusual data=%s\n" % repr(variant))
    if isinstance(variant, DateTime):  # COMdate or System.Date
        from .adodbapi import (  # this will only be called when adodbapi is in use, and very rarely
            dateconverter,
        )

        return dateconverter.DateObjectFromCOMDate(variant)
    return variant  # cannot find conversion function -- just give the data to the user


def convert_to_python(variant, func):  # convert DB value into Python value
    if isinstance(variant, NullTypes):  # IronPython Null or None
        return None
    return func(variant)  # call the appropriate conversion function


class MultiMap(dict):  # builds a dictionary from {(sequence,of,keys) : function}
    """A dictionary of ado.type : function -- but you can set multiple items by passing a sequence of keys"""

    # useful for defining conversion functions for groups of similar data types.
    def __init__(self, aDict):
        for k, v in list(aDict.items()):
            self[k] = v  # we must call __setitem__

    def __setitem__(self, adoType, cvtFn):
        "set a single item, or a whole sequence of items"
        try:  # user passed us a sequence, set them individually
            for type in adoType:
                dict.__setitem__(self, type, cvtFn)
        except TypeError:  # a single value fails attempt to iterate
            dict.__setitem__(self, adoType, cvtFn)


# initialize variantConversions dictionary used to convert SQL to Python
# this is the dictionary of default conversion functions, built by the class above.
# this becomes a class attribute for the Connection, and that attribute is used
# to build the list of column conversion functions for the Cursor
variantConversions = MultiMap(
    {
        adoDateTimeTypes: variantConvertDate,
        adoApproximateNumericTypes: cvtFloat,
        adoExactNumericTypes: cvtDecimal,  # use to force decimal rather than unicode
        adoLongTypes: cvtLong,
        adoIntegerTypes: cvtInt,
        adoRowIdTypes: cvtInt,
        adoStringTypes: identity,
        adoBinaryTypes: cvtBuffer,
        adoRemainingTypes: cvtUnusual,
    }
)

# # # # # classes to emulate the result of cursor.fetchxxx() as a sequence of sequences # # # # #
# "an ENUM of how my low level records are laid out"
RS_WIN_32, RS_ARRAY, RS_REMOTE = list(range(1, 4))


class SQLrow(object):  # a single database row
    # class to emulate a sequence, so that a column may be retrieved by either number or name
    def __init__(self, rows, index):  # "rows" is an _SQLrows object, index is which row
        self.rows = rows  # parent 'fetch' container object
        self.index = index  # my row number within parent

    def __getattr__(self, name):  # used for row.columnName type of value access
        try:
            return self._getValue(self.rows.columnNames[name.lower()])
        except KeyError:
            raise AttributeError('Unknown column name "{}"'.format(name))

    def _getValue(self, key):  # key must be an integer
        if (
            self.rows.recordset_format == RS_ARRAY
        ):  # retrieve from two-dimensional array
            v = self.rows.ado_results[key, self.index]
        elif self.rows.recordset_format == RS_REMOTE:
            v = self.rows.ado_results[self.index][key]
        else:  # pywin32 - retrieve from tuple of tuples
            v = self.rows.ado_results[key][self.index]
        if self.rows.converters is NotImplemented:
            return v
        return convert_to_python(v, self.rows.converters[key])

    def __len__(self):
        return self.rows.numberOfColumns

    def __getitem__(self, key):  # used for row[key] type of value access
        if isinstance(key, int):  # normal row[1] designation
            try:
                return self._getValue(key)
            except IndexError:
                raise
        if isinstance(key, slice):
            indices = key.indices(self.rows.numberOfColumns)
            vl = [self._getValue(i) for i in range(*indices)]
            return tuple(vl)
        try:
            return self._getValue(
                self.rows.columnNames[key.lower()]
            )  # extension row[columnName] designation
        except (KeyError, TypeError):
            er, st, tr = sys.exc_info()
            raise er(
                'No such key as "%s" in %s' % (repr(key), self.__repr__())
            ).with_traceback(tr)

    def __iter__(self):
        return iter(self.__next__())

    def __next__(self):
        for n in range(self.rows.numberOfColumns):
            yield self._getValue(n)

    def __repr__(self):  # create a human readable representation
        taglist = sorted(list(self.rows.columnNames.items()), key=lambda x: x[1])
        s = "<SQLrow={"
        for name, i in taglist:
            s += name + ":" + repr(self._getValue(i)) + ", "
        return s[:-2] + "}>"

    def __str__(self):  # create a pretty human readable representation
        return str(
            tuple(str(self._getValue(i)) for i in range(self.rows.numberOfColumns))
        )

    # TO-DO implement pickling an SQLrow directly
    # def __getstate__(self): return self.__dict__
    # def __setstate__(self, d): self.__dict__.update(d)
    # which basically tell pickle to treat your class just like a normal one,
    # taking self.__dict__ as representing the whole of the instance state,
    # despite the existence of the __getattr__.
    # # # #


class SQLrows(object):
    # class to emulate a sequence for multiple rows using a container object
    def __init__(self, ado_results, numberOfRows, cursor):
        self.ado_results = ado_results  # raw result of SQL get
        try:
            self.recordset_format = cursor.recordset_format
            self.numberOfColumns = cursor.numberOfColumns
            self.converters = cursor.converters
            self.columnNames = cursor.columnNames
        except AttributeError:
            self.recordset_format = RS_ARRAY
            self.numberOfColumns = 0
            self.converters = []
            self.columnNames = {}
        self.numberOfRows = numberOfRows

    def __len__(self):
        return self.numberOfRows

    def __getitem__(self, item):  # used for row or row,column access
        if not self.ado_results:
            return []
        if isinstance(item, slice):  # will return a list of row objects
            indices = item.indices(self.numberOfRows)
            return [SQLrow(self, k) for k in range(*indices)]
        elif isinstance(item, tuple) and len(item) == 2:
            # d = some_rowsObject[i,j] will return a datum from a two-dimension address
            i, j = item
            if not isinstance(j, int):
                try:
                    j = self.columnNames[j.lower()]  # convert named column to numeric
                except KeyError:
                    raise KeyError('adodbapi: no such column name as "%s"' % repr(j))
            if self.recordset_format == RS_ARRAY:  # retrieve from two-dimensional array
                v = self.ado_results[j, i]
            elif self.recordset_format == RS_REMOTE:
                v = self.ado_results[i][j]
            else:  # pywin32 - retrieve from tuple of tuples
                v = self.ado_results[j][i]
            if self.converters is NotImplemented:
                return v
            return convert_to_python(v, self.converters[j])
        else:
            row = SQLrow(self, item)  # new row descriptor
            return row

    def __iter__(self):
        return iter(self.__next__())

    def __next__(self):
        for n in range(self.numberOfRows):
            row = SQLrow(self, n)
            yield row


# # # # #

# # # # # functions to re-format SQL requests to other paramstyle requirements # # # # # # # # # #


def changeNamedToQmark(
    op,
):  # convert from 'named' paramstyle to ADO required '?'mark parameters
    outOp = ""
    outparms = []
    chunks = op.split(
        "'"
    )  # quote all literals -- odd numbered list results are literals.
    inQuotes = False
    for chunk in chunks:
        if inQuotes:  # this is inside a quote
            if chunk == "":  # double apostrophe to quote one apostrophe
                outOp = outOp[:-1]  # so take one away
            else:
                outOp += "'" + chunk + "'"  # else pass the quoted string as is.
        else:  # is SQL code -- look for a :namedParameter
            while chunk:  # some SQL string remains
                sp = chunk.split(":", 1)
                outOp += sp[0]  # concat the part up to the :
                s = ""
                try:
                    chunk = sp[1]
                except IndexError:
                    chunk = None
                if chunk:  # there was a parameter - parse it out
                    i = 0
                    c = chunk[0]
                    while c.isalnum() or c == "_":
                        i += 1
                        try:
                            c = chunk[i]
                        except IndexError:
                            break
                    s = chunk[:i]
                    chunk = chunk[i:]
                if s:
                    outparms.append(s)  # list the parameters in order
                    outOp += "?"  # put in the Qmark
        inQuotes = not inQuotes
    return outOp, outparms


def changeFormatToQmark(
    op,
):  # convert from 'format' paramstyle to ADO required '?'mark parameters
    outOp = ""
    outparams = []
    chunks = op.split(
        "'"
    )  # quote all literals -- odd numbered list results are literals.
    inQuotes = False
    for chunk in chunks:
        if inQuotes:
            if (
                outOp != "" and chunk == ""
            ):  # he used a double apostrophe to quote one apostrophe
                outOp = outOp[:-1]  # so take one away
            else:
                outOp += "'" + chunk + "'"  # else pass the quoted string as is.
        else:  # is SQL code -- look for a %s parameter
            if "%(" in chunk:  # ugh! pyformat!
                while chunk:  # some SQL string remains
                    sp = chunk.split("%(", 1)
                    outOp += sp[0]  # concat the part up to the %
                    if len(sp) > 1:
                        try:
                            s, chunk = sp[1].split(")s", 1)  # find the ')s'
                        except ValueError:
                            raise ProgrammingError(
                                'Pyformat SQL has incorrect format near "%s"' % chunk
                            )
                        outparams.append(s)
                        outOp += "?"  # put in the Qmark
                    else:
                        chunk = None
            else:  # proper '%s' format
                sp = chunk.split("%s")  # make each %s
                outOp += "?".join(sp)  # into ?
        inQuotes = not inQuotes  # every other chunk is a quoted string
    return outOp, outparams
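
The two paramstyle rewriters at the end of the file are plain string transforms; a sketch of exercising them, assuming the vendored package imports cleanly (Windows with pywin32, since adodbapi/__init__.py pulls in the ADO module):

# Sketch: what the paramstyle rewriters above produce. Assumes the vendored
# lib/ directory is on sys.path and the package imports (Windows + pywin32).
from adodbapi.apibase import changeFormatToQmark, changeNamedToQmark

sql, names = changeNamedToQmark("select * from t where id = :user_id and nm = :nm")
print(sql)    # select * from t where id = ? and nm = ?
print(names)  # ['user_id', 'nm']

sql, names = changeFormatToQmark("insert into t values (%s, %s)")
print(sql)    # insert into t values (?, ?)
print(names)  # []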
lib/adodbapi/examples/db_print.py (new file, 72 lines)

@@ -0,0 +1,72 @@

""" db_print.py -- a simple demo for ADO database reads."""

import sys

import adodbapi.ado_consts as adc

cmd_args = ("filename", "table_name")
if "help" in sys.argv:
    print("possible settings keywords are:", cmd_args)
    sys.exit()

kw_args = {}  # pick up filename and proxy address from command line (optionally)
for arg in sys.argv:
    s = arg.split("=")
    if len(s) > 1:
        if s[0] in cmd_args:
            kw_args[s[0]] = s[1]

kw_args.setdefault(
    "filename", "test.mdb"
)  # assumes server is running from examples folder
kw_args.setdefault("table_name", "Products")  # the name of the demo table

# the server needs to select the provider based on his Python installation
provider_switch = ["provider", "Microsoft.ACE.OLEDB.12.0", "Microsoft.Jet.OLEDB.4.0"]

# ------------------------ START HERE -------------------------------------
# create the connection
constr = "Provider=%(provider)s;Data Source=%(filename)s"
import adodbapi as db

con = db.connect(constr, kw_args, macro_is64bit=provider_switch)

if kw_args["table_name"] == "?":
    print("The tables in your database are:")
    for name in con.get_table_names():
        print(name)
else:
    # make a cursor on the connection
    with con.cursor() as c:
        # run an SQL statement on the cursor
        sql = "select * from %s" % kw_args["table_name"]
        print('performing query="%s"' % sql)
        c.execute(sql)

        # check the results
        print(
            'result rowcount shows as= %d. (Note: -1 means "not known")' % (c.rowcount,)
        )
        print("")
        print("result data description is:")
        print("            NAME Type         DispSize IntrnlSz Prec Scale Null?")
        for d in c.description:
            print(
                ("%16s %-12s %8s %8d %4d %5d %s")
                % (d[0], adc.adTypeNames[d[1]], d[2], d[3], d[4], d[5], bool(d[6]))
            )
        print("")
        print("str() of first five records are...")

        # get the results
        db = c.fetchmany(5)

        # print them
        for rec in db:
            print(rec)

        print("")
        print("repr() of next row is...")
        print(repr(c.fetchone()))
        print("")
con.close()
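
The keyword=value argument parsing at the top of the script is self-contained; a quick standalone check with a hypothetical argv:

# Standalone check of the keyword=value parsing used by db_print.py,
# with a hypothetical argv in place of sys.argv.
cmd_args = ("filename", "table_name")
argv = ["db_print.py", "filename=test.mdb", "table_name=?"]

kw_args = {}
for arg in argv:
    s = arg.split("=")
    if len(s) > 1 and s[0] in cmd_args:
        kw_args[s[0]] = s[1]

print(kw_args)  # {'filename': 'test.mdb', 'table_name': '?'}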
lib/adodbapi/examples/db_table_names.py (new file, 20 lines)

@@ -0,0 +1,20 @@

""" db_table_names.py -- a simple demo for ADO database table listing."""
import sys

import adodbapi

try:
    databasename = sys.argv[1]
except IndexError:
    databasename = "test.mdb"

provider = ["prv", "Microsoft.ACE.OLEDB.12.0", "Microsoft.Jet.OLEDB.4.0"]
constr = "Provider=%(prv)s;Data Source=%(db)s"

# create the connection
con = adodbapi.connect(constr, db=databasename, macro_is64bit=provider)

print("Table names in= %s" % databasename)

for table in con.get_table_names():
    print(table)
lib/adodbapi/examples/xls_read.py (new file, 41 lines)

@@ -0,0 +1,41 @@

import sys

import adodbapi

try:
    import adodbapi.is64bit as is64bit

    is64 = is64bit.Python()
except ImportError:
    is64 = False

if is64:
    driver = "Microsoft.ACE.OLEDB.12.0"
else:
    driver = "Microsoft.Jet.OLEDB.4.0"
extended = 'Extended Properties="Excel 8.0;HDR=Yes;IMEX=1;"'

try:  # first command line argument will be xls file name -- default to the one written by xls_write.py
    filename = sys.argv[1]
except IndexError:
    filename = "xx.xls"

constr = "Provider=%s;Data Source=%s;%s" % (driver, filename, extended)

conn = adodbapi.connect(constr)

try:  # second command line argument will be worksheet name -- default to first worksheet
    sheet = sys.argv[2]
except IndexError:
    # use ADO feature to get the name of the first worksheet
    sheet = conn.get_table_names()[0]

print("Spreadsheet=%s Worksheet=%s" % (filename, sheet))
print("------------------------------------------------------------")
crsr = conn.cursor()
sql = "SELECT * from [%s]" % sheet
crsr.execute(sql)
for row in crsr.fetchmany(10):
    print(repr(row))
crsr.close()
conn.close()
lib/adodbapi/examples/xls_write.py (new file, 41 lines)

@@ -0,0 +1,41 @@

import datetime

import adodbapi

try:
    import adodbapi.is64bit as is64bit

    is64 = is64bit.Python()
except ImportError:
    is64 = False  # in case the user has an old version of adodbapi
if is64:
    driver = "Microsoft.ACE.OLEDB.12.0"
else:
    driver = "Microsoft.Jet.OLEDB.4.0"
filename = "xx.xls"  # file will be created if it does not exist
extended = 'Extended Properties="Excel 8.0;Readonly=False;"'

constr = "Provider=%s;Data Source=%s;%s" % (driver, filename, extended)

conn = adodbapi.connect(constr)
with conn:  # will auto commit if no errors
    with conn.cursor() as crsr:
        try:
            crsr.execute("drop table SheetOne")
        except:
            pass  # just in case there is one already there

        # create the sheet and the header row and set the types for the columns
        crsr.execute(
            "create table SheetOne (Name varchar, Rank varchar, SrvcNum integer, Weight float, Birth date)"
        )

        sql = "INSERT INTO SheetOne (name, rank , srvcnum, weight, birth) values (?,?,?,?,?)"

        data = ("Mike Murphy", "SSG", 123456789, 167.8, datetime.date(1922, 12, 27))
        crsr.execute(sql, data)  # write the first row of data
        crsr.execute(
            sql, ["John Jones", "Pvt", 987654321, 140.0, datetime.date(1921, 7, 4)]
        )  # another row of data
conn.close()
print("Created spreadsheet=%s worksheet=%s" % (filename, "SheetOne"))
lib/adodbapi/is64bit.py (new file, 41 lines)

@@ -0,0 +1,41 @@

"""is64bit.Python() --> boolean value of detected Python word size. is64bit.os() --> os build version"""
import sys


def Python():
    if sys.platform == "cli":  # IronPython
        import System

        return System.IntPtr.Size == 8
    else:
        try:
            return sys.maxsize > 2147483647
        except AttributeError:
            return sys.maxint > 2147483647


def os():
    import platform

    pm = platform.machine()
    if pm != ".." and pm.endswith("64"):  # recent Python (not Iron)
        return True
    else:
        import os

        if "PROCESSOR_ARCHITEW6432" in os.environ:
            return True  # 32 bit program running on 64 bit Windows
        try:
            return os.environ["PROCESSOR_ARCHITECTURE"].endswith(
                "64"
            )  # 64 bit Windows 64 bit program
        except (IndexError, KeyError):
            pass  # not Windows
        try:
            return "64" in platform.architecture()[0]  # this often works in Linux
        except:
            return False  # is an older version of Python, assume also an older os (best we can guess)


if __name__ == "__main__":
    print("is64bit.Python() =", Python(), "is64bit.os() =", os())
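
On CPython the word-size test reduces to a one-liner; a minimal sketch of the branch used above:

# Minimal sketch of the CPython branch above: sys.maxsize reflects the
# native word size (2**63 - 1 on a 64-bit build).
import sys

print(sys.maxsize > 2147483647)  # True on a 64-bit Python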
lib/adodbapi/license.txt (new file, 506 lines; diff view truncated below)

@@ -0,0 +1,506 @@

GNU LESSER GENERAL PUBLIC LICENSE
Version 2.1, February 1999

Copyright (C) 1991, 1999 Free Software Foundation, Inc.
59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.

[This is the first released version of the Lesser GPL. It also counts
as the successor of the GNU Library Public License, version 2, hence
the version number 2.1.]

Preamble

The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
Licenses are intended to guarantee your freedom to share and change
free software--to make sure the software is free for all its users.

This license, the Lesser General Public License, applies to some
specially designated software packages--typically libraries--of the
Free Software Foundation and other authors who decide to use it. You
can use it too, but we suggest you first think carefully about whether
this license or the ordinary General Public License is the better
strategy to use in any particular case, based on the explanations below.

When we speak of free software, we are referring to freedom of use,
not price. Our General Public Licenses are designed to make sure that
you have the freedom to distribute copies of free software (and charge
for this service if you wish); that you receive source code or can get
it if you want it; that you can change the software and use pieces of
it in new free programs; and that you are informed that you can do
these things.

To protect your rights, we need to make restrictions that forbid
distributors to deny you these rights or to ask you to surrender these
rights. These restrictions translate to certain responsibilities for
you if you distribute copies of the library or if you modify it.

For example, if you distribute copies of the library, whether gratis
or for a fee, you must give the recipients all the rights that we gave
you. You must make sure that they, too, receive or can get the source
code. If you link other code with the library, you must provide
complete object files to the recipients, so that they can relink them
with the library after making changes to the library and recompiling
it. And you must show them these terms so they know their rights.

We protect your rights with a two-step method: (1) we copyright the
library, and (2) we offer you this license, which gives you legal
permission to copy, distribute and/or modify the library.

To protect each distributor, we want to make it very clear that
there is no warranty for the free library. Also, if the library is
modified by someone else and passed on, the recipients should know
that what they have is not the original version, so that the original
author's reputation will not be affected by problems that might be
introduced by others.

Finally, software patents pose a constant threat to the existence of
any free program. We wish to make sure that a company cannot
effectively restrict the users of a free program by obtaining a
restrictive license from a patent holder. Therefore, we insist that
any patent license obtained for a version of the library must be
consistent with the full freedom of use specified in this license.

Most GNU software, including some libraries, is covered by the
ordinary GNU General Public License. This license, the GNU Lesser
General Public License, applies to certain designated libraries, and
is quite different from the ordinary General Public License. We use
this license for certain libraries in order to permit linking those
libraries into non-free programs.

When a program is linked with a library, whether statically or using
a shared library, the combination of the two is legally speaking a
combined work, a derivative of the original library. The ordinary
General Public License therefore permits such linking only if the
entire combination fits its criteria of freedom. The Lesser General
Public License permits more lax criteria for linking other code with
the library.

We call this license the "Lesser" General Public License because it
does Less to protect the user's freedom than the ordinary General
Public License. It also provides other free software developers Less
of an advantage over competing non-free programs. These disadvantages
are the reason we use the ordinary General Public License for many
libraries. However, the Lesser license provides advantages in certain
special circumstances.

For example, on rare occasions, there may be a special need to
encourage the widest possible use of a certain library, so that it becomes
a de-facto standard. To achieve this, non-free programs must be
allowed to use the library. A more frequent case is that a free
library does the same job as widely used non-free libraries. In this
case, there is little to gain by limiting the free library to free
software only, so we use the Lesser General Public License.

In other cases, permission to use a particular library in non-free
programs enables a greater number of people to use a large body of
free software. For example, permission to use the GNU C Library in
non-free programs enables many more people to use the whole GNU
operating system, as well as its variant, the GNU/Linux operating
system.

Although the Lesser General Public License is Less protective of the
users' freedom, it does ensure that the user of a program that is
linked with the Library has the freedom and the wherewithal to run
that program using a modified version of the Library.

The precise terms and conditions for copying, distribution and
modification follow. Pay close attention to the difference between a
"work based on the library" and a "work that uses the library". The
former contains code derived from the library, whereas the latter must
be combined with the library in order to run.

GNU LESSER GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION

0. This License Agreement applies to any software library or other
program which contains a notice placed by the copyright holder or
other authorized party saying it may be distributed under the terms of
this Lesser General Public License (also called "this License").
Each licensee is addressed as "you".

A "library" means a collection of software functions and/or data
prepared so as to be conveniently linked with application programs
(which use some of those functions and data) to form executables.

The "Library", below, refers to any such software library or work
which has been distributed under these terms. A "work based on the
Library" means either the Library or any derivative work under
copyright law: that is to say, a work containing the Library or a
portion of it, either verbatim or with modifications and/or translated
straightforwardly into another language. (Hereinafter, translation is
included without limitation in the term "modification".)

"Source code" for a work means the preferred form of the work for
making modifications to it. For a library, complete source code means
|
||||
all the source code for all modules it contains, plus any associated
|
||||
interface definition files, plus the scripts used to control compilation
|
||||
and installation of the library.
|
||||
|
||||
Activities other than copying, distribution and modification are not
|
||||
covered by this License; they are outside its scope. The act of
|
||||
running a program using the Library is not restricted, and output from
|
||||
such a program is covered only if its contents constitute a work based
|
||||
on the Library (independent of the use of the Library in a tool for
|
||||
writing it). Whether that is true depends on what the Library does
|
||||
and what the program that uses the Library does.
|
||||
|
||||
1. You may copy and distribute verbatim copies of the Library's
|
||||
complete source code as you receive it, in any medium, provided that
|
||||
you conspicuously and appropriately publish on each copy an
|
||||
appropriate copyright notice and disclaimer of warranty; keep intact
|
||||
all the notices that refer to this License and to the absence of any
|
||||
warranty; and distribute a copy of this License along with the
|
||||
Library.
|
||||
You may charge a fee for the physical act of transferring a copy,
|
||||
and you may at your option offer warranty protection in exchange for a
|
||||
fee.
|
||||
|
||||
2. You may modify your copy or copies of the Library or any portion
|
||||
of it, thus forming a work based on the Library, and copy and
|
||||
distribute such modifications or work under the terms of Section 1
|
||||
above, provided that you also meet all of these conditions:
|
||||
|
||||
a) The modified work must itself be a software library.
|
||||
|
||||
b) You must cause the files modified to carry prominent notices
|
||||
stating that you changed the files and the date of any change.
|
||||
|
||||
c) You must cause the whole of the work to be licensed at no
|
||||
charge to all third parties under the terms of this License.
|
||||
|
||||
d) If a facility in the modified Library refers to a function or a
|
||||
table of data to be supplied by an application program that uses
|
||||
the facility, other than as an argument passed when the facility
|
||||
is invoked, then you must make a good faith effort to ensure that,
|
||||
in the event an application does not supply such function or
|
||||
table, the facility still operates, and performs whatever part of
|
||||
its purpose remains meaningful.
|
||||
|
||||
(For example, a function in a library to compute square roots has
|
||||
a purpose that is entirely well-defined independent of the
|
||||
application. Therefore, Subsection 2d requires that any
|
||||
application-supplied function or table used by this function must
|
||||
be optional: if the application does not supply it, the square
|
||||
root function must still compute square roots.)
|
||||
|
||||
These requirements apply to the modified work as a whole. If
|
||||
identifiable sections of that work are not derived from the Library,
|
||||
and can be reasonably considered independent and separate works in
|
||||
themselves, then this License, and its terms, do not apply to those
|
||||
sections when you distribute them as separate works. But when you
|
||||
distribute the same sections as part of a whole which is a work based
|
||||
on the Library, the distribution of the whole must be on the terms of
|
||||
this License, whose permissions for other licensees extend to the
|
||||
entire whole, and thus to each and every part regardless of who wrote
|
||||
it.
|
||||
|
||||
Thus, it is not the intent of this section to claim rights or contest
|
||||
your rights to work written entirely by you; rather, the intent is to
|
||||
exercise the right to control the distribution of derivative or
|
||||
collective works based on the Library.
|
||||
|
||||
In addition, mere aggregation of another work not based on the Library
|
||||
with the Library (or with a work based on the Library) on a volume of
|
||||
a storage or distribution medium does not bring the other work under
|
||||
the scope of this License.
|
||||
|
||||
3. You may opt to apply the terms of the ordinary GNU General Public
|
||||
License instead of this License to a given copy of the Library. To do
|
||||
this, you must alter all the notices that refer to this License, so
|
||||
that they refer to the ordinary GNU General Public License, version 2,
|
||||
instead of to this License. (If a newer version than version 2 of the
|
||||
ordinary GNU General Public License has appeared, then you can specify
|
||||
that version instead if you wish.) Do not make any other change in
|
||||
these notices.
|
||||
|
||||
Once this change is made in a given copy, it is irreversible for
|
||||
that copy, so the ordinary GNU General Public License applies to all
|
||||
subsequent copies and derivative works made from that copy.
|
||||
|
||||
This option is useful when you wish to copy part of the code of
|
||||
the Library into a program that is not a library.
|
||||
|
||||
4. You may copy and distribute the Library (or a portion or
|
||||
derivative of it, under Section 2) in object code or executable form
|
||||
under the terms of Sections 1 and 2 above provided that you accompany
|
||||
it with the complete corresponding machine-readable source code, which
|
||||
must be distributed under the terms of Sections 1 and 2 above on a
|
||||
medium customarily used for software interchange.
|
||||
|
||||
If distribution of object code is made by offering access to copy
|
||||
from a designated place, then offering equivalent access to copy the
|
||||
source code from the same place satisfies the requirement to
|
||||
distribute the source code, even though third parties are not
|
||||
compelled to copy the source along with the object code.
|
||||
|
||||
5. A program that contains no derivative of any portion of the
|
||||
Library, but is designed to work with the Library by being compiled or
|
||||
linked with it, is called a "work that uses the Library". Such a
|
||||
work, in isolation, is not a derivative work of the Library, and
|
||||
therefore falls outside the scope of this License.
|
||||
|
||||
However, linking a "work that uses the Library" with the Library
|
||||
creates an executable that is a derivative of the Library (because it
|
||||
contains portions of the Library), rather than a "work that uses the
|
||||
library". The executable is therefore covered by this License.
|
||||
Section 6 states terms for distribution of such executables.
|
||||
|
||||
When a "work that uses the Library" uses material from a header file
|
||||
that is part of the Library, the object code for the work may be a
|
||||
derivative work of the Library even though the source code is not.
|
||||
Whether this is true is especially significant if the work can be
|
||||
linked without the Library, or if the work is itself a library. The
|
||||
threshold for this to be true is not precisely defined by law.
|
||||
|
||||
If such an object file uses only numerical parameters, data
|
||||
structure layouts and accessors, and small macros and small inline
|
||||
functions (ten lines or less in length), then the use of the object
|
||||
file is unrestricted, regardless of whether it is legally a derivative
|
||||
work. (Executables containing this object code plus portions of the
|
||||
Library will still fall under Section 6.)
|
||||
|
||||
Otherwise, if the work is a derivative of the Library, you may
|
||||
distribute the object code for the work under the terms of Section 6.
|
||||
Any executables containing that work also fall under Section 6,
|
||||
whether or not they are linked directly with the Library itself.
|
||||
|
||||
6. As an exception to the Sections above, you may also combine or
|
||||
link a "work that uses the Library" with the Library to produce a
|
||||
work containing portions of the Library, and distribute that work
|
||||
under terms of your choice, provided that the terms permit
|
||||
modification of the work for the customer's own use and reverse
|
||||
engineering for debugging such modifications.
|
||||
|
||||
You must give prominent notice with each copy of the work that the
|
||||
Library is used in it and that the Library and its use are covered by
|
||||
this License. You must supply a copy of this License. If the work
|
||||
during execution displays copyright notices, you must include the
|
||||
copyright notice for the Library among them, as well as a reference
|
||||
directing the user to the copy of this License. Also, you must do one
|
||||
of these things:
|
||||
|
||||
a) Accompany the work with the complete corresponding
|
||||
machine-readable source code for the Library including whatever
|
||||
changes were used in the work (which must be distributed under
|
||||
Sections 1 and 2 above); and, if the work is an executable linked
|
||||
with the Library, with the complete machine-readable "work that
|
||||
uses the Library", as object code and/or source code, so that the
|
||||
user can modify the Library and then relink to produce a modified
|
||||
executable containing the modified Library. (It is understood
|
||||
that the user who changes the contents of definitions files in the
|
||||
Library will not necessarily be able to recompile the application
|
||||
to use the modified definitions.)
|
||||
|
||||
b) Use a suitable shared library mechanism for linking with the
|
||||
Library. A suitable mechanism is one that (1) uses at run time a
|
||||
copy of the library already present on the user's computer system,
|
||||
rather than copying library functions into the executable, and (2)
|
||||
will operate properly with a modified version of the library, if
|
||||
the user installs one, as long as the modified version is
|
||||
interface-compatible with the version that the work was made with.
|
||||
|
||||
c) Accompany the work with a written offer, valid for at
|
||||
least three years, to give the same user the materials
|
||||
specified in Subsection 6a, above, for a charge no more
|
||||
than the cost of performing this distribution.
|
||||
|
||||
d) If distribution of the work is made by offering access to copy
|
||||
from a designated place, offer equivalent access to copy the above
|
||||
specified materials from the same place.
|
||||
|
||||
e) Verify that the user has already received a copy of these
|
||||
materials or that you have already sent this user a copy.
|
||||
|
||||
For an executable, the required form of the "work that uses the
|
||||
Library" must include any data and utility programs needed for
|
||||
reproducing the executable from it. However, as a special exception,
|
||||
the materials to be distributed need not include anything that is
|
||||
normally distributed (in either source or binary form) with the major
|
||||
components (compiler, kernel, and so on) of the operating system on
|
||||
which the executable runs, unless that component itself accompanies
|
||||
the executable.
|
||||
|
||||
It may happen that this requirement contradicts the license
|
||||
restrictions of other proprietary libraries that do not normally
|
||||
accompany the operating system. Such a contradiction means you cannot
|
||||
use both them and the Library together in an executable that you
|
||||
distribute.
|
||||
|
||||
7. You may place library facilities that are a work based on the
|
||||
Library side-by-side in a single library together with other library
|
||||
facilities not covered by this License, and distribute such a combined
|
||||
library, provided that the separate distribution of the work based on
|
||||
the Library and of the other library facilities is otherwise
|
||||
permitted, and provided that you do these two things:
|
||||
|
||||
a) Accompany the combined library with a copy of the same work
|
||||
based on the Library, uncombined with any other library
|
||||
facilities. This must be distributed under the terms of the
|
||||
Sections above.
|
||||
|
||||
b) Give prominent notice with the combined library of the fact
|
||||
that part of it is a work based on the Library, and explaining
|
||||
where to find the accompanying uncombined form of the same work.
|
||||
|
||||
8. You may not copy, modify, sublicense, link with, or distribute
|
||||
the Library except as expressly provided under this License. Any
|
||||
attempt otherwise to copy, modify, sublicense, link with, or
|
||||
distribute the Library is void, and will automatically terminate your
|
||||
rights under this License. However, parties who have received copies,
|
||||
or rights, from you under this License will not have their licenses
|
||||
terminated so long as such parties remain in full compliance.
|
||||
|
||||
9. You are not required to accept this License, since you have not
|
||||
signed it. However, nothing else grants you permission to modify or
|
||||
distribute the Library or its derivative works. These actions are
|
||||
prohibited by law if you do not accept this License. Therefore, by
|
||||
modifying or distributing the Library (or any work based on the
|
||||
Library), you indicate your acceptance of this License to do so, and
|
||||
all its terms and conditions for copying, distributing or modifying
|
||||
the Library or works based on it.
|
||||
|
||||
10. Each time you redistribute the Library (or any work based on the
|
||||
Library), the recipient automatically receives a license from the
|
||||
original licensor to copy, distribute, link with or modify the Library
|
||||
subject to these terms and conditions. You may not impose any further
|
||||
restrictions on the recipients' exercise of the rights granted herein.
|
||||
You are not responsible for enforcing compliance by third parties with
|
||||
this License.
|
||||
|
||||
11. If, as a consequence of a court judgment or allegation of patent
|
||||
infringement or for any other reason (not limited to patent issues),
|
||||
conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot
|
||||
distribute so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you
|
||||
may not distribute the Library at all. For example, if a patent
|
||||
license would not permit royalty-free redistribution of the Library by
|
||||
all those who receive copies directly or indirectly through you, then
|
||||
the only way you could satisfy both it and this License would be to
|
||||
refrain entirely from distribution of the Library.
|
||||
|
||||
If any portion of this section is held invalid or unenforceable under any
|
||||
particular circumstance, the balance of the section is intended to apply,
|
||||
and the section as a whole is intended to apply in other circumstances.
|
||||
|
||||
It is not the purpose of this section to induce you to infringe any
|
||||
patents or other property right claims or to contest validity of any
|
||||
such claims; this section has the sole purpose of protecting the
|
||||
integrity of the free software distribution system which is
|
||||
implemented by public license practices. Many people have made
|
||||
generous contributions to the wide range of software distributed
|
||||
through that system in reliance on consistent application of that
|
||||
system; it is up to the author/donor to decide if he or she is willing
|
||||
to distribute software through any other system and a licensee cannot
|
||||
impose that choice.
|
||||
|
||||
This section is intended to make thoroughly clear what is believed to
|
||||
be a consequence of the rest of this License.
|
||||
|
||||
12. If the distribution and/or use of the Library is restricted in
|
||||
certain countries either by patents or by copyrighted interfaces, the
|
||||
original copyright holder who places the Library under this License may add
|
||||
an explicit geographical distribution limitation excluding those countries,
|
||||
so that distribution is permitted only in or among countries not thus
|
||||
excluded. In such case, this License incorporates the limitation as if
|
||||
written in the body of this License.
|
||||
|
||||
13. The Free Software Foundation may publish revised and/or new
|
||||
versions of the Lesser General Public License from time to time.
|
||||
Such new versions will be similar in spirit to the present version,
|
||||
but may differ in detail to address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the Library
|
||||
specifies a version number of this License which applies to it and
|
||||
"any later version", you have the option of following the terms and
|
||||
conditions either of that version or of any later version published by
|
||||
the Free Software Foundation. If the Library does not specify a
|
||||
license version number, you may choose any version ever published by
|
||||
the Free Software Foundation.
|
||||
|
||||
14. If you wish to incorporate parts of the Library into other free
|
||||
programs whose distribution conditions are incompatible with these,
|
||||
write to the author to ask for permission. For software which is
|
||||
copyrighted by the Free Software Foundation, write to the Free
|
||||
Software Foundation; we sometimes make exceptions for this. Our
|
||||
decision will be guided by the two goals of preserving the free status
|
||||
of all derivatives of our free software and of promoting the sharing
|
||||
and reuse of software generally.
|
||||
|
||||
NO WARRANTY
|
||||
|
||||
15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
|
||||
WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
|
||||
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
|
||||
OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
|
||||
KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
|
||||
LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
|
||||
THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
|
||||
WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
|
||||
AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
|
||||
FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
|
||||
CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
|
||||
LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
|
||||
RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
|
||||
FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
|
||||
SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
DAMAGES.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Libraries
|
||||
|
||||
If you develop a new library, and you want it to be of the greatest
|
||||
possible use to the public, we recommend making it free software that
|
||||
everyone can redistribute and change. You can do so by permitting
|
||||
redistribution under these terms (or, alternatively, under the terms of the
|
||||
ordinary General Public License).
|
||||
|
||||
To apply these terms, attach the following notices to the library. It is
|
||||
safest to attach them to the start of each source file to most effectively
|
||||
convey the exclusion of warranty; and each file should have at least the
|
||||
"copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the library's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This library is free software; you can redistribute it and/or
|
||||
modify it under the terms of the GNU Lesser General Public
|
||||
License as published by the Free Software Foundation; either
|
||||
version 2.1 of the License, or (at your option) any later version.
|
||||
|
||||
This library is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
Lesser General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Lesser General Public
|
||||
License along with this library; if not, write to the Free Software
|
||||
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
You should also get your employer (if you work as a programmer) or your
|
||||
school, if any, to sign a "copyright disclaimer" for the library, if
|
||||
necessary. Here is a sample; alter the names:
|
||||
|
||||
Yoyodyne, Inc., hereby disclaims all copyright interest in the
|
||||
library `Frob' (a library for tweaking knobs) written by James Random Hacker.
|
||||
|
||||
<signature of Ty Coon>, 1 April 1990
|
||||
Ty Coon, President of Vice
|
||||
|
||||
That's all there is to it!
|
||||
|
144
lib/adodbapi/process_connect_string.py
Normal file
144
lib/adodbapi/process_connect_string.py
Normal file

@@ -0,0 +1,144 @@
""" a clumsy attempt at a macro language to let the programmer execute code on the server (ex: determine 64bit)"""
|
||||
from . import is64bit as is64bit
|
||||
|
||||
|
||||
def macro_call(macro_name, args, kwargs):
|
||||
"""allow the programmer to perform limited processing on the server by passing macro names and args
|
||||
|
||||
:new_key - the key name the macro will create
|
||||
:args[0] - macro name
|
||||
:args[1:] - any arguments
|
||||
:code - the value of the keyword item
|
||||
:kwargs - the connection keyword dictionary. ??key has been removed
|
||||
--> the value to put in for kwargs['name'] = value
|
||||
"""
|
||||
if isinstance(args, (str, str)):
|
||||
args = [
|
||||
args
|
||||
] # the user forgot to pass a sequence, so make a string into args[0]
|
||||
new_key = args[0]
|
||||
try:
|
||||
if macro_name == "is64bit":
|
||||
if is64bit.Python(): # if on 64 bit Python
|
||||
return new_key, args[1] # return first argument
|
||||
else:
|
||||
try:
|
||||
return new_key, args[2] # else return second argument (if defined)
|
||||
except IndexError:
|
||||
return new_key, "" # else return blank
|
||||
|
||||
elif (
|
||||
macro_name == "getuser"
|
||||
): # get the name of the user the server is logged in under
|
||||
if not new_key in kwargs:
|
||||
import getpass
|
||||
|
||||
return new_key, getpass.getuser()
|
||||
|
||||
elif macro_name == "getnode": # get the name of the computer running the server
|
||||
import platform
|
||||
|
||||
try:
|
||||
return new_key, args[1] % platform.node()
|
||||
except IndexError:
|
||||
return new_key, platform.node()
|
||||
|
||||
elif macro_name == "getenv": # expand the server's environment variable args[1]
|
||||
try:
|
||||
dflt = args[2] # if not found, default from args[2]
|
||||
except IndexError: # or blank
|
||||
dflt = ""
|
||||
return new_key, os.environ.get(args[1], dflt)
|
||||
|
||||
elif macro_name == "auto_security":
|
||||
if (
|
||||
not "user" in kwargs or not kwargs["user"]
|
||||
): # missing, blank, or Null username
|
||||
return new_key, "Integrated Security=SSPI"
|
||||
return new_key, "User ID=%(user)s; Password=%(password)s" % kwargs
|
||||
|
||||
elif (
|
||||
macro_name == "find_temp_test_path"
|
||||
): # helper function for testing ado operation -- undocumented
|
||||
import os
|
||||
import tempfile
|
||||
|
||||
return new_key, os.path.join(
|
||||
tempfile.gettempdir(), "adodbapi_test", args[1]
|
||||
)
|
||||
|
||||
raise ValueError("Unknown connect string macro=%s" % macro_name)
|
||||
except:
|
||||
raise ValueError("Error in macro processing %s %s" % (macro_name, repr(args)))
|
||||
|
||||
|
||||
def process(
|
||||
args, kwargs, expand_macros=False
|
||||
): # --> connection string with keyword arguments processed.
|
||||
"""attempts to inject arguments into a connection string using Python "%" operator for strings
|
||||
|
||||
co: adodbapi connection object
|
||||
args: positional parameters from the .connect() call
|
||||
kvargs: keyword arguments from the .connect() call
|
||||
"""
|
||||
try:
|
||||
dsn = args[0]
|
||||
except IndexError:
|
||||
dsn = None
|
||||
if isinstance(
|
||||
dsn, dict
|
||||
): # as a convenience the first argument may be django settings
|
||||
kwargs.update(dsn)
|
||||
elif (
|
||||
dsn
|
||||
): # the connection string is passed to the connection as part of the keyword dictionary
|
||||
kwargs["connection_string"] = dsn
|
||||
try:
|
||||
a1 = args[1]
|
||||
except IndexError:
|
||||
a1 = None
|
||||
# historically, the second positional argument might be a timeout value
|
||||
if isinstance(a1, int):
|
||||
kwargs["timeout"] = a1
|
||||
# if the second positional argument is a string, then it is user
|
||||
elif isinstance(a1, str):
|
||||
kwargs["user"] = a1
|
||||
# if the second positional argument is a dictionary, use it as keyword arguments, too
|
||||
elif isinstance(a1, dict):
|
||||
kwargs.update(a1)
|
||||
try:
|
||||
kwargs["password"] = args[2] # the third positional argument is password
|
||||
kwargs["host"] = args[3] # the fourth positional argument is host name
|
||||
kwargs["database"] = args[4] # the fifth positional argument is database name
|
||||
except IndexError:
|
||||
pass
|
||||
|
||||
# make sure connection string is defined somehow
|
||||
if not "connection_string" in kwargs:
|
||||
try: # perhaps 'dsn' was defined
|
||||
kwargs["connection_string"] = kwargs["dsn"]
|
||||
except KeyError:
|
||||
try: # as a last effort, use the "host" keyword
|
||||
kwargs["connection_string"] = kwargs["host"]
|
||||
except KeyError:
|
||||
raise TypeError("Must define 'connection_string' for ado connections")
|
||||
if expand_macros:
|
||||
for kwarg in list(kwargs.keys()):
|
||||
if kwarg.startswith("macro_"): # If a key defines a macro
|
||||
macro_name = kwarg[6:] # name without the "macro_"
|
||||
macro_code = kwargs.pop(
|
||||
kwarg
|
||||
) # we remove the macro_key and get the code to execute
|
||||
new_key, rslt = macro_call(
|
||||
macro_name, macro_code, kwargs
|
||||
) # run the code in the local context
|
||||
kwargs[new_key] = rslt # put the result back in the keywords dict
|
||||
# special processing for PyRO IPv6 host address
|
||||
try:
|
||||
s = kwargs["proxy_host"]
|
||||
if ":" in s: # it is an IPv6 address
|
||||
if s[0] != "[": # is not surrounded by brackets
|
||||
kwargs["proxy_host"] = s.join(("[", "]")) # put it in brackets
|
||||
except KeyError:
|
||||
pass
|
||||
return kwargs
|
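
# Usage sketch (an editor's illustration, not part of the original module).
# A "macro_..." keyword is popped from the connection kwargs; the rest of its
# name selects the macro and its value supplies [new_key, arg1, arg2, ...].
# The key name "prov" and the provider strings below are hypothetical:
#
#   kwargs = {
#       "host": "Provider=%(prov)s;Data Source=.;Integrated Security=SSPI;",
#       "macro_is64bit": ["prov", "SQLNCLI11", "SQLOLEDB"],
#   }
#   kwargs = process((), kwargs, expand_macros=True)
#   # kwargs["prov"] is now "SQLNCLI11" on 64-bit Python, else "SQLOLEDB",
#   # and kwargs["connection_string"] was filled in from the "host" keyword.
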
92
lib/adodbapi/readme.txt
Normal file
92
lib/adodbapi/readme.txt
Normal file

@@ -0,0 +1,92 @@
Project
-------
adodbapi

A Python DB-API 2.0 (PEP-249) module that makes it easy to use Microsoft ADO
for connecting with databases and other data sources
using either CPython or IronPython.

Home page: <http://sourceforge.net/projects/adodbapi>

Features:
* 100% DB-API 2.0 (PEP-249) compliant (including most extensions and recommendations).
* Includes pyunit testcases that describe how to use the module.
* Fully implemented in Python -- runs in Python 2.5+, Python 3.0+, and IronPython 2.6+.
* Licensed under the LGPL license, which means that it can be used freely even in commercial programs subject to certain restrictions.
* The user can choose between paramstyles: 'qmark' 'named' 'format' 'pyformat' 'dynamic'
* Supports data retrieval by column name e.g.:
      for row in myCursor.execute("select name,age from students"):
          print("Student", row.name, "is", row.age, "years old.")
* Supports user-definable system-to-Python data conversion functions (selected by ADO data type, or by column)

Prerequisites:
* CPython 2.7 or 3.5 or higher
  and pywin32 (Mark Hammond's python for windows extensions.)
or
  IronPython 2.7 or higher. (works in IPy2.0 for all data types except BUFFER)

Installation:
* (C-Python on Windows): Install pywin32 ("pip install pywin32") which includes adodbapi.
* (IronPython on Windows): Download adodbapi from http://sf.net/projects/adodbapi. Unpack the zip.
  Open a command window as an administrator. CD to the folder containing the unzipped files.
  Run "setup.py install" using the IronPython of your choice.

NOTE: ...........
If you do not like the new default operation of returning Numeric columns as decimal.Decimal,
you can select other options using the user-defined conversion feature.
Try:
    adodbapi.apibase.variantConversions[adodbapi.ado_consts.adNumeric] = adodbapi.apibase.cvtString
or:
    adodbapi.apibase.variantConversions[adodbapi.ado_consts.adNumeric] = adodbapi.apibase.cvtFloat
or:
    adodbapi.apibase.variantConversions[adodbapi.ado_consts.adNumeric] = write_your_own_conversion_function
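For instance, a user-written conversion function is simply a callable that receives
the raw value as ADO delivers it and returns whatever Python object you prefer.
A minimal sketch (the rounding rule here is only an example, not part of the package):

    def write_your_own_conversion_function(variant):
        # "variant" is the raw value as delivered by ADO
        return round(float(variant), 2)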
............

notes for 2.6.2:
The definitive source has been moved to https://github.com/mhammond/pywin32/tree/master/adodbapi.
Remote has proven too hard to configure and test with Pyro4. I am moving it to unsupported status
until I can change to a different connection method.

What's new in version 2.6
A cursor.prepare() method and support for prepared SQL statements.
Lots of refactoring, especially of the Remote and Server modules (still to be treated as Beta code).
The quick start document 'quick_reference.odt' will export as a nice-looking pdf.
Added paramstyles 'pyformat' and 'dynamic'. If your 'paramstyle' is 'named' you _must_ pass a dictionary of
parameters to your .execute() method. If your 'paramstyle' is 'format', 'pyformat', or 'dynamic', you _may_
pass a dictionary of parameters -- provided your SQL operation string is formatted correctly.
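For example, the same query under two paramstyles (a sketch; the table name and
values are illustrative):

    crsr.paramstyle = 'named'
    crsr.execute("select * from students where age = :a", {'a': 21})
    crsr.paramstyle = 'qmark'    # the default
    crsr.execute("select * from students where age = ?", [21])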

What's new in version 2.5
Remote module: (works on Linux!) allows a Windows computer to serve ADO databases via PyRO
Server module: PyRO server for ADO. Run using a command like: C:\>python -m adodbapi.server
(server has simple connection string macros: is64bit, getuser, sql_provider, auto_security)
Brief documentation included. See adodbapi/examples folder, adodbapi.rtf
New connection method conn.get_table_names() --> list of names of tables in database

Vastly refactored. Data conversion things have been moved to the new adodbapi.apibase module.
Many former module-level attributes are now class attributes. (Should be more thread-safe.)
Connection objects are now context managers for transactions and will commit or rollback.
Cursor objects are context managers and will automatically close themselves. (See the sketch below.)
Autocommit can be switched on and off.
Keyword and positional arguments on the connect() method work as documented in PEP 249.
Keyword arguments from the connect call can be formatted into the connection string.
New keyword arguments defined, such as: autocommit, paramstyle, remote_proxy, remote_port.
*** Breaking change: variantConversion lookups are simplified: the following will raise KeyError:
    oldconverter = adodbapi.variantConversions[adodbapi.adoStringTypes]
Refactor as:
    oldconverter = adodbapi.variantConversions[adodbapi.adoStringTypes[0]]
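A sketch of the context-manager behavior described above (the connection string
and SQL are illustrative):

    with adodbapi.connect(connection_string) as conn:  # commits on success, rolls back on exception
        with conn.cursor() as crsr:                    # the cursor closes itself on exit
            crsr.execute("update students set age = age + 1")
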
License
-------
LGPL, see http://www.opensource.org/licenses/lgpl-license.php

Documentation
-------------

Look at adodbapi/quick_reference.md
and http://www.python.org/topics/database/DatabaseAPI-2.0.html;
read the examples in adodbapi/examples
and look at the test cases in the adodbapi/test directory.

Mailing lists
-------------
The adodbapi mailing lists have been deactivated. Submit comments to the
pywin32 or IronPython mailing lists.
-- the bug tracker on sourceforge.net/projects/adodbapi may be checked (infrequently).
-- please use: https://github.com/mhammond/pywin32/issues
634
lib/adodbapi/remote.py
Normal file
634
lib/adodbapi/remote.py
Normal file

@@ -0,0 +1,634 @@
"""adodbapi.remote - A python DB API 2.0 (PEP 249) interface to Microsoft ADO
|
||||
|
||||
Copyright (C) 2002 Henrik Ekelund, version 2.1 by Vernon Cole
|
||||
* http://sourceforge.net/projects/pywin32
|
||||
* http://sourceforge.net/projects/adodbapi
|
||||
|
||||
This library is free software; you can redistribute it and/or
|
||||
modify it under the terms of the GNU Lesser General Public
|
||||
License as published by the Free Software Foundation; either
|
||||
version 2.1 of the License, or (at your option) any later version.
|
||||
|
||||
This library is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
Lesser General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Lesser General Public
|
||||
License along with this library; if not, write to the Free Software
|
||||
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
|
||||
django adaptations and refactoring thanks to Adam Vandenberg
|
||||
|
||||
DB-API 2.0 specification: http://www.python.org/dev/peps/pep-0249/
|
||||
|
||||
This module source should run correctly in CPython versions 2.5 and later,
|
||||
or IronPython version 2.7 and later,
|
||||
or, after running through 2to3.py, CPython 3.0 or later.
|
||||
"""
|
||||
|
||||
__version__ = "2.6.0.4"
|
||||
version = "adodbapi.remote v" + __version__
|
||||
|
||||
import array
|
||||
import datetime
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
|
||||
# Pyro4 is required for server and remote operation --> https://pypi.python.org/pypi/Pyro4/
|
||||
try:
|
||||
import Pyro4
|
||||
except ImportError:
|
||||
print('* * * Sorry, server operation requires Pyro4. Please "pip import" it.')
|
||||
exit(11)
|
||||
|
||||
import adodbapi
|
||||
import adodbapi.apibase as api
|
||||
import adodbapi.process_connect_string
|
||||
from adodbapi.apibase import ProgrammingError
|
||||
|
||||
_BaseException = api._BaseException
|
||||
|
||||
sys.excepthook = Pyro4.util.excepthook
|
||||
Pyro4.config.PREFER_IP_VERSION = 0 # allow system to prefer IPv6
|
||||
Pyro4.config.COMMTIMEOUT = 40.0 # a bit longer than the default SQL server Gtimeout
|
||||
Pyro4.config.SERIALIZER = "pickle"
|
||||
|
||||
try:
|
||||
verbose = int(os.environ["ADODBAPI_VERBOSE"])
|
||||
except:
|
||||
verbose = False
|
||||
if verbose:
|
||||
print(version)
|
||||
|
||||
# --- define objects to smooth out Python3 <-> Python 2.x differences
|
||||
unicodeType = str # this line will be altered by 2to3.py to '= str'
|
||||
longType = int # this line will be altered by 2to3.py to '= int'
|
||||
StringTypes = str
|
||||
makeByteBuffer = bytes
|
||||
memoryViewType = memoryview
|
||||
|
||||
# -----------------------------------------------------------
|
||||
# conversion functions mandated by PEP 249
|
||||
Binary = makeByteBuffer # override the function from apibase.py
|
||||
|
||||
|
||||
def Date(year, month, day):
|
||||
return datetime.date(year, month, day) # dateconverter.Date(year,month,day)
|
||||
|
||||
|
||||
def Time(hour, minute, second):
|
||||
return datetime.time(hour, minute, second) # dateconverter.Time(hour,minute,second)
|
||||
|
||||
|
||||
def Timestamp(year, month, day, hour, minute, second):
|
||||
return datetime.datetime(year, month, day, hour, minute, second)
|
||||
|
||||
|
||||
def DateFromTicks(ticks):
|
||||
return Date(*time.gmtime(ticks)[:3])
|
||||
|
||||
|
||||
def TimeFromTicks(ticks):
|
||||
return Time(*time.gmtime(ticks)[3:6])
|
||||
|
||||
|
||||
def TimestampFromTicks(ticks):
|
||||
return Timestamp(*time.gmtime(ticks)[:6])
|
||||
|
||||
|
||||
def connect(*args, **kwargs): # --> a remote db-api connection object
|
||||
"""Create and open a remote db-api database connection object"""
|
||||
# process the argument list the programmer gave us
|
||||
kwargs = adodbapi.process_connect_string.process(args, kwargs)
|
||||
# the "proxy_xxx" keys tell us where to find the PyRO proxy server
|
||||
kwargs.setdefault(
|
||||
"pyro_connection", "PYRO:ado.connection@%(proxy_host)s:%(proxy_port)s"
|
||||
)
|
||||
if not "proxy_port" in kwargs:
|
||||
try:
|
||||
pport = os.environ["PROXY_PORT"]
|
||||
except KeyError:
|
||||
pport = 9099
|
||||
kwargs["proxy_port"] = pport
|
||||
if not "proxy_host" in kwargs or not kwargs["proxy_host"]:
|
||||
try:
|
||||
phost = os.environ["PROXY_HOST"]
|
||||
except KeyError:
|
||||
phost = "[::1]" # '127.0.0.1'
|
||||
kwargs["proxy_host"] = phost
|
||||
ado_uri = kwargs["pyro_connection"] % kwargs
|
||||
# ask PyRO make us a remote connection object
|
||||
auto_retry = 3
|
||||
while auto_retry:
|
||||
try:
|
||||
dispatcher = Pyro4.Proxy(ado_uri)
|
||||
if "comm_timeout" in kwargs:
|
||||
dispatcher._pyroTimeout = float(kwargs["comm_timeout"])
|
||||
uri = dispatcher.make_connection()
|
||||
break
|
||||
except Pyro4.core.errors.PyroError:
|
||||
auto_retry -= 1
|
||||
if auto_retry:
|
||||
time.sleep(1)
|
||||
else:
|
||||
raise api.DatabaseError("Cannot create connection to=%s" % ado_uri)
|
||||
|
||||
conn_uri = fix_uri(uri, kwargs) # get a host connection from the proxy server
|
||||
while auto_retry:
|
||||
try:
|
||||
host_conn = Pyro4.Proxy(
|
||||
conn_uri
|
||||
) # bring up an exclusive Pyro connection for my ADO connection
|
||||
break
|
||||
except Pyro4.core.errors.PyroError:
|
||||
auto_retry -= 1
|
||||
if auto_retry:
|
||||
time.sleep(1)
|
||||
else:
|
||||
raise api.DatabaseError(
|
||||
"Cannot create ADO connection object using=%s" % conn_uri
|
||||
)
|
||||
if "comm_timeout" in kwargs:
|
||||
host_conn._pyroTimeout = float(kwargs["comm_timeout"])
|
||||
# make a local clone
|
||||
myConn = Connection()
|
||||
while auto_retry:
|
||||
try:
|
||||
myConn.connect(
|
||||
kwargs, host_conn
|
||||
) # call my connect method -- hand him the host connection
|
||||
break
|
||||
except Pyro4.core.errors.PyroError:
|
||||
auto_retry -= 1
|
||||
if auto_retry:
|
||||
time.sleep(1)
|
||||
else:
|
||||
raise api.DatabaseError(
|
||||
"Pyro error creating connection to/thru=%s" % repr(kwargs)
|
||||
)
|
||||
except _BaseException as e:
|
||||
raise api.DatabaseError(
|
||||
"Error creating remote connection to=%s, e=%s, %s"
|
||||
% (repr(kwargs), repr(e), sys.exc_info()[2])
|
||||
)
|
||||
return myConn
|
||||
|
||||
|
||||
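
# Usage sketch (an editor's illustration, not part of the original module; the
# connection string and address are hypothetical).  The "proxy_*" keywords tell
# connect() where the Pyro4 proxy server, started with "python -m
# adodbapi.server", is listening:
#
#   import adodbapi.remote as remote
#   conn = remote.connect(
#       "Provider=SQLOLEDB.1;Data Source=.;Initial Catalog=test;",
#       proxy_host="[::1]",
#       proxy_port=9099,
#   )

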
def fix_uri(uri, kwargs):
|
||||
"""convert a generic pyro uri with '0.0.0.0' into the address we actually called"""
|
||||
u = uri.asString()
|
||||
s = u.split("[::0]") # IPv6 generic address
|
||||
if len(s) == 1: # did not find one
|
||||
s = u.split("0.0.0.0") # IPv4 generic address
|
||||
if len(s) > 1: # found a generic
|
||||
return kwargs["proxy_host"].join(s) # fill in our address for the host
|
||||
return uri
|
||||
|
||||
|
||||
# # # # # ----- the Class that defines a connection ----- # # # # #
|
||||
class Connection(object):
|
||||
# include connection attributes required by api definition.
|
||||
Warning = api.Warning
|
||||
Error = api.Error
|
||||
InterfaceError = api.InterfaceError
|
||||
DataError = api.DataError
|
||||
DatabaseError = api.DatabaseError
|
||||
OperationalError = api.OperationalError
|
||||
IntegrityError = api.IntegrityError
|
||||
InternalError = api.InternalError
|
||||
NotSupportedError = api.NotSupportedError
|
||||
ProgrammingError = api.ProgrammingError
|
||||
# set up some class attributes
|
||||
paramstyle = api.paramstyle
|
||||
|
||||
@property
|
||||
def dbapi(self): # a proposed db-api version 3 extension.
|
||||
"Return a reference to the DBAPI module for this Connection."
|
||||
return api
|
||||
|
||||
def __init__(self):
|
||||
self.proxy = None
|
||||
self.kwargs = {}
|
||||
self.errorhandler = None
|
||||
self.supportsTransactions = False
|
||||
self.paramstyle = api.paramstyle
|
||||
self.timeout = 30
|
||||
self.cursors = {}
|
||||
|
||||
def connect(self, kwargs, connection_maker):
|
||||
self.kwargs = kwargs
|
||||
if verbose:
|
||||
print('%s attempting: "%s"' % (version, repr(kwargs)))
|
||||
self.proxy = connection_maker
|
||||
##try:
|
||||
ret = self.proxy.connect(kwargs) # ask the server to hook us up
|
||||
##except ImportError, e: # Pyro is trying to import pywinTypes.comerrer
|
||||
## self._raiseConnectionError(api.DatabaseError, 'Proxy cannot connect using=%s' % repr(kwargs))
|
||||
if ret is not True:
|
||||
self._raiseConnectionError(
|
||||
api.OperationalError, "Proxy returns error message=%s" % repr(ret)
|
||||
)
|
||||
|
||||
self.supportsTransactions = self.getIndexedValue("supportsTransactions")
|
||||
self.paramstyle = self.getIndexedValue("paramstyle")
|
||||
self.timeout = self.getIndexedValue("timeout")
|
||||
if verbose:
|
||||
print("adodbapi.remote New connection at %X" % id(self))
|
||||
|
||||
def _raiseConnectionError(self, errorclass, errorvalue):
|
||||
eh = self.errorhandler
|
||||
if eh is None:
|
||||
eh = api.standardErrorHandler
|
||||
eh(self, None, errorclass, errorvalue)
|
||||
|
||||
def close(self):
|
||||
"""Close the connection now (rather than whenever __del__ is called).
|
||||
|
||||
The connection will be unusable from this point forward;
|
||||
an Error (or subclass) exception will be raised if any operation is attempted with the connection.
|
||||
The same applies to all cursor objects trying to use the connection.
|
||||
"""
|
||||
for crsr in list(self.cursors.values())[
|
||||
:
|
||||
]: # copy the list, then close each one
|
||||
crsr.close()
|
||||
try:
|
||||
"""close the underlying remote Connection object"""
|
||||
self.proxy.close()
|
||||
if verbose:
|
||||
print("adodbapi.remote Closed connection at %X" % id(self))
|
||||
object.__delattr__(
|
||||
self, "proxy"
|
||||
) # future attempts to use closed cursor will be caught by __getattr__
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def __del__(self):
|
||||
try:
|
||||
self.proxy.close()
|
||||
except:
|
||||
pass
|
||||
|
||||
def commit(self):
|
||||
"""Commit any pending transaction to the database.
|
||||
|
||||
Note that if the database supports an auto-commit feature,
|
||||
this must be initially off. An interface method may be provided to turn it back on.
|
||||
Database modules that do not support transactions should implement this method with void functionality.
|
||||
"""
|
||||
if not self.supportsTransactions:
|
||||
return
|
||||
result = self.proxy.commit()
|
||||
if result:
|
||||
self._raiseConnectionError(
|
||||
api.OperationalError, "Error during commit: %s" % result
|
||||
)
|
||||
|
||||
def _rollback(self):
|
||||
"""In case a database does provide transactions this method causes the the database to roll back to
|
||||
the start of any pending transaction. Closing a connection without committing the changes first will
|
||||
cause an implicit rollback to be performed.
|
||||
"""
|
||||
result = self.proxy.rollback()
|
||||
if result:
|
||||
self._raiseConnectionError(
|
||||
api.OperationalError, "Error during rollback: %s" % result
|
||||
)
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
if name in ("paramstyle", "timeout", "autocommit"):
|
||||
if self.proxy:
|
||||
self.proxy.send_attribute_to_host(name, value)
|
||||
object.__setattr__(self, name, value) # store attribute locally (too)
|
||||
|
||||
def __getattr__(self, item):
|
||||
if (
|
||||
item == "rollback"
|
||||
): # the rollback method only appears if the database supports transactions
|
||||
if self.supportsTransactions:
|
||||
return (
|
||||
self._rollback
|
||||
) # return the rollback method so the caller can execute it.
|
||||
else:
|
||||
raise self.ProgrammingError(
|
||||
"this data provider does not support Rollback"
|
||||
)
|
||||
elif item in (
|
||||
"dbms_name",
|
||||
"dbms_version",
|
||||
"connection_string",
|
||||
"autocommit",
|
||||
): # 'messages' ):
|
||||
return self.getIndexedValue(item)
|
||||
elif item == "proxy":
|
||||
raise self.ProgrammingError("Attempting to use closed connection")
|
||||
else:
|
||||
raise self.ProgrammingError('No remote access for attribute="%s"' % item)
|
||||
|
||||
def getIndexedValue(self, index):
|
||||
r = self.proxy.get_attribute_for_remote(index)
|
||||
return r
|
||||
|
||||
def cursor(self):
|
||||
"Return a new Cursor Object using the connection."
|
||||
myCursor = Cursor(self)
|
||||
return myCursor
|
||||
|
||||
def _i_am_here(self, crsr):
|
||||
"message from a new cursor proclaiming its existence"
|
||||
self.cursors[crsr.id] = crsr
|
||||
|
||||
def _i_am_closing(self, crsr):
|
||||
"message from a cursor giving connection a chance to clean up"
|
||||
try:
|
||||
del self.cursors[crsr.id]
|
||||
except:
|
||||
pass
|
||||
|
||||
def __enter__(self): # Connections are context managers
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
if exc_type:
|
||||
self._rollback() # automatic rollback on errors
|
||||
else:
|
||||
self.commit()
|
||||
|
||||
def get_table_names(self):
|
||||
return self.proxy.get_table_names()
|
||||
|
||||
|
||||
def fixpickle(x):
|
||||
"""pickle barfs on buffer(x) so we pass as array.array(x) then restore to original form for .execute()"""
|
||||
if x is None:
|
||||
return None
|
||||
if isinstance(x, dict):
|
||||
# for 'named' paramstyle user will pass a mapping
|
||||
newargs = {}
|
||||
for arg, val in list(x.items()):
|
||||
if isinstance(val, memoryViewType):
|
||||
newval = array.array("B")
|
||||
newval.fromstring(val)
|
||||
newargs[arg] = newval
|
||||
else:
|
||||
newargs[arg] = val
|
||||
return newargs
|
||||
# if not a mapping, then a sequence
|
||||
newargs = []
|
||||
for arg in x:
|
||||
if isinstance(arg, memoryViewType):
|
||||
newarg = array.array("B")
|
||||
newarg.fromstring(arg)
|
||||
newargs.append(newarg)
|
||||
else:
|
||||
newargs.append(arg)
|
||||
return newargs
|
||||
|
||||
|
||||
class Cursor(object):
|
||||
def __init__(self, connection):
|
||||
self.command = None
|
||||
self.errorhandler = None ## was: connection.errorhandler
|
||||
self.connection = connection
|
||||
self.proxy = self.connection.proxy
|
||||
self.rs = None # the fetchable data for this cursor
|
||||
self.converters = NotImplemented
|
||||
self.id = connection.proxy.build_cursor()
|
||||
connection._i_am_here(self)
|
||||
self.recordset_format = api.RS_REMOTE
|
||||
if verbose:
|
||||
print(
|
||||
"%s New cursor at %X on conn %X"
|
||||
% (version, id(self), id(self.connection))
|
||||
)
|
||||
|
||||
def prepare(self, operation):
|
||||
self.command = operation
|
||||
try:
|
||||
del self.description
|
||||
except AttributeError:
|
||||
pass
|
||||
self.proxy.crsr_prepare(self.id, operation)
|
||||
|
||||
def __iter__(self): # [2.1 Zamarev]
|
||||
return iter(self.fetchone, None) # [2.1 Zamarev]
|
||||
|
||||
def __next__(self):
|
||||
r = self.fetchone()
|
||||
if r:
|
||||
return r
|
||||
raise StopIteration
|
||||
|
||||
def __enter__(self):
|
||||
"Allow database cursors to be used with context managers."
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
"Allow database cursors to be used with context managers."
|
||||
self.close()
|
||||
|
||||
def __getattr__(self, key):
|
||||
if key == "numberOfColumns":
|
||||
try:
|
||||
return len(self.rs[0])
|
||||
except:
|
||||
return 0
|
||||
if key == "description":
|
||||
try:
|
||||
self.description = self.proxy.crsr_get_description(self.id)[:]
|
||||
return self.description
|
||||
except TypeError:
|
||||
return None
|
||||
if key == "columnNames":
|
||||
try:
|
||||
r = dict(
|
||||
self.proxy.crsr_get_columnNames(self.id)
|
||||
) # copy the remote columns
|
||||
|
||||
except TypeError:
|
||||
r = {}
|
||||
self.columnNames = r
|
||||
return r
|
||||
|
||||
if key == "remote_cursor":
|
||||
raise api.OperationalError
|
||||
try:
|
||||
return self.proxy.crsr_get_attribute_for_remote(self.id, key)
|
||||
except AttributeError:
|
||||
raise api.InternalError(
|
||||
'Failure getting attribute "%s" from proxy cursor.' % key
|
||||
)
|
||||
|
||||
def __setattr__(self, key, value):
|
||||
if key == "arraysize":
|
||||
self.proxy.crsr_set_arraysize(self.id, value)
|
||||
if key == "paramstyle":
|
||||
if value in api.accepted_paramstyles:
|
||||
self.proxy.crsr_set_paramstyle(self.id, value)
|
||||
else:
|
||||
self._raiseCursorError(
|
||||
api.ProgrammingError, 'invalid paramstyle ="%s"' % value
|
||||
)
|
||||
object.__setattr__(self, key, value)
|
||||
|
||||
def _raiseCursorError(self, errorclass, errorvalue):
|
||||
eh = self.errorhandler
|
||||
if eh is None:
|
||||
eh = api.standardErrorHandler
|
||||
eh(self.connection, self, errorclass, errorvalue)
|
||||
|
||||
def execute(self, operation, parameters=None):
|
||||
if self.connection is None:
|
||||
self._raiseCursorError(
|
||||
ProgrammingError, "Attempted operation on closed cursor"
|
||||
)
|
||||
self.command = operation
|
||||
try:
|
||||
del self.description
|
||||
except AttributeError:
|
||||
pass
|
||||
try:
|
||||
del self.columnNames
|
||||
except AttributeError:
|
||||
pass
|
||||
fp = fixpickle(parameters)
|
||||
if verbose > 2:
|
||||
print(
|
||||
(
|
||||
'%s executing "%s" with params=%s'
|
||||
% (version, operation, repr(parameters))
|
||||
)
|
||||
)
|
||||
result = self.proxy.crsr_execute(self.id, operation, fp)
|
||||
if result: # an exception was triggered
|
||||
self._raiseCursorError(result[0], result[1])
|
||||
|
||||
def executemany(self, operation, seq_of_parameters):
|
||||
if self.connection is None:
|
||||
self._raiseCursorError(
|
||||
ProgrammingError, "Attempted operation on closed cursor"
|
||||
)
|
||||
self.command = operation
|
||||
try:
|
||||
del self.description
|
||||
except AttributeError:
|
||||
pass
|
||||
try:
|
||||
del self.columnNames
|
||||
except AttributeError:
|
||||
pass
|
||||
sq = [fixpickle(x) for x in seq_of_parameters]
|
||||
if verbose > 2:
|
||||
print(
|
||||
(
|
||||
'%s executemany "%s" with params=%s'
|
||||
% (version, operation, repr(seq_of_parameters))
|
||||
)
|
||||
)
|
||||
self.proxy.crsr_executemany(self.id, operation, sq)
|
||||
|
||||
def nextset(self):
|
||||
try:
|
||||
del self.description
|
||||
except AttributeError:
|
||||
pass
|
||||
try:
|
||||
del self.columnNames
|
||||
except AttributeError:
|
||||
pass
|
||||
if verbose > 2:
|
||||
print(("%s nextset" % version))
|
||||
return self.proxy.crsr_nextset(self.id)
|
||||
|
||||
def callproc(self, procname, parameters=None):
|
||||
if self.connection is None:
|
||||
self._raiseCursorError(
|
||||
ProgrammingError, "Attempted operation on closed cursor"
|
||||
)
|
||||
self.command = procname
|
||||
try:
|
||||
del self.description
|
||||
except AttributeError:
|
||||
pass
|
||||
try:
|
||||
del self.columnNames
|
||||
except AttributeError:
|
||||
pass
|
||||
fp = fixpickle(parameters)
|
||||
if verbose > 2:
|
||||
print(
|
||||
(
|
||||
'%s callproc "%s" with params=%s'
|
||||
% (version, procname, repr(parameters))
|
||||
)
|
||||
)
|
||||
return self.proxy.crsr_callproc(self.id, procname, fp)
|
||||
|
||||
def fetchone(self):
|
||||
try:
|
||||
f1 = self.proxy.crsr_fetchone(self.id)
|
||||
except _BaseException as e:
|
||||
self._raiseCursorError(api.DatabaseError, e)
|
||||
else:
|
||||
if f1 is None:
|
||||
return None
|
||||
self.rs = [f1]
|
||||
return api.SQLrows(self.rs, 1, self)[
|
||||
0
|
||||
] # new object to hold the results of the fetch
|
||||
|
||||
def fetchmany(self, size=None):
|
||||
try:
|
||||
self.rs = self.proxy.crsr_fetchmany(self.id, size)
|
||||
if not self.rs:
|
||||
return []
|
||||
r = api.SQLrows(self.rs, len(self.rs), self)
|
||||
return r
|
||||
except Exception as e:
|
||||
self._raiseCursorError(api.DatabaseError, e)
|
||||
|
||||
def fetchall(self):
|
||||
try:
|
||||
self.rs = self.proxy.crsr_fetchall(self.id)
|
||||
if not self.rs:
|
||||
return []
|
||||
return api.SQLrows(self.rs, len(self.rs), self)
|
||||
except Exception as e:
|
||||
self._raiseCursorError(api.DatabaseError, e)
|
||||
|
||||
def close(self):
|
||||
if self.connection is None:
|
||||
return
|
||||
self.connection._i_am_closing(self) # take me off the connection's cursors list
|
||||
try:
|
||||
self.proxy.crsr_close(self.id)
|
||||
except:
|
||||
pass
|
||||
try:
|
||||
del self.description
|
||||
except:
|
||||
pass
|
||||
try:
|
||||
del self.rs # let go of the recordset
|
||||
except:
|
||||
pass
|
||||
self.connection = (
|
||||
None # this will make all future method calls on me throw an exception
|
||||
)
|
||||
self.proxy = None
|
||||
if verbose:
|
||||
print("adodbapi.remote Closed cursor at %X" % id(self))
|
||||
|
||||
def __del__(self):
|
||||
try:
|
||||
self.close()
|
||||
except:
|
||||
pass
|
||||
|
||||
def setinputsizes(self, sizes):
|
||||
pass
|
||||
|
||||
def setoutputsize(self, size, column=None):
|
||||
pass
|
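The fetch methods above wrap server-side results in api.SQLrows objects, so a client uses this cursor exactly like a local PEP 249 cursor. A minimal usage sketch, assuming a reachable proxy server and an existing table; the host address, connection string, and table name below are placeholders, not working endpoints:

# Hypothetical client-side usage of the remote module; the proxy address
# and connection string are illustrative only.
import adodbapi.remote as remote

conn = remote.connect(
    "Provider=SQLOLEDB;Data Source=example-host;Initial Catalog=adotest;",
    proxy_host="192.0.2.10",  # documentation-range IP; replace with your server
)
crsr = conn.cursor()
crsr.execute("select name from sometable")
print(crsr.fetchone())    # one SQLrow, or None when the set is exhausted
print(crsr.fetchmany(5))  # up to five rows
print(crsr.fetchall())    # whatever remains
crsr.close()
conn.close()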
15
lib/adodbapi/schema_table.py
Normal file
15
lib/adodbapi/schema_table.py
Normal file
@ -0,0 +1,15 @@
"""call using an open ADO connection --> list of table names"""
from . import adodbapi


def names(connection_object):
    ado = connection_object.adoConn
    schema = ado.OpenSchema(20)  # constant = adSchemaTables

    tables = []
    while not schema.EOF:
        name = adodbapi.getIndexedValue(schema.Fields, "TABLE_NAME").Value
        tables.append(name)
        schema.MoveNext()
    del schema
    return tables
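The helper returns plain Python strings, so it can be dropped into any script that already holds an open connection. A minimal sketch, assuming an adodbapi connection to some Access/Jet data source; the provider and .mdb path are placeholders. Note that adSchemaTables may also list system tables (e.g. the MSys* tables on Jet):

# Hypothetical usage sketch -- the provider and .mdb path are placeholders.
import adodbapi
from adodbapi import schema_table

conn = adodbapi.connect(
    "Provider=Microsoft.ACE.OLEDB.12.0;Data Source=c:\\temp\\test.mdb"
)
try:
    for table in schema_table.names(conn):
        print(table)
finally:
    conn.close()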
70
lib/adodbapi/setup.py
Normal file
70
lib/adodbapi/setup.py
Normal file
@ -0,0 +1,70 @@
"""adodbapi -- a pure Python PEP 249 DB-API package using Microsoft ADO

Adodbapi can be run on CPython 3.5 and later,
or IronPython version 2.6 and later (in theory, possibly no longer in practice!)
"""
CLASSIFIERS = """\
Development Status :: 5 - Production/Stable
Intended Audience :: Developers
License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)
Operating System :: Microsoft :: Windows
Operating System :: POSIX :: Linux
Programming Language :: Python
Programming Language :: Python :: 3
Programming Language :: SQL
Topic :: Software Development
Topic :: Software Development :: Libraries :: Python Modules
Topic :: Database
"""

NAME = "adodbapi"
MAINTAINER = "Vernon Cole"
MAINTAINER_EMAIL = "vernondcole@gmail.com"
DESCRIPTION = (
    """A pure Python package implementing PEP 249 DB-API using Microsoft ADO."""
)
URL = "http://sourceforge.net/projects/adodbapi"
LICENSE = "LGPL"
CLASSIFIERS = filter(None, CLASSIFIERS.split("\n"))
AUTHOR = "Henrik Ekelund, Vernon Cole, et.al."
AUTHOR_EMAIL = "vernondcole@gmail.com"
PLATFORMS = ["Windows", "Linux"]

VERSION = None  # in case searching for version fails
a = open("adodbapi.py")  # find the version string in the source code
for line in a:
    if "__version__" in line:
        VERSION = line.split("'")[1]
        print('adodbapi version="%s"' % VERSION)
        break
a.close()


def setup_package():
    from distutils.command.build_py import build_py
    from distutils.core import setup

    setup(
        cmdclass={"build_py": build_py},
        name=NAME,
        maintainer=MAINTAINER,
        maintainer_email=MAINTAINER_EMAIL,
        description=DESCRIPTION,
        url=URL,
        keywords="database ado odbc dbapi db-api Microsoft SQL",
        ## download_url=DOWNLOAD_URL,
        long_description=open("README.txt").read(),
        license=LICENSE,
        classifiers=CLASSIFIERS,
        author=AUTHOR,
        author_email=AUTHOR_EMAIL,
        platforms=PLATFORMS,
        version=VERSION,
        package_dir={"adodbapi": ""},
        packages=["adodbapi"],
    )
    return


if __name__ == "__main__":
    setup_package()
1692
lib/adodbapi/test/adodbapitest.py
Normal file
1692
lib/adodbapi/test/adodbapitest.py
Normal file
File diff suppressed because it is too large
221
lib/adodbapi/test/adodbapitestconfig.py
Normal file
221
lib/adodbapi/test/adodbapitestconfig.py
Normal file
@ -0,0 +1,221 @@
# Configure this to _YOUR_ environment in order to run the testcases.
"testADOdbapiConfig.py v 2.6.2.B00"

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# #
# # TESTERS:
# #
# # You will need to make numerous modifications to this file
# # to adapt it to your own testing environment.
# #
# # Skip down to the next "# #" line --
# # -- the things you need to change are below it.
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
import platform
import random
import sys

import is64bit
import setuptestframework
import tryconnection

print("\nPython", sys.version)
node = platform.node()
try:
    print(
        "node=%s, is64bit.os()= %s, is64bit.Python()= %s"
        % (node, is64bit.os(), is64bit.Python())
    )
except:
    pass

if "--help" in sys.argv:
    print(
        """Valid command-line switches are:
        --package - create a temporary test package, run 2to3 if needed.
        --all - run all possible tests
        --time - loop over time format tests (including mxdatetime if present)
        --nojet - do not test against an ACCESS database file
        --mssql - test against Microsoft SQL server
        --pg - test against PostgreSQL
        --mysql - test against MariaDB
        --remote= - test using remote server at= (experimental)
        """
    )
    exit()
try:
    onWindows = bool(sys.getwindowsversion())  # seems to work on all versions of Python
except:
    onWindows = False

# create a random name for temporary table names
_alphabet = (
    "PYFGCRLAOEUIDHTNSQJKXBMWVZ"  # why, yes, I do happen to use a dvorak keyboard
)
tmp = "".join([random.choice(_alphabet) for x in range(9)])
mdb_name = "xx_" + tmp + ".mdb"  # generate a non-colliding name for the temporary .mdb
testfolder = setuptestframework.maketemp()

if "--package" in sys.argv:
    # create a new adodbapi module -- running 2to3 if needed.
    pth = setuptestframework.makeadopackage(testfolder)
else:
    # use the adodbapi module in which this file appears
    pth = setuptestframework.find_ado_path()
if pth not in sys.path:
    # look here _first_ to find modules
    sys.path.insert(1, pth)

proxy_host = None
for arg in sys.argv:
    if arg.startswith("--remote="):
        proxy_host = arg.split("=")[1]
        import adodbapi.remote as remote

        break


# function to clean up the temporary folder -- calling program must run this function before exit.
cleanup = setuptestframework.getcleanupfunction()
try:
    import adodbapi  # will (hopefully) be imported using the "pth" discovered above
except SyntaxError:
    print(
        '\n* * * Are you trying to run Python2 code using Python3? Re-run this test using the "--package" switch.'
    )
    sys.exit(11)
try:
    print(adodbapi.version)  # show version
except:
    print('"adodbapi.version" not present or not working.')
print(__doc__)

verbose = False
for a in sys.argv:
    if a.startswith("--verbose"):
        arg = True
        try:
            arg = int(a.split("=")[1])
        except IndexError:
            pass
        adodbapi.adodbapi.verbose = arg
        verbose = arg

doAllTests = "--all" in sys.argv
doAccessTest = not ("--nojet" in sys.argv)
doSqlServerTest = "--mssql" in sys.argv or doAllTests
doMySqlTest = "--mysql" in sys.argv or doAllTests
doPostgresTest = "--pg" in sys.argv or doAllTests
iterateOverTimeTests = ("--time" in sys.argv or doAllTests) and onWindows

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # start your environment setup here v v v
SQL_HOST_NODE = "testsql.2txt.us,1430"

try:  # If mx extensions are installed, use mxDateTime
    import mx.DateTime

    doMxDateTimeTest = True
except:
    doMxDateTimeTest = False  # Requires eGenixMXExtensions

doTimeTest = True  # obsolete python time format

if doAccessTest:
    if proxy_host:  # determine the (probably remote) database file folder
        c = {"macro_find_temp_test_path": ["mdb", mdb_name], "proxy_host": proxy_host}
    else:
        c = {"mdb": setuptestframework.makemdb(testfolder, mdb_name)}

    # macro definition for keyword "provider" using macro "is64bit" -- see documentation
    # is64bit will return true for 64 bit versions of Python, so the macro will select the ACE provider
    # (If running a remote ADO service, this will test the 64-bitedness of the ADO server.)
    c["macro_is64bit"] = [
        "provider",
        "Microsoft.ACE.OLEDB.12.0",  # 64 bit provider
        "Microsoft.Jet.OLEDB.4.0",  # 32 bit provider
    ]
    connStrAccess = "Provider=%(provider)s;Data Source=%(mdb)s"  # ;Mode=ReadWrite;Persist Security Info=False;Jet OLEDB:Bypass UserInfo Validation=True"
    print(
        " ...Testing ACCESS connection to {} file...".format(
            c.get("mdb", "remote .mdb")
        )
    )
    doAccessTest, connStrAccess, dbAccessconnect = tryconnection.try_connection(
        verbose, connStrAccess, 10, **c
    )

if doSqlServerTest:
    c = {
        "host": SQL_HOST_NODE,  # name of computer with SQL Server
        "database": "adotest",
        "user": "adotestuser",  # None implies Windows security
        "password": "Sq1234567",
        # macro definition for keyword "security" using macro "auto_security"
        "macro_auto_security": "security",
        "provider": "MSOLEDBSQL; MARS Connection=True",
    }
    if proxy_host:
        c["proxy_host"] = proxy_host
    connStr = "Provider=%(provider)s; Initial Catalog=%(database)s; Data Source=%(host)s; %(security)s;"
    print(" ...Testing MS-SQL login to {}...".format(c["host"]))
    (
        doSqlServerTest,
        connStrSQLServer,
        dbSqlServerconnect,
    ) = tryconnection.try_connection(verbose, connStr, 30, **c)

if doMySqlTest:
    c = {
        "host": "testmysql.2txt.us",
        "database": "adodbapitest",
        "user": "adotest",
        "password": "12345678",
        "port": "3330",  # note the nonstandard port for obfuscation
        "driver": "MySQL ODBC 5.1 Driver",
    }  # or _driver="MySQL ODBC 3.51 Driver
    if proxy_host:
        c["proxy_host"] = proxy_host
    c["macro_is64bit"] = [
        "provider",
        "Provider=MSDASQL;",
    ]  # turn on the 64 bit ODBC adapter only if needed
    cs = (
        "%(provider)sDriver={%(driver)s};Server=%(host)s;Port=3330;"
        + "Database=%(database)s;user=%(user)s;password=%(password)s;Option=3;"
    )
    print(" ...Testing MySql login to {}...".format(c["host"]))
    doMySqlTest, connStrMySql, dbMySqlconnect = tryconnection.try_connection(
        verbose, cs, 5, **c
    )


if doPostgresTest:
    _computername = "testpg.2txt.us"
    _databasename = "adotest"
    _username = "adotestuser"
    _password = "12345678"
    kws = {"timeout": 4}
    kws["macro_is64bit"] = [
        "prov_drv",
        "Provider=MSDASQL;Driver={PostgreSQL Unicode(x64)}",
        "Driver=PostgreSQL Unicode",
    ]
    # get driver from http://www.postgresql.org/ftp/odbc/versions/
    # test using positional and keyword arguments (bad example for real code)
    if proxy_host:
        kws["proxy_host"] = proxy_host
    print(" ...Testing PostgreSQL login to {}...".format(_computername))
    doPostgresTest, connStrPostgres, dbPostgresConnect = tryconnection.try_connection(
        verbose,
        "%(prov_drv)s;Server=%(host)s;Database=%(database)s;uid=%(user)s;pwd=%(password)s;port=5430;",  # note nonstandard port
        _username,
        _password,
        _computername,
        _databasename,
        **kws
    )

assert (
    doAccessTest or doSqlServerTest or doMySqlTest or doPostgresTest
), "No database engine found for testing"
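The connection strings above are templates: adodbapi expands the %(keyword)s fields from the supplied keyword arguments, after macros such as macro_is64bit have filled in keys like "provider". A minimal sketch that mimics the substitution step with plain Python %-formatting; the values are the Access ones from this file and the temp-file path is hypothetical:

# Illustrative only: adodbapi performs this expansion internally after
# resolving macros; here we mimic it with ordinary %-formatting.
keywords = {
    "provider": "Microsoft.ACE.OLEDB.12.0",  # chosen by macro_is64bit on 64-bit Python
    "mdb": r"c:\temp\adodbapi_test\xx_EXAMPLE.mdb",  # hypothetical temp file
}
template = "Provider=%(provider)s;Data Source=%(mdb)s"
print(template % keywords)
# -> Provider=Microsoft.ACE.OLEDB.12.0;Data Source=c:\temp\adodbapi_test\xx_EXAMPLE.mdb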
939
lib/adodbapi/test/dbapi20.py
Normal file
939
lib/adodbapi/test/dbapi20.py
Normal file
@ -0,0 +1,939 @@
#!/usr/bin/env python
""" Python DB API 2.0 driver compliance unit test suite.

    This software is Public Domain and may be used without restrictions.

 "Now we have booze and barflies entering the discussion, plus rumours of
 DBAs on drugs... and I won't tell you what flashes through my mind each
 time I read the subject line with 'Anal Compliance' in it. All around
 this is turning out to be a thoroughly unwholesome unit test."

    -- Ian Bicking
"""

__version__ = "$Revision: 1.15.0 $"[11:-2]
__author__ = "Stuart Bishop <stuart@stuartbishop.net>"

import sys
import time
import unittest

if sys.version[0] >= "3":  # python 3.x
    _BaseException = Exception

    def _failUnless(self, expr, msg=None):
        self.assertTrue(expr, msg)

else:  # python 2.x
    from exceptions import Exception as _BaseException

    def _failUnless(self, expr, msg=None):
        self.failUnless(expr, msg)  ## deprecated since Python 2.6


# set this to "True" to follow API 2.0 to the letter
TEST_FOR_NON_IDEMPOTENT_CLOSE = False

# Revision 1.15  2019/11/22 00:50:00  kf7xm
# Make Turn off IDEMPOTENT_CLOSE a proper skipTest

# Revision 1.14  2013/05/20 11:02:05  kf7xm
# Add a literal string to the format insertion test to catch trivial re-format algorithms

# Revision 1.13  2013/05/08 14:31:50  kf7xm
# Quick switch to Turn off IDEMPOTENT_CLOSE test. Also: Silence teardown failure


# Revision 1.12  2009/02/06 03:35:11  kf7xm
# Tested okay with Python 3.0, includes last minute patches from Mark H.
#
# Revision 1.1.1.1.2.1  2008/09/20 19:54:59  rupole
# Include latest changes from main branch
# Updates for py3k
#
# Revision 1.11  2005/01/02 02:41:01  zenzen
# Update author email address
#
# Revision 1.10  2003/10/09 03:14:14  zenzen
# Add test for DB API 2.0 optional extension, where database exceptions
# are exposed as attributes on the Connection object.
#
# Revision 1.9  2003/08/13 01:16:36  zenzen
# Minor tweak from Stefan Fleiter
#
# Revision 1.8  2003/04/10 00:13:25  zenzen
# Changes, as per suggestions by M.-A. Lemburg
# - Add a table prefix, to ensure namespace collisions can always be avoided
#
# Revision 1.7  2003/02/26 23:33:37  zenzen
# Break out DDL into helper functions, as per request by David Rushby
#
# Revision 1.6  2003/02/21 03:04:33  zenzen
# Stuff from Henrik Ekelund:
#     added test_None
#     added test_nextset & hooks
#
# Revision 1.5  2003/02/17 22:08:43  zenzen
# Implement suggestions and code from Henrik Eklund - test that cursor.arraysize
# defaults to 1 & generic cursor.callproc test added
#
# Revision 1.4  2003/02/15 00:16:33  zenzen
# Changes, as per suggestions and bug reports by M.-A. Lemburg,
# Matthew T. Kromer, Federico Di Gregorio and Daniel Dittmar
# - Class renamed
# - Now a subclass of TestCase, to avoid requiring the driver stub
#   to use multiple inheritance
# - Reversed the polarity of buggy test in test_description
# - Test exception heirarchy correctly
# - self.populate is now self._populate(), so if a driver stub
#   overrides self.ddl1 this change propogates
# - VARCHAR columns now have a width, which will hopefully make the
#   DDL even more portible (this will be reversed if it causes more problems)
# - cursor.rowcount being checked after various execute and fetchXXX methods
# - Check for fetchall and fetchmany returning empty lists after results
#   are exhausted (already checking for empty lists if select retrieved
#   nothing)
# - Fix bugs in test_setoutputsize_basic and test_setinputsizes
#
def str2bytes(sval):
    if sys.version_info < (3, 0) and isinstance(sval, str):
        sval = sval.decode("latin1")
    return sval.encode("latin1")  # python 3 make unicode into bytes


class DatabaseAPI20Test(unittest.TestCase):
    """Test a database self.driver for DB API 2.0 compatibility.
    This implementation tests Gadfly, but the TestCase
    is structured so that other self.drivers can subclass this
    test case to ensure compliance with the DB-API. It is
    expected that this TestCase may be expanded in the future
    if ambiguities or edge conditions are discovered.

    The 'Optional Extensions' are not yet being tested.

    self.drivers should subclass this test, overriding setUp, tearDown,
    self.driver, connect_args and connect_kw_args. Class specification
    should be as follows:

    import dbapi20
    class mytest(dbapi20.DatabaseAPI20Test):
        [...]

    Don't 'import DatabaseAPI20Test from dbapi20', or you will
    confuse the unit tester - just 'import dbapi20'.
    """

    # The self.driver module. This should be the module where the 'connect'
    # method is to be found
    driver = None
    connect_args = ()  # List of arguments to pass to connect
    connect_kw_args = {}  # Keyword arguments for connect
    table_prefix = "dbapi20test_"  # If you need to specify a prefix for tables

    ddl1 = "create table %sbooze (name varchar(20))" % table_prefix
    ddl2 = "create table %sbarflys (name varchar(20), drink varchar(30))" % table_prefix
    xddl1 = "drop table %sbooze" % table_prefix
    xddl2 = "drop table %sbarflys" % table_prefix

    lowerfunc = "lower"  # Name of stored procedure to convert string->lowercase

    # Some drivers may need to override these helpers, for example adding
    # a 'commit' after the execute.
    def executeDDL1(self, cursor):
        cursor.execute(self.ddl1)

    def executeDDL2(self, cursor):
        cursor.execute(self.ddl2)

    def setUp(self):
        """self.drivers should override this method to perform required setup
        if any is necessary, such as creating the database.
        """
        pass

    def tearDown(self):
        """self.drivers should override this method to perform required cleanup
        if any is necessary, such as deleting the test database.
        The default drops the tables that may be created.
        """
        try:
            con = self._connect()
            try:
                cur = con.cursor()
                for ddl in (self.xddl1, self.xddl2):
                    try:
                        cur.execute(ddl)
                        con.commit()
                    except self.driver.Error:
                        # Assume table didn't exist. Other tests will check if
                        # execute is busted.
                        pass
            finally:
                con.close()
        except _BaseException:
            pass

    def _connect(self):
        try:
            r = self.driver.connect(*self.connect_args, **self.connect_kw_args)
        except AttributeError:
            self.fail("No connect method found in self.driver module")
        return r

    def test_connect(self):
        con = self._connect()
        con.close()

    def test_apilevel(self):
        try:
            # Must exist
            apilevel = self.driver.apilevel
            # Must equal 2.0
            self.assertEqual(apilevel, "2.0")
        except AttributeError:
            self.fail("Driver doesn't define apilevel")

    def test_threadsafety(self):
        try:
            # Must exist
            threadsafety = self.driver.threadsafety
            # Must be a valid value
            _failUnless(self, threadsafety in (0, 1, 2, 3))
        except AttributeError:
            self.fail("Driver doesn't define threadsafety")

    def test_paramstyle(self):
        try:
            # Must exist
            paramstyle = self.driver.paramstyle
            # Must be a valid value
            _failUnless(
                self, paramstyle in ("qmark", "numeric", "named", "format", "pyformat")
            )
        except AttributeError:
            self.fail("Driver doesn't define paramstyle")

    def test_Exceptions(self):
        # Make sure required exceptions exist, and are in the
        # defined hierarchy.
        if sys.version[0] == "3":  # under Python 3 StandardError no longer exists
            self.assertTrue(issubclass(self.driver.Warning, Exception))
            self.assertTrue(issubclass(self.driver.Error, Exception))
        else:
            self.failUnless(issubclass(self.driver.Warning, Exception))
            self.failUnless(issubclass(self.driver.Error, Exception))

        _failUnless(self, issubclass(self.driver.InterfaceError, self.driver.Error))
        _failUnless(self, issubclass(self.driver.DatabaseError, self.driver.Error))
        _failUnless(self, issubclass(self.driver.OperationalError, self.driver.Error))
        _failUnless(self, issubclass(self.driver.IntegrityError, self.driver.Error))
        _failUnless(self, issubclass(self.driver.InternalError, self.driver.Error))
        _failUnless(self, issubclass(self.driver.ProgrammingError, self.driver.Error))
        _failUnless(self, issubclass(self.driver.NotSupportedError, self.driver.Error))

    def test_ExceptionsAsConnectionAttributes(self):
        # OPTIONAL EXTENSION
        # Test for the optional DB API 2.0 extension, where the exceptions
        # are exposed as attributes on the Connection object
        # I figure this optional extension will be implemented by any
        # driver author who is using this test suite, so it is enabled
        # by default.
        con = self._connect()
        drv = self.driver
        _failUnless(self, con.Warning is drv.Warning)
        _failUnless(self, con.Error is drv.Error)
        _failUnless(self, con.InterfaceError is drv.InterfaceError)
        _failUnless(self, con.DatabaseError is drv.DatabaseError)
        _failUnless(self, con.OperationalError is drv.OperationalError)
        _failUnless(self, con.IntegrityError is drv.IntegrityError)
        _failUnless(self, con.InternalError is drv.InternalError)
        _failUnless(self, con.ProgrammingError is drv.ProgrammingError)
        _failUnless(self, con.NotSupportedError is drv.NotSupportedError)

    def test_commit(self):
        con = self._connect()
        try:
            # Commit must work, even if it doesn't do anything
            con.commit()
        finally:
            con.close()

    def test_rollback(self):
        con = self._connect()
        # If rollback is defined, it should either work or throw
        # the documented exception
        if hasattr(con, "rollback"):
            try:
                con.rollback()
            except self.driver.NotSupportedError:
                pass

    def test_cursor(self):
        con = self._connect()
        try:
            cur = con.cursor()
        finally:
            con.close()

    def test_cursor_isolation(self):
        con = self._connect()
        try:
            # Make sure cursors created from the same connection have
            # the documented transaction isolation level
            cur1 = con.cursor()
            cur2 = con.cursor()
            self.executeDDL1(cur1)
            cur1.execute(
                "insert into %sbooze values ('Victoria Bitter')" % (self.table_prefix)
            )
            cur2.execute("select name from %sbooze" % self.table_prefix)
            booze = cur2.fetchall()
            self.assertEqual(len(booze), 1)
            self.assertEqual(len(booze[0]), 1)
            self.assertEqual(booze[0][0], "Victoria Bitter")
        finally:
            con.close()

    def test_description(self):
        con = self._connect()
        try:
            cur = con.cursor()
            self.executeDDL1(cur)
            self.assertEqual(
                cur.description,
                None,
                "cursor.description should be none after executing a "
                "statement that can return no rows (such as DDL)",
            )
            cur.execute("select name from %sbooze" % self.table_prefix)
            self.assertEqual(
                len(cur.description), 1, "cursor.description describes too many columns"
            )
            self.assertEqual(
                len(cur.description[0]),
                7,
                "cursor.description[x] tuples must have 7 elements",
            )
            self.assertEqual(
                cur.description[0][0].lower(),
                "name",
                "cursor.description[x][0] must return column name",
            )
            self.assertEqual(
                cur.description[0][1],
                self.driver.STRING,
                "cursor.description[x][1] must return column type. Got %r"
                % cur.description[0][1],
            )

            # Make sure self.description gets reset
            self.executeDDL2(cur)
            self.assertEqual(
                cur.description,
                None,
                "cursor.description not being set to None when executing "
                "no-result statements (eg. DDL)",
            )
        finally:
            con.close()

    def test_rowcount(self):
        con = self._connect()
        try:
            cur = con.cursor()
            self.executeDDL1(cur)
            _failUnless(
                self,
                cur.rowcount in (-1, 0),  # Bug #543885
                "cursor.rowcount should be -1 or 0 after executing no-result "
                "statements",
            )
            cur.execute(
                "insert into %sbooze values ('Victoria Bitter')" % (self.table_prefix)
            )
            _failUnless(
                self,
                cur.rowcount in (-1, 1),
                "cursor.rowcount should == number of rows inserted, or "
                "set to -1 after executing an insert statement",
            )
            cur.execute("select name from %sbooze" % self.table_prefix)
            _failUnless(
                self,
                cur.rowcount in (-1, 1),
                "cursor.rowcount should == number of rows returned, or "
                "set to -1 after executing a select statement",
            )
            self.executeDDL2(cur)
            self.assertEqual(
                cur.rowcount,
                -1,
                "cursor.rowcount not being reset to -1 after executing "
                "no-result statements",
            )
        finally:
            con.close()

    lower_func = "lower"

    def test_callproc(self):
        con = self._connect()
        try:
            cur = con.cursor()
            if self.lower_func and hasattr(cur, "callproc"):
                r = cur.callproc(self.lower_func, ("FOO",))
                self.assertEqual(len(r), 1)
                self.assertEqual(r[0], "FOO")
                r = cur.fetchall()
                self.assertEqual(len(r), 1, "callproc produced no result set")
                self.assertEqual(len(r[0]), 1, "callproc produced invalid result set")
                self.assertEqual(r[0][0], "foo", "callproc produced invalid results")
        finally:
            con.close()

    def test_close(self):
        con = self._connect()
        try:
            cur = con.cursor()
        finally:
            con.close()

        # cursor.execute should raise an Error if called after connection
        # closed
        self.assertRaises(self.driver.Error, self.executeDDL1, cur)

        # connection.commit should raise an Error if called after connection
        # closed.
        self.assertRaises(self.driver.Error, con.commit)

        # connection.close should raise an Error if called more than once
        #!!! reasonable persons differ about the usefulness of this test and this feature !!!
        if TEST_FOR_NON_IDEMPOTENT_CLOSE:
            self.assertRaises(self.driver.Error, con.close)
        else:
            self.skipTest(
                "Non-idempotent close is considered a bad thing by some people."
            )

    def test_execute(self):
        con = self._connect()
        try:
            cur = con.cursor()
            self._paraminsert(cur)
        finally:
            con.close()

    def _paraminsert(self, cur):
        self.executeDDL2(cur)
        cur.execute(
            "insert into %sbarflys values ('Victoria Bitter', 'thi%%s :may ca%%(u)se? troub:1e')"
            % (self.table_prefix)
        )
        _failUnless(self, cur.rowcount in (-1, 1))

        if self.driver.paramstyle == "qmark":
            cur.execute(
                "insert into %sbarflys values (?, 'thi%%s :may ca%%(u)se? troub:1e')"
                % self.table_prefix,
                ("Cooper's",),
            )
        elif self.driver.paramstyle == "numeric":
            cur.execute(
                "insert into %sbarflys values (:1, 'thi%%s :may ca%%(u)se? troub:1e')"
                % self.table_prefix,
                ("Cooper's",),
            )
        elif self.driver.paramstyle == "named":
            cur.execute(
                "insert into %sbarflys values (:beer, 'thi%%s :may ca%%(u)se? troub:1e')"
                % self.table_prefix,
                {"beer": "Cooper's"},
            )
        elif self.driver.paramstyle == "format":
            cur.execute(
                "insert into %sbarflys values (%%s, 'thi%%s :may ca%%(u)se? troub:1e')"
                % self.table_prefix,
                ("Cooper's",),
            )
        elif self.driver.paramstyle == "pyformat":
            cur.execute(
                "insert into %sbarflys values (%%(beer)s, 'thi%%s :may ca%%(u)se? troub:1e')"
                % self.table_prefix,
                {"beer": "Cooper's"},
            )
        else:
            self.fail("Invalid paramstyle")
        _failUnless(self, cur.rowcount in (-1, 1))

        cur.execute("select name, drink from %sbarflys" % self.table_prefix)
        res = cur.fetchall()
        self.assertEqual(len(res), 2, "cursor.fetchall returned too few rows")
        beers = [res[0][0], res[1][0]]
        beers.sort()
        self.assertEqual(
            beers[0],
            "Cooper's",
            "cursor.fetchall retrieved incorrect data, or data inserted incorrectly",
        )
        self.assertEqual(
            beers[1],
            "Victoria Bitter",
            "cursor.fetchall retrieved incorrect data, or data inserted incorrectly",
        )
        trouble = "thi%s :may ca%(u)se? troub:1e"
        self.assertEqual(
            res[0][1],
            trouble,
            "cursor.fetchall retrieved incorrect data, or data inserted "
            "incorrectly. Got=%s, Expected=%s" % (repr(res[0][1]), repr(trouble)),
        )
        self.assertEqual(
            res[1][1],
            trouble,
            "cursor.fetchall retrieved incorrect data, or data inserted "
            "incorrectly. Got=%s, Expected=%s" % (repr(res[1][1]), repr(trouble)),
        )

    def test_executemany(self):
        con = self._connect()
        try:
            cur = con.cursor()
            self.executeDDL1(cur)
            largs = [("Cooper's",), ("Boag's",)]
            margs = [{"beer": "Cooper's"}, {"beer": "Boag's"}]
            if self.driver.paramstyle == "qmark":
                cur.executemany(
                    "insert into %sbooze values (?)" % self.table_prefix, largs
                )
            elif self.driver.paramstyle == "numeric":
                cur.executemany(
                    "insert into %sbooze values (:1)" % self.table_prefix, largs
                )
            elif self.driver.paramstyle == "named":
                cur.executemany(
                    "insert into %sbooze values (:beer)" % self.table_prefix, margs
                )
            elif self.driver.paramstyle == "format":
                cur.executemany(
                    "insert into %sbooze values (%%s)" % self.table_prefix, largs
                )
            elif self.driver.paramstyle == "pyformat":
                cur.executemany(
                    "insert into %sbooze values (%%(beer)s)" % (self.table_prefix),
                    margs,
                )
            else:
                self.fail("Unknown paramstyle")
            _failUnless(
                self,
                cur.rowcount in (-1, 2),
                "insert using cursor.executemany set cursor.rowcount to "
                "incorrect value %r" % cur.rowcount,
            )
            cur.execute("select name from %sbooze" % self.table_prefix)
            res = cur.fetchall()
            self.assertEqual(
                len(res), 2, "cursor.fetchall retrieved incorrect number of rows"
            )
            beers = [res[0][0], res[1][0]]
            beers.sort()
            self.assertEqual(
                beers[0], "Boag's", 'incorrect data "%s" retrieved' % beers[0]
            )
            self.assertEqual(beers[1], "Cooper's", "incorrect data retrieved")
        finally:
            con.close()

    def test_fetchone(self):
        con = self._connect()
        try:
            cur = con.cursor()

            # cursor.fetchone should raise an Error if called before
            # executing a select-type query
            self.assertRaises(self.driver.Error, cur.fetchone)

            # cursor.fetchone should raise an Error if called after
            # executing a query that cannot return rows
            self.executeDDL1(cur)
            self.assertRaises(self.driver.Error, cur.fetchone)

            cur.execute("select name from %sbooze" % self.table_prefix)
            self.assertEqual(
                cur.fetchone(),
                None,
                "cursor.fetchone should return None if a query retrieves no rows",
            )
            _failUnless(self, cur.rowcount in (-1, 0))

            # cursor.fetchone should raise an Error if called after
            # executing a query that cannot return rows
            cur.execute(
                "insert into %sbooze values ('Victoria Bitter')" % (self.table_prefix)
            )
            self.assertRaises(self.driver.Error, cur.fetchone)

            cur.execute("select name from %sbooze" % self.table_prefix)
            r = cur.fetchone()
            self.assertEqual(
                len(r), 1, "cursor.fetchone should have retrieved a single row"
            )
            self.assertEqual(
                r[0], "Victoria Bitter", "cursor.fetchone retrieved incorrect data"
            )
            self.assertEqual(
                cur.fetchone(),
                None,
                "cursor.fetchone should return None if no more rows available",
            )
            _failUnless(self, cur.rowcount in (-1, 1))
        finally:
            con.close()

    samples = [
        "Carlton Cold",
        "Carlton Draft",
        "Mountain Goat",
        "Redback",
        "Victoria Bitter",
        "XXXX",
    ]

    def _populate(self):
        """Return a list of sql commands to setup the DB for the fetch
        tests.
        """
        populate = [
            "insert into %sbooze values ('%s')" % (self.table_prefix, s)
            for s in self.samples
        ]
        return populate

    def test_fetchmany(self):
        con = self._connect()
        try:
            cur = con.cursor()

            # cursor.fetchmany should raise an Error if called without
            # issuing a query
            self.assertRaises(self.driver.Error, cur.fetchmany, 4)

            self.executeDDL1(cur)
            for sql in self._populate():
                cur.execute(sql)

            cur.execute("select name from %sbooze" % self.table_prefix)
            r = cur.fetchmany()
            self.assertEqual(
                len(r),
                1,
                "cursor.fetchmany retrieved incorrect number of rows, "
                "default of arraysize is one.",
            )
            cur.arraysize = 10
            r = cur.fetchmany(3)  # Should get 3 rows
            self.assertEqual(
                len(r), 3, "cursor.fetchmany retrieved incorrect number of rows"
            )
            r = cur.fetchmany(4)  # Should get 2 more
            self.assertEqual(
                len(r), 2, "cursor.fetchmany retrieved incorrect number of rows"
            )
            r = cur.fetchmany(4)  # Should be an empty sequence
            self.assertEqual(
                len(r),
                0,
                "cursor.fetchmany should return an empty sequence after "
                "results are exhausted",
            )
            _failUnless(self, cur.rowcount in (-1, 6))

            # Same as above, using cursor.arraysize
            cur.arraysize = 4
            cur.execute("select name from %sbooze" % self.table_prefix)
            r = cur.fetchmany()  # Should get 4 rows
            self.assertEqual(
                len(r), 4, "cursor.arraysize not being honoured by fetchmany"
            )
            r = cur.fetchmany()  # Should get 2 more
            self.assertEqual(len(r), 2)
            r = cur.fetchmany()  # Should be an empty sequence
            self.assertEqual(len(r), 0)
            _failUnless(self, cur.rowcount in (-1, 6))

            cur.arraysize = 6
            cur.execute("select name from %sbooze" % self.table_prefix)
            rows = cur.fetchmany()  # Should get all rows
            _failUnless(self, cur.rowcount in (-1, 6))
            self.assertEqual(len(rows), 6)
            rows = [r[0] for r in rows]
            rows.sort()

            # Make sure we get the right data back out
            for i in range(0, 6):
                self.assertEqual(
                    rows[i],
                    self.samples[i],
                    "incorrect data retrieved by cursor.fetchmany",
                )

            rows = cur.fetchmany()  # Should return an empty list
            self.assertEqual(
                len(rows),
                0,
                "cursor.fetchmany should return an empty sequence if "
                "called after the whole result set has been fetched",
            )
            _failUnless(self, cur.rowcount in (-1, 6))

            self.executeDDL2(cur)
            cur.execute("select name from %sbarflys" % self.table_prefix)
            r = cur.fetchmany()  # Should get empty sequence
            self.assertEqual(
                len(r),
                0,
                "cursor.fetchmany should return an empty sequence if "
                "query retrieved no rows",
            )
            _failUnless(self, cur.rowcount in (-1, 0))

        finally:
            con.close()

    def test_fetchall(self):
        con = self._connect()
        try:
            cur = con.cursor()
            # cursor.fetchall should raise an Error if called
            # without executing a query that may return rows (such
            # as a select)
            self.assertRaises(self.driver.Error, cur.fetchall)

            self.executeDDL1(cur)
            for sql in self._populate():
                cur.execute(sql)

            # cursor.fetchall should raise an Error if called
            # after executing a statement that cannot return rows
            self.assertRaises(self.driver.Error, cur.fetchall)

            cur.execute("select name from %sbooze" % self.table_prefix)
            rows = cur.fetchall()
            _failUnless(self, cur.rowcount in (-1, len(self.samples)))
            self.assertEqual(
                len(rows),
                len(self.samples),
                "cursor.fetchall did not retrieve all rows",
            )
            rows = [r[0] for r in rows]
            rows.sort()
            for i in range(0, len(self.samples)):
                self.assertEqual(
                    rows[i], self.samples[i], "cursor.fetchall retrieved incorrect rows"
                )
            rows = cur.fetchall()
            self.assertEqual(
                len(rows),
                0,
                "cursor.fetchall should return an empty list if called "
                "after the whole result set has been fetched",
            )
            _failUnless(self, cur.rowcount in (-1, len(self.samples)))

            self.executeDDL2(cur)
            cur.execute("select name from %sbarflys" % self.table_prefix)
            rows = cur.fetchall()
            _failUnless(self, cur.rowcount in (-1, 0))
            self.assertEqual(
                len(rows),
                0,
                "cursor.fetchall should return an empty list if "
                "a select query returns no rows",
            )

        finally:
            con.close()

    def test_mixedfetch(self):
        con = self._connect()
        try:
            cur = con.cursor()
            self.executeDDL1(cur)
            for sql in self._populate():
                cur.execute(sql)

            cur.execute("select name from %sbooze" % self.table_prefix)
            rows1 = cur.fetchone()
            rows23 = cur.fetchmany(2)
            rows4 = cur.fetchone()
            rows56 = cur.fetchall()
            _failUnless(self, cur.rowcount in (-1, 6))
            self.assertEqual(
                len(rows23), 2, "fetchmany returned incorrect number of rows"
            )
            self.assertEqual(
                len(rows56), 2, "fetchall returned incorrect number of rows"
            )

            rows = [rows1[0]]
            rows.extend([rows23[0][0], rows23[1][0]])
            rows.append(rows4[0])
            rows.extend([rows56[0][0], rows56[1][0]])
            rows.sort()
            for i in range(0, len(self.samples)):
                self.assertEqual(
                    rows[i], self.samples[i], "incorrect data retrieved or inserted"
                )
        finally:
            con.close()

    def help_nextset_setUp(self, cur):
        """Should create a procedure called deleteme
        that returns two result sets, first the
        number of rows in booze then "name from booze"
        """
        raise NotImplementedError("Helper not implemented")
        # sql="""
        #    create procedure deleteme as
        #    begin
        #        select count(*) from booze
        #        select name from booze
        #    end
        # """
        # cur.execute(sql)

    def help_nextset_tearDown(self, cur):
        "If cleaning up is needed after nextSetTest"
        raise NotImplementedError("Helper not implemented")
        # cur.execute("drop procedure deleteme")

    def test_nextset(self):
        con = self._connect()
        try:
            cur = con.cursor()
            if not hasattr(cur, "nextset"):
                return

            try:
                self.executeDDL1(cur)
                for sql in self._populate():
                    cur.execute(sql)

                self.help_nextset_setUp(cur)

                cur.callproc("deleteme")
                numberofrows = cur.fetchone()
                assert numberofrows[0] == len(self.samples)
                assert cur.nextset()
                names = cur.fetchall()
                assert len(names) == len(self.samples)
                s = cur.nextset()
                assert s is None, "No more return sets, should return None"
            finally:
                self.help_nextset_tearDown(cur)

        finally:
            con.close()

    # Note: this second definition shadows the generic test above,
    # so drivers must supply their own nextset test.
    def test_nextset(self):
        raise NotImplementedError("Drivers need to override this test")

    def test_arraysize(self):
        # Not much here - rest of the tests for this are in test_fetchmany
        con = self._connect()
        try:
            cur = con.cursor()
            _failUnless(
                self, hasattr(cur, "arraysize"), "cursor.arraysize must be defined"
            )
        finally:
            con.close()

    def test_setinputsizes(self):
        con = self._connect()
        try:
            cur = con.cursor()
            cur.setinputsizes((25,))
            self._paraminsert(cur)  # Make sure cursor still works
        finally:
            con.close()

    def test_setoutputsize_basic(self):
        # Basic test is to make sure setoutputsize doesn't blow up
        con = self._connect()
        try:
            cur = con.cursor()
            cur.setoutputsize(1000)
            cur.setoutputsize(2000, 0)
            self._paraminsert(cur)  # Make sure the cursor still works
        finally:
            con.close()

    def test_setoutputsize(self):
        # Real test for setoutputsize is driver dependent
        raise NotImplementedError("Driver needed to override this test")

    def test_None(self):
        con = self._connect()
        try:
            cur = con.cursor()
            self.executeDDL1(cur)
            cur.execute("insert into %sbooze values (NULL)" % self.table_prefix)
            cur.execute("select name from %sbooze" % self.table_prefix)
            r = cur.fetchall()
            self.assertEqual(len(r), 1)
            self.assertEqual(len(r[0]), 1)
            self.assertEqual(r[0][0], None, "NULL value not returned as None")
        finally:
            con.close()

    def test_Date(self):
        d1 = self.driver.Date(2002, 12, 25)
        d2 = self.driver.DateFromTicks(time.mktime((2002, 12, 25, 0, 0, 0, 0, 0, 0)))
        # Can we assume this? API doesn't specify, but it seems implied
        # self.assertEqual(str(d1),str(d2))

    def test_Time(self):
        t1 = self.driver.Time(13, 45, 30)
        t2 = self.driver.TimeFromTicks(time.mktime((2001, 1, 1, 13, 45, 30, 0, 0, 0)))
        # Can we assume this? API doesn't specify, but it seems implied
        # self.assertEqual(str(t1),str(t2))

    def test_Timestamp(self):
        t1 = self.driver.Timestamp(2002, 12, 25, 13, 45, 30)
        t2 = self.driver.TimestampFromTicks(
            time.mktime((2002, 12, 25, 13, 45, 30, 0, 0, 0))
        )
        # Can we assume this? API doesn't specify, but it seems implied
        # self.assertEqual(str(t1),str(t2))

    def test_Binary(self):
        b = self.driver.Binary(str2bytes("Something"))
        b = self.driver.Binary(str2bytes(""))

    def test_STRING(self):
        _failUnless(
            self, hasattr(self.driver, "STRING"), "module.STRING must be defined"
        )

    def test_BINARY(self):
        _failUnless(
            self, hasattr(self.driver, "BINARY"), "module.BINARY must be defined."
        )

    def test_NUMBER(self):
        _failUnless(
            self, hasattr(self.driver, "NUMBER"), "module.NUMBER must be defined."
        )

    def test_DATETIME(self):
        _failUnless(
            self, hasattr(self.driver, "DATETIME"), "module.DATETIME must be defined."
        )

    def test_ROWID(self):
        _failUnless(
            self, hasattr(self.driver, "ROWID"), "module.ROWID must be defined."
        )
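The class docstring above spells out the subclassing contract: set driver, connect_args, and connect_kw_args, and override the driver-specific tests. A minimal sketch of a driver test module, using the standard-library sqlite3 driver purely as an illustration (the module and class names here are made up, and a real run would still hit driver-specific quirks):

# mytest_sqlite3.py -- hypothetical example of wiring a driver into dbapi20.
import sqlite3
import unittest

import dbapi20


class test_sqlite3(dbapi20.DatabaseAPI20Test):
    driver = sqlite3
    connect_args = (":memory:",)  # in-memory database, nothing to clean up
    connect_kw_args = {}

    # sqlite3 has no stored procedures or multiple result sets, so the
    # driver-specific tests are stubbed out rather than inherited.
    def test_nextset(self):
        pass

    def test_setoutputsize(self):
        pass


if __name__ == "__main__":
    unittest.main()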
41
lib/adodbapi/test/is64bit.py
Normal file
41
lib/adodbapi/test/is64bit.py
Normal file
@ -0,0 +1,41 @@
"""is64bit.Python() --> boolean value of detected Python word size. is64bit.os() --> os build version"""
import sys


def Python():
    if sys.platform == "cli":  # IronPython
        import System

        return System.IntPtr.Size == 8
    else:
        try:
            return sys.maxsize > 2147483647
        except AttributeError:
            return sys.maxint > 2147483647


def os():
    import platform

    pm = platform.machine()
    if pm != ".." and pm.endswith("64"):  # recent Python (not Iron)
        return True
    else:
        import os

        if "PROCESSOR_ARCHITEW6432" in os.environ:
            return True  # 32 bit program running on 64 bit Windows
        try:
            return os.environ["PROCESSOR_ARCHITECTURE"].endswith(
                "64"
            )  # 64 bit Windows 64 bit program
        except KeyError:  # a missing environment variable raises KeyError, not IndexError
            pass  # not Windows
        try:
            return "64" in platform.architecture()[0]  # this often works in Linux
        except:
            return False  # is an older version of Python, assume also an older os (best we can guess)


if __name__ == "__main__":
    print("is64bit.Python() =", Python(), "is64bit.os() =", os())
134
lib/adodbapi/test/setuptestframework.py
Normal file
134
lib/adodbapi/test/setuptestframework.py
Normal file
@ -0,0 +1,134 @@
#!/usr/bin/python2
# Configure this in order to run the testcases.
"setuptestframework.py v 2.6.0.8"
import os
import shutil
import sys
import tempfile

try:
    OSErrors = (WindowsError, OSError)
except NameError:  # not running on Windows
    OSErrors = OSError


def maketemp():
    temphome = tempfile.gettempdir()
    tempdir = os.path.join(temphome, "adodbapi_test")
    try:
        os.mkdir(tempdir)
    except:
        pass
    return tempdir


def _cleanup_function(testfolder, mdb_name):
    try:
        os.unlink(os.path.join(testfolder, mdb_name))
    except:
        pass  # mdb database not present
    try:
        shutil.rmtree(testfolder)
        print(" cleaned up folder", testfolder)
    except:
        pass  # test package not present


def getcleanupfunction():
    return _cleanup_function


def find_ado_path():
    adoName = os.path.normpath(os.getcwd() + "/../../adodbapi.py")
    adoPackage = os.path.dirname(adoName)
    return adoPackage


# make a new package directory for the test copy of ado
def makeadopackage(testfolder):
    adoName = os.path.normpath(os.getcwd() + "/../adodbapi.py")
    adoPath = os.path.dirname(adoName)
    if os.path.exists(adoName):
        newpackage = os.path.join(testfolder, "adodbapi")
        try:
            os.mkdir(newpackage)
        except OSErrors:
            print(
                "*Note: temporary adodbapi package already exists: may be two versions running?"
            )
        for f in os.listdir(adoPath):
            if f.endswith(".py"):
                shutil.copy(os.path.join(adoPath, f), newpackage)
        if sys.version_info >= (3, 0):  # only when running Py3.n
            save = sys.stdout
            sys.stdout = None
            from lib2to3.main import main  # use 2to3 to make test package

            main("lib2to3.fixes", args=["-n", "-w", newpackage])
            sys.stdout = save
        return testfolder
    else:
        raise EnvironmentError("Cannot find source of adodbapi to test.")


def makemdb(testfolder, mdb_name):
    # following setup code borrowed from pywin32 odbc test suite
    # kindly contributed by Frank Millman.
    import os

    _accessdatasource = os.path.join(testfolder, mdb_name)
    if os.path.isfile(_accessdatasource):
        print("using JET database=", _accessdatasource)
    else:
        try:
            from win32com.client import constants
            from win32com.client.gencache import EnsureDispatch

            win32 = True
        except ImportError:  # perhaps we are running IronPython
            win32 = False  # iron Python
            try:
                from System import Activator, Type
            except:
                pass

        # Create a brand-new database - what is the story with these?
        dbe = None
        for suffix in (".36", ".35", ".30"):
            try:
                if win32:
                    dbe = EnsureDispatch("DAO.DBEngine" + suffix)
                else:
                    type = Type.GetTypeFromProgID("DAO.DBEngine" + suffix)
                    dbe = Activator.CreateInstance(type)
                break
            except:
                pass
        if dbe:
            print(" ...Creating ACCESS db at " + _accessdatasource)
            if win32:
                workspace = dbe.Workspaces(0)
                newdb = workspace.CreateDatabase(
                    _accessdatasource, constants.dbLangGeneral, constants.dbVersion40
                )
            else:
                newdb = dbe.CreateDatabase(
                    _accessdatasource, ";LANGID=0x0409;CP=1252;COUNTRY=0"
                )
            newdb.Close()
        else:
            print(" ...copying test ACCESS db to " + _accessdatasource)
            mdbName = os.path.abspath(
                os.path.join(os.path.dirname(__file__), "..", "examples", "test.mdb")
            )
            import shutil

            shutil.copy(mdbName, _accessdatasource)

    return _accessdatasource


if __name__ == "__main__":
    print("Setting up a Jet database for server to use for remote testing...")
    temp = maketemp()
    makemdb(temp, "server_test.mdb")
200
lib/adodbapi/test/test_adodbapi_dbapi20.py
Normal file
200
lib/adodbapi/test/test_adodbapi_dbapi20.py
Normal file
@ -0,0 +1,200 @@
print("This module depends on the dbapi20 compliance tests created by Stuart Bishop")
print("(see db-sig mailing list history for info)")
import platform
import sys
import unittest

import dbapi20
import setuptestframework

testfolder = setuptestframework.maketemp()
if "--package" in sys.argv:
    pth = setuptestframework.makeadopackage(testfolder)
    sys.argv.remove("--package")
else:
    pth = setuptestframework.find_ado_path()
if pth not in sys.path:
    sys.path.insert(1, pth)
# function to clean up the temporary folder -- calling program must run this function before exit.
cleanup = setuptestframework.getcleanupfunction()

import adodbapi
import adodbapi.is64bit as is64bit

db = adodbapi

if "--verbose" in sys.argv:
    db.adodbapi.verbose = 3

print(adodbapi.version)
print("Tested with dbapi20 %s" % dbapi20.__version__)

try:
    onWindows = bool(sys.getwindowsversion())  # seems to work on all versions of Python
except:
    onWindows = False

node = platform.node()

conn_kws = {}
host = "testsql.2txt.us,1430"  # if None, will use macro to fill in node name
instance = r"%s\SQLEXPRESS"
conn_kws["name"] = "adotest"

conn_kws["user"] = "adotestuser"  # None implies Windows security
conn_kws["password"] = "Sq1234567"
# macro definition for keyword "security" using macro "auto_security"
conn_kws["macro_auto_security"] = "security"

if host is None:
    conn_kws["macro_getnode"] = ["host", instance]
else:
    conn_kws["host"] = host

conn_kws["provider"] = "Provider=MSOLEDBSQL;DataTypeCompatibility=80;MARS Connection=True;"
connStr = "%(provider)s; %(security)s; Initial Catalog=%(name)s;Data Source=%(host)s"

if onWindows and node != "z-PC":
    pass  # default should make a local SQL Server connection
elif node == "xxx":  # try Postgres database
    _computername = "25.223.161.222"
    _databasename = "adotest"
    _username = "adotestuser"
    _password = "12345678"
    _driver = "PostgreSQL Unicode"
    _provider = ""
    connStr = "%sDriver={%s};Server=%s;Database=%s;uid=%s;pwd=%s;" % (
        _provider,
        _driver,
        _computername,
        _databasename,
        _username,
        _password,
    )
elif node == "yyy":  # ACCESS data base is known to fail some tests.
    if is64bit.Python():
        driver = "Microsoft.ACE.OLEDB.12.0"
    else:
        driver = "Microsoft.Jet.OLEDB.4.0"
    # makemdb requires a file name; "test.mdb" is an arbitrary choice here
    testmdb = setuptestframework.makemdb(testfolder, "test.mdb")
    connStr = r"Provider=%s;Data Source=%s" % (driver, testmdb)
else:  # try a remote connection to an SQL server
    conn_kws["proxy_host"] = "25.44.77.176"
    import adodbapi.remote

    db = adodbapi.remote

print("Using Connection String like=%s" % connStr)
print("Keywords=%s" % repr(conn_kws))


class test_adodbapi(dbapi20.DatabaseAPI20Test):
    driver = db
    connect_args = (connStr,)
    connect_kw_args = conn_kws

    def __init__(self, arg):
        dbapi20.DatabaseAPI20Test.__init__(self, arg)

    def getTestMethodName(self):
        return self.id().split(".")[-1]

    def setUp(self):
        # Call superclass setUp in case this does something in the
        # future
        dbapi20.DatabaseAPI20Test.setUp(self)
        if self.getTestMethodName() == "test_callproc":
            con = self._connect()
            engine = con.dbms_name
            ## print('Using database Engine=%s' % engine) ##
            if engine != "MS Jet":
                sql = """
                    create procedure templower
                        @theData varchar(50)
                    as
                        select lower(@theData)
                """
            else:  # Jet
                sql = """
                    create procedure templower
                        (theData varchar(50))
                    as
                        select lower(theData);
                """
            cur = con.cursor()
            try:
                cur.execute(sql)
                con.commit()
            except:
                pass
            cur.close()
            con.close()
            self.lower_func = "templower"

    def tearDown(self):
        if self.getTestMethodName() == "test_callproc":
            con = self._connect()
            cur = con.cursor()
            try:
                cur.execute("drop procedure templower")
            except:
                pass
            con.commit()
        dbapi20.DatabaseAPI20Test.tearDown(self)

    def help_nextset_setUp(self, cur):
        """Should create a procedure called deleteme
        that returns two result sets, first the number of rows in booze then "name from booze"
        """
        sql = """
            create procedure deleteme as
            begin
                select count(*) from %sbooze
                select name from %sbooze
            end
        """ % (
            self.table_prefix,
            self.table_prefix,
        )
        cur.execute(sql)

    def help_nextset_tearDown(self, cur):
        "If cleaning up is needed after nextSetTest"
        try:
            cur.execute("drop procedure deleteme")
        except:
            pass

    def test_nextset(self):
        con = self._connect()
        try:
            cur = con.cursor()

            stmts = [self.ddl1] + self._populate()
            for sql in stmts:
                cur.execute(sql)

            self.help_nextset_setUp(cur)

            cur.callproc("deleteme")
            numberofrows = cur.fetchone()
            assert numberofrows[0] == 6
            assert cur.nextset()
            names = cur.fetchall()
            assert len(names) == len(self.samples)
            s = cur.nextset()
            assert s is None, "No more return sets, should return None"
        finally:
            try:
                self.help_nextset_tearDown(cur)
            finally:
                con.close()

    def test_setoutputsize(self):
        pass


if __name__ == "__main__":
    unittest.main()
    cleanup(testfolder, None)  # note: only reached if unittest.main() is given exit=False
33
lib/adodbapi/test/tryconnection.py
Normal file
33
lib/adodbapi/test/tryconnection.py
Normal file
|
@ -0,0 +1,33 @@
|
|||
remote = False  # automatic testing of remote access has been removed here


def try_connection(verbose, *args, **kwargs):
    import adodbapi

    dbconnect = adodbapi.connect
    try:
        s = dbconnect(*args, **kwargs)  # connect to server
        if verbose:
            print("Connected to:", s.connection_string)
            print("which has tables:", s.get_table_names())
        s.close()  # thanks, it worked, goodbye
    except adodbapi.DatabaseError as inst:
        print(inst.args[0])  # should be the error message
        print("***Failed getting connection using=", repr(args), repr(kwargs))
        return False, (args, kwargs), None

    print(" (successful)")

    return True, (args, kwargs, remote), dbconnect


def try_operation_with_expected_exception(
    expected_exception_list, some_function, *args, **kwargs
):
    try:
        some_function(*args, **kwargs)
    except expected_exception_list as e:
        return True, e
    except:
        raise  # an exception other than the expected occurred
    return False, "The expected exception did not occur"

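A brief usage sketch follows; the DSN is a placeholder and the import assumes this file is on sys.path:

from tryconnection import try_connection

ok, context, connect = try_connection(
    True, "Provider=SQLOLEDB;Data Source=.;Integrated Security=SSPI;"  # placeholder DSN
)
if ok:
    args, kwargs, is_remote = context  # shape returned on success above
    print("connected using", args, kwargs, "remote:", is_remote)
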
396
lib/annotated_types/__init__.py
Normal file

@@ -0,0 +1,396 @@
import math
import sys
from dataclasses import dataclass
from datetime import timezone
from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, SupportsFloat, SupportsIndex, TypeVar, Union

if sys.version_info < (3, 8):
    from typing_extensions import Protocol, runtime_checkable
else:
    from typing import Protocol, runtime_checkable

if sys.version_info < (3, 9):
    from typing_extensions import Annotated, Literal
else:
    from typing import Annotated, Literal

if sys.version_info < (3, 10):
    EllipsisType = type(Ellipsis)
    KW_ONLY = {}
    SLOTS = {}
else:
    from types import EllipsisType

    KW_ONLY = {"kw_only": True}
    SLOTS = {"slots": True}


__all__ = (
    'BaseMetadata',
    'GroupedMetadata',
    'Gt',
    'Ge',
    'Lt',
    'Le',
    'Interval',
    'MultipleOf',
    'MinLen',
    'MaxLen',
    'Len',
    'Timezone',
    'Predicate',
    'LowerCase',
    'UpperCase',
    'IsDigits',
    'IsFinite',
    'IsNotFinite',
    'IsNan',
    'IsNotNan',
    'IsInfinite',
    'IsNotInfinite',
    'doc',
    'DocInfo',
    '__version__',
)

__version__ = '0.6.0'


T = TypeVar('T')


# arguments that start with __ are considered
# positional only
# see https://peps.python.org/pep-0484/#positional-only-arguments


class SupportsGt(Protocol):
    def __gt__(self: T, __other: T) -> bool:
        ...


class SupportsGe(Protocol):
    def __ge__(self: T, __other: T) -> bool:
        ...


class SupportsLt(Protocol):
    def __lt__(self: T, __other: T) -> bool:
        ...


class SupportsLe(Protocol):
    def __le__(self: T, __other: T) -> bool:
        ...


class SupportsMod(Protocol):
    def __mod__(self: T, __other: T) -> T:
        ...


class SupportsDiv(Protocol):
    def __div__(self: T, __other: T) -> T:
        ...


class BaseMetadata:
    """Base class for all metadata.

    This exists mainly so that implementers
    can do `isinstance(..., BaseMetadata)` while traversing field annotations.
    """

    __slots__ = ()


@dataclass(frozen=True, **SLOTS)
class Gt(BaseMetadata):
    """Gt(gt=x) implies that the value must be greater than x.

    It can be used with any type that supports the ``>`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    gt: SupportsGt


@dataclass(frozen=True, **SLOTS)
class Ge(BaseMetadata):
    """Ge(ge=x) implies that the value must be greater than or equal to x.

    It can be used with any type that supports the ``>=`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    ge: SupportsGe


@dataclass(frozen=True, **SLOTS)
class Lt(BaseMetadata):
    """Lt(lt=x) implies that the value must be less than x.

    It can be used with any type that supports the ``<`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    lt: SupportsLt


@dataclass(frozen=True, **SLOTS)
class Le(BaseMetadata):
    """Le(le=x) implies that the value must be less than or equal to x.

    It can be used with any type that supports the ``<=`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    le: SupportsLe


@runtime_checkable
class GroupedMetadata(Protocol):
    """A grouping of multiple BaseMetadata objects.

    `GroupedMetadata` on its own is not metadata and has no meaning.
    All of the constraints and metadata should be fully expressable
    in terms of the `BaseMetadata`'s returned by `GroupedMetadata.__iter__()`.

    Concrete implementations should override `GroupedMetadata.__iter__()`
    to add their own metadata.
    For example:

    >>> @dataclass
    >>> class Field(GroupedMetadata):
    >>>     gt: float | None = None
    >>>     description: str | None = None
    ...
    >>>     def __iter__(self) -> Iterable[BaseMetadata]:
    >>>         if self.gt is not None:
    >>>             yield Gt(self.gt)
    >>>         if self.description is not None:
    >>>             yield Description(self.gt)

    Also see the implementation of `Interval` below for an example.

    Parsers should recognize this and unpack it so that it can be used
    both with and without unpacking:

    - `Annotated[int, Field(...)]` (parser must unpack Field)
    - `Annotated[int, *Field(...)]` (PEP-646)
    """  # noqa: trailing-whitespace

    @property
    def __is_annotated_types_grouped_metadata__(self) -> Literal[True]:
        return True

    def __iter__(self) -> Iterator[BaseMetadata]:
        ...

    if not TYPE_CHECKING:
        __slots__ = ()  # allow subclasses to use slots

        def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None:
            # Basic ABC like functionality without the complexity of an ABC
            super().__init_subclass__(*args, **kwargs)
            if cls.__iter__ is GroupedMetadata.__iter__:
                raise TypeError("Can't subclass GroupedMetadata without implementing __iter__")

        def __iter__(self) -> Iterator[BaseMetadata]:  # noqa: F811
            raise NotImplementedError  # more helpful than "None has no attribute..." type errors


@dataclass(frozen=True, **KW_ONLY, **SLOTS)
class Interval(GroupedMetadata):
    """Interval can express inclusive or exclusive bounds with a single object.

    It accepts keyword arguments ``gt``, ``ge``, ``lt``, and/or ``le``, which
    are interpreted the same way as the single-bound constraints.
    """

    gt: Union[SupportsGt, None] = None
    ge: Union[SupportsGe, None] = None
    lt: Union[SupportsLt, None] = None
    le: Union[SupportsLe, None] = None

    def __iter__(self) -> Iterator[BaseMetadata]:
        """Unpack an Interval into zero or more single-bounds."""
        if self.gt is not None:
            yield Gt(self.gt)
        if self.ge is not None:
            yield Ge(self.ge)
        if self.lt is not None:
            yield Lt(self.lt)
        if self.le is not None:
            yield Le(self.le)

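A short illustration (not from this file) of the unpacking contract: iterating an Interval yields the equivalent single-bound constraints.

from annotated_types import Gt, Interval, Le

# Flattening an Interval into single-bound metadata; order follows __iter__ above.
assert list(Interval(gt=0, le=10)) == [Gt(0), Le(10)]
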
@dataclass(frozen=True, **SLOTS)
class MultipleOf(BaseMetadata):
    """MultipleOf(multiple_of=x) might be interpreted in two ways:

    1. Python semantics, implying ``value % multiple_of == 0``, or
    2. JSONschema semantics, where ``int(value / multiple_of) == value / multiple_of``

    We encourage users to be aware of these two common interpretations,
    and libraries to carefully document which they implement.
    """

    multiple_of: Union[SupportsDiv, SupportsMod]


@dataclass(frozen=True, **SLOTS)
class MinLen(BaseMetadata):
    """
    MinLen() implies minimum inclusive length,
    e.g. ``len(value) >= min_length``.
    """

    min_length: Annotated[int, Ge(0)]


@dataclass(frozen=True, **SLOTS)
class MaxLen(BaseMetadata):
    """
    MaxLen() implies maximum inclusive length,
    e.g. ``len(value) <= max_length``.
    """

    max_length: Annotated[int, Ge(0)]


@dataclass(frozen=True, **SLOTS)
class Len(GroupedMetadata):
    """
    Len() implies that ``min_length <= len(value) <= max_length``.

    Upper bound may be omitted or ``None`` to indicate no upper length bound.
    """

    min_length: Annotated[int, Ge(0)] = 0
    max_length: Optional[Annotated[int, Ge(0)]] = None

    def __iter__(self) -> Iterator[BaseMetadata]:
        """Unpack a Len into zero or more single-bounds."""
        if self.min_length > 0:
            yield MinLen(self.min_length)
        if self.max_length is not None:
            yield MaxLen(self.max_length)


@dataclass(frozen=True, **SLOTS)
class Timezone(BaseMetadata):
    """Timezone(tz=...) requires a datetime to be aware (or ``tz=None``, naive).

    ``Annotated[datetime, Timezone(None)]`` must be a naive datetime.
    ``Timezone[...]`` (the ellipsis literal) expresses that the datetime must be
    tz-aware but any timezone is allowed.

    You may also pass a specific timezone string or timezone object such as
    ``Timezone(timezone.utc)`` or ``Timezone("Africa/Abidjan")`` to express that
    you only allow a specific timezone, though we note that this is often
    a symptom of poor design.
    """

    tz: Union[str, timezone, EllipsisType, None]


@dataclass(frozen=True, **SLOTS)
class Predicate(BaseMetadata):
    """``Predicate(func: Callable)`` implies `func(value)` is truthy for valid values.

    Users should prefer statically inspectable metadata, but if you need the full
    power and flexibility of arbitrary runtime predicates... here it is.

    We provide a few predefined predicates for common string constraints:
    ``IsLower = Predicate(str.islower)``, ``IsUpper = Predicate(str.isupper)``, and
    ``IsDigit = Predicate(str.isdigit)``. Users are encouraged to use methods which
    can be given special handling, and avoid indirection like ``lambda s: s.lower()``.

    Some libraries might have special logic to handle certain predicates, e.g. by
    checking for `str.isdigit` and using its presence to both call custom logic to
    enforce digit-only strings, and customise some generated external schema.

    We do not specify what behaviour should be expected for predicates that raise
    an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
    skip invalid constraints, or statically raise an error; or it might try calling it
    and then propagate or discard the resulting exception.
    """

    func: Callable[[Any], bool]


@dataclass
class Not:
    func: Callable[[Any], bool]

    def __call__(self, __v: Any) -> bool:
        return not self.func(__v)

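As a sketch of how a consumer might apply Predicate metadata at runtime; the is_valid helper here is hypothetical, not part of the library:

import math
import sys
from typing import get_args

from annotated_types import Not, Predicate

if sys.version_info < (3, 9):
    from typing_extensions import Annotated
else:
    from typing import Annotated

Even = Annotated[int, Predicate(lambda x: x % 2 == 0)]

def is_valid(tp, value):
    # Walk the Annotated metadata and apply every Predicate found.
    _, *metadata = get_args(tp)
    return all(m.func(value) for m in metadata if isinstance(m, Predicate))

assert is_valid(Even, 4) and not is_valid(Even, 3)
assert Not(math.isnan)(1.0)  # Not negates a predicate callable
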
_StrType = TypeVar("_StrType", bound=str)

LowerCase = Annotated[_StrType, Predicate(str.islower)]
"""
Return True if the string is a lowercase string, False otherwise.

A string is lowercase if all cased characters in the string are lowercase and there is at least one cased character in the string.
"""  # noqa: E501
UpperCase = Annotated[_StrType, Predicate(str.isupper)]
"""
Return True if the string is an uppercase string, False otherwise.

A string is uppercase if all cased characters in the string are uppercase and there is at least one cased character in the string.
"""  # noqa: E501
IsDigits = Annotated[_StrType, Predicate(str.isdigit)]
"""
Return True if the string is a digit string, False otherwise.

A string is a digit string if all characters in the string are digits and there is at least one character in the string.
"""  # noqa: E501
IsAscii = Annotated[_StrType, Predicate(str.isascii)]
"""
Return True if all characters in the string are ASCII, False otherwise.

ASCII characters have code points in the range U+0000-U+007F. Empty string is ASCII too.
"""

_NumericType = TypeVar('_NumericType', bound=Union[SupportsFloat, SupportsIndex])
IsFinite = Annotated[_NumericType, Predicate(math.isfinite)]
"""Return True if x is neither an infinity nor a NaN, and False otherwise."""
IsNotFinite = Annotated[_NumericType, Predicate(Not(math.isfinite))]
"""Return True if x is one of infinity or NaN, and False otherwise"""
IsNan = Annotated[_NumericType, Predicate(math.isnan)]
"""Return True if x is a NaN (not a number), and False otherwise."""
IsNotNan = Annotated[_NumericType, Predicate(Not(math.isnan))]
"""Return True if x is anything but NaN (not a number), and False otherwise."""
IsInfinite = Annotated[_NumericType, Predicate(math.isinf)]
"""Return True if x is a positive or negative infinity, and False otherwise."""
IsNotInfinite = Annotated[_NumericType, Predicate(Not(math.isinf))]
"""Return True if x is neither a positive or negative infinity, and False otherwise."""

try:
    from typing_extensions import DocInfo, doc  # type: ignore [attr-defined]
except ImportError:

    @dataclass(frozen=True, **SLOTS)
    class DocInfo:  # type: ignore [no-redef]
        """
        The return value of doc(), mainly to be used by tools that want to extract the
        Annotated documentation at runtime.
        """

        documentation: str
        """The documentation string passed to doc()."""

    def doc(
        documentation: str,
    ) -> DocInfo:
        """
        Add documentation to a type annotation inside of Annotated.

        For example:

        >>> def hi(name: Annotated[int, doc("The name of the user")]) -> None: ...
        """
        return DocInfo(documentation)

147
lib/annotated_types/test_cases.py
Normal file

@@ -0,0 +1,147 @@
import math
import sys
from datetime import date, datetime, timedelta, timezone
from decimal import Decimal
from typing import Any, Dict, Iterable, Iterator, List, NamedTuple, Set, Tuple

if sys.version_info < (3, 9):
    from typing_extensions import Annotated
else:
    from typing import Annotated

import annotated_types as at


class Case(NamedTuple):
    """
    A test case for `annotated_types`.
    """

    annotation: Any
    valid_cases: Iterable[Any]
    invalid_cases: Iterable[Any]


def cases() -> Iterable[Case]:
    # Gt, Ge, Lt, Le
    yield Case(Annotated[int, at.Gt(4)], (5, 6, 1000), (4, 0, -1))
    yield Case(Annotated[float, at.Gt(0.5)], (0.6, 0.7, 0.8, 0.9), (0.5, 0.0, -0.1))
    yield Case(
        Annotated[datetime, at.Gt(datetime(2000, 1, 1))],
        [datetime(2000, 1, 2), datetime(2000, 1, 3)],
        [datetime(2000, 1, 1), datetime(1999, 12, 31)],
    )
    yield Case(
        Annotated[datetime, at.Gt(date(2000, 1, 1))],
        [date(2000, 1, 2), date(2000, 1, 3)],
        [date(2000, 1, 1), date(1999, 12, 31)],
    )
    yield Case(
        Annotated[datetime, at.Gt(Decimal('1.123'))],
        [Decimal('1.1231'), Decimal('123')],
        [Decimal('1.123'), Decimal('0')],
    )

    yield Case(Annotated[int, at.Ge(4)], (4, 5, 6, 1000, 4), (0, -1))
    yield Case(Annotated[float, at.Ge(0.5)], (0.5, 0.6, 0.7, 0.8, 0.9), (0.4, 0.0, -0.1))
    yield Case(
        Annotated[datetime, at.Ge(datetime(2000, 1, 1))],
        [datetime(2000, 1, 2), datetime(2000, 1, 3)],
        [datetime(1998, 1, 1), datetime(1999, 12, 31)],
    )

    yield Case(Annotated[int, at.Lt(4)], (0, -1), (4, 5, 6, 1000, 4))
    yield Case(Annotated[float, at.Lt(0.5)], (0.4, 0.0, -0.1), (0.5, 0.6, 0.7, 0.8, 0.9))
    yield Case(
        Annotated[datetime, at.Lt(datetime(2000, 1, 1))],
        [datetime(1999, 12, 31), datetime(1999, 12, 31)],
        [datetime(2000, 1, 2), datetime(2000, 1, 3)],
    )

    yield Case(Annotated[int, at.Le(4)], (4, 0, -1), (5, 6, 1000))
    yield Case(Annotated[float, at.Le(0.5)], (0.5, 0.0, -0.1), (0.6, 0.7, 0.8, 0.9))
    yield Case(
        Annotated[datetime, at.Le(datetime(2000, 1, 1))],
        [datetime(2000, 1, 1), datetime(1999, 12, 31)],
        [datetime(2000, 1, 2), datetime(2000, 1, 3)],
    )

    # Interval
    yield Case(Annotated[int, at.Interval(gt=4)], (5, 6, 1000), (4, 0, -1))
    yield Case(Annotated[int, at.Interval(gt=4, lt=10)], (5, 6), (4, 10, 1000, 0, -1))
    yield Case(Annotated[float, at.Interval(ge=0.5, le=1)], (0.5, 0.9, 1), (0.49, 1.1))
    yield Case(
        Annotated[datetime, at.Interval(gt=datetime(2000, 1, 1), le=datetime(2000, 1, 3))],
        [datetime(2000, 1, 2), datetime(2000, 1, 3)],
        [datetime(2000, 1, 1), datetime(2000, 1, 4)],
    )

    yield Case(Annotated[int, at.MultipleOf(multiple_of=3)], (0, 3, 9), (1, 2, 4))
    yield Case(Annotated[float, at.MultipleOf(multiple_of=0.5)], (0, 0.5, 1, 1.5), (0.4, 1.1))

    # lengths

    yield Case(Annotated[str, at.MinLen(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
    yield Case(Annotated[str, at.Len(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
    yield Case(Annotated[List[int], at.MinLen(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))
    yield Case(Annotated[List[int], at.Len(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))

    yield Case(Annotated[str, at.MaxLen(4)], ('', '1234'), ('12345', 'x' * 10))
    yield Case(Annotated[str, at.Len(0, 4)], ('', '1234'), ('12345', 'x' * 10))
    yield Case(Annotated[List[str], at.MaxLen(4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))
    yield Case(Annotated[List[str], at.Len(0, 4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))

    yield Case(Annotated[str, at.Len(3, 5)], ('123', '12345'), ('', '1', '12', '123456', 'x' * 10))
    yield Case(Annotated[str, at.Len(3, 3)], ('123',), ('12', '1234'))

    yield Case(Annotated[Dict[int, int], at.Len(2, 3)], [{1: 1, 2: 2}], [{}, {1: 1}, {1: 1, 2: 2, 3: 3, 4: 4}])
    yield Case(Annotated[Set[int], at.Len(2, 3)], ({1, 2}, {1, 2, 3}), (set(), {1}, {1, 2, 3, 4}))
    yield Case(Annotated[Tuple[int, ...], at.Len(2, 3)], ((1, 2), (1, 2, 3)), ((), (1,), (1, 2, 3, 4)))

    # Timezone

    yield Case(
        Annotated[datetime, at.Timezone(None)], [datetime(2000, 1, 1)], [datetime(2000, 1, 1, tzinfo=timezone.utc)]
    )
    yield Case(
        Annotated[datetime, at.Timezone(...)], [datetime(2000, 1, 1, tzinfo=timezone.utc)], [datetime(2000, 1, 1)]
    )
    yield Case(
        Annotated[datetime, at.Timezone(timezone.utc)],
        [datetime(2000, 1, 1, tzinfo=timezone.utc)],
        [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
    )
    yield Case(
        Annotated[datetime, at.Timezone('Europe/London')],
        [datetime(2000, 1, 1, tzinfo=timezone(timedelta(0), name='Europe/London'))],
        [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
    )

    # predicate types

    yield Case(at.LowerCase[str], ['abc', 'foobar'], ['', 'A', 'Boom'])
    yield Case(at.UpperCase[str], ['ABC', 'DEFO'], ['', 'a', 'abc', 'AbC'])
    yield Case(at.IsDigits[str], ['123'], ['', 'ab', 'a1b2'])
    yield Case(at.IsAscii[str], ['123', 'foo bar'], ['£100', '😊', 'whatever 👀'])

    yield Case(Annotated[int, at.Predicate(lambda x: x % 2 == 0)], [0, 2, 4], [1, 3, 5])

    yield Case(at.IsFinite[float], [1.23], [math.nan, math.inf, -math.inf])
    yield Case(at.IsNotFinite[float], [math.nan, math.inf], [1.23])
    yield Case(at.IsNan[float], [math.nan], [1.23, math.inf])
    yield Case(at.IsNotNan[float], [1.23, math.inf], [math.nan])
    yield Case(at.IsInfinite[float], [math.inf], [math.nan, 1.23])
    yield Case(at.IsNotInfinite[float], [math.nan, 1.23], [math.inf])

    # check stacked predicates
    yield Case(at.IsInfinite[Annotated[float, at.Predicate(lambda x: x > 0)]], [math.inf], [-math.inf, 1.23, math.nan])

    # doc
    yield Case(Annotated[int, at.doc("A number")], [1, 2], [])

    # custom GroupedMetadata
    class MyCustomGroupedMetadata(at.GroupedMetadata):
        def __iter__(self) -> Iterator[at.Predicate]:
            yield at.Predicate(lambda x: float(x).is_integer())

    yield Case(Annotated[float, MyCustomGroupedMetadata()], [0, 2.0], [0.01, 1.5])

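A hedged sketch of consuming these cases: the check() helper below is a toy validator for the comparison bounds only, not part of the package.

import sys
from typing import get_args

import annotated_types as at

if sys.version_info < (3, 9):
    from typing_extensions import Annotated
else:
    from typing import Annotated

def check(annotation, value) -> bool:
    # Toy validator: enforce only the single-bound constraints.
    _, *metadata = get_args(annotation)
    ok = True
    for m in metadata:
        if isinstance(m, at.Gt):
            ok = ok and value > m.gt
        elif isinstance(m, at.Ge):
            ok = ok and value >= m.ge
        elif isinstance(m, at.Lt):
            ok = ok and value < m.lt
        elif isinstance(m, at.Le):
            ok = ok and value <= m.le
    return ok

assert check(Annotated[int, at.Gt(4)], 5)
assert not check(Annotated[int, at.Gt(4)], 4)
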
@@ -1 +1 @@
__version__ = "1.2.3"
__version__ = "1.3.0"

@@ -168,9 +168,9 @@ class Arrow:
            isinstance(tzinfo, dt_tzinfo)
            and hasattr(tzinfo, "localize")
            and hasattr(tzinfo, "zone")
            and tzinfo.zone  # type: ignore[attr-defined]
            and tzinfo.zone
        ):
            tzinfo = parser.TzinfoParser.parse(tzinfo.zone)  # type: ignore[attr-defined]
            tzinfo = parser.TzinfoParser.parse(tzinfo.zone)
        elif isinstance(tzinfo, str):
            tzinfo = parser.TzinfoParser.parse(tzinfo)

@@ -495,7 +495,7 @@ class Arrow:
            yield current

            values = [getattr(current, f) for f in cls._ATTRS]
            current = cls(*values, tzinfo=tzinfo).shift(  # type: ignore
            current = cls(*values, tzinfo=tzinfo).shift(  # type: ignore[misc]
                **{frame_relative: relative_steps}
            )

@@ -578,7 +578,7 @@ class Arrow:
        for _ in range(3 - len(values)):
            values.append(1)

        floor = self.__class__(*values, tzinfo=self.tzinfo)  # type: ignore
        floor = self.__class__(*values, tzinfo=self.tzinfo)  # type: ignore[misc]

        if frame_absolute == "week":
            # if week_start is greater than self.isoweekday() go back one week by setting delta = 7

@@ -792,7 +792,6 @@ class Arrow:
        return self._datetime.isoformat()

    def __format__(self, formatstr: str) -> str:

        if len(formatstr) > 0:
            return self.format(formatstr)

@@ -804,7 +803,6 @@ class Arrow:
    # attributes and properties

    def __getattr__(self, name: str) -> int:

        if name == "week":
            return self.isocalendar()[1]

@@ -965,7 +963,6 @@ class Arrow:
        absolute_kwargs = {}

        for key, value in kwargs.items():

            if key in self._ATTRS:
                absolute_kwargs[key] = value
            elif key in ["week", "quarter"]:

@@ -1022,7 +1019,6 @@ class Arrow:
        additional_attrs = ["weeks", "quarters", "weekday"]

        for key, value in kwargs.items():

            if key in self._ATTRS_PLURAL or key in additional_attrs:
                relative_kwargs[key] = value
            else:

@@ -1259,11 +1255,10 @@ class Arrow:
            )

            if trunc(abs(delta)) != 1:
                granularity += "s"  # type: ignore
                granularity += "s"  # type: ignore[assignment]
            return locale.describe(granularity, delta, only_distance=only_distance)

        else:

            if not granularity:
                raise ValueError(
                    "Empty granularity list provided. "

@@ -1314,7 +1309,7 @@ class Arrow:

    def dehumanize(self, input_string: str, locale: str = "en_us") -> "Arrow":
        """Returns a new :class:`Arrow <arrow.arrow.Arrow>` object, that represents
        the time difference relative to the attrbiutes of the
        the time difference relative to the attributes of the
        :class:`Arrow <arrow.arrow.Arrow>` object.

        :param timestring: a ``str`` representing a humanized relative time.

@@ -1367,7 +1362,6 @@ class Arrow:

        # Search input string for each time unit within locale
        for unit, unit_object in locale_obj.timeframes.items():

            # Need to check the type of unit_object to create the correct dictionary
            if isinstance(unit_object, Mapping):
                strings_to_search = unit_object

@@ -1378,7 +1372,6 @@ class Arrow:
            # Needs to cycle all through strings as some locales have strings that
            # could overlap in a regex match, since input validation isn't being performed.
            for time_delta, time_string in strings_to_search.items():

                # Replace {0} with regex \d representing digits
                search_string = str(time_string)
                search_string = search_string.format(r"\d+")

@@ -1419,7 +1412,7 @@ class Arrow:
        # Assert error if string does not modify any units
        if not any([True for k, v in unit_visited.items() if v]):
            raise ValueError(
                "Input string not valid. Note: Some locales do not support the week granulairty in Arrow. "
                "Input string not valid. Note: Some locales do not support the week granularity in Arrow. "
                "If you are attempting to use the week granularity on an unsupported locale, this could be the cause of this error."
            )

@@ -1718,7 +1711,6 @@ class Arrow:
    # math

    def __add__(self, other: Any) -> "Arrow":

        if isinstance(other, (timedelta, relativedelta)):
            return self.fromdatetime(self._datetime + other, self._datetime.tzinfo)

@@ -1736,7 +1728,6 @@ class Arrow:
        pass  # pragma: no cover

    def __sub__(self, other: Any) -> Union[timedelta, "Arrow"]:

        if isinstance(other, (timedelta, relativedelta)):
            return self.fromdatetime(self._datetime - other, self._datetime.tzinfo)

@@ -1749,7 +1740,6 @@ class Arrow:
        return NotImplemented

    def __rsub__(self, other: Any) -> timedelta:

        if isinstance(other, dt_datetime):
            return other - self._datetime

@@ -1758,42 +1748,36 @@ class Arrow:
    # comparisons

    def __eq__(self, other: Any) -> bool:

        if not isinstance(other, (Arrow, dt_datetime)):
            return False

        return self._datetime == self._get_datetime(other)

    def __ne__(self, other: Any) -> bool:

        if not isinstance(other, (Arrow, dt_datetime)):
            return True

        return not self.__eq__(other)

    def __gt__(self, other: Any) -> bool:

        if not isinstance(other, (Arrow, dt_datetime)):
            return NotImplemented

        return self._datetime > self._get_datetime(other)

    def __ge__(self, other: Any) -> bool:

        if not isinstance(other, (Arrow, dt_datetime)):
            return NotImplemented

        return self._datetime >= self._get_datetime(other)

    def __lt__(self, other: Any) -> bool:

        if not isinstance(other, (Arrow, dt_datetime)):
            return NotImplemented

        return self._datetime < self._get_datetime(other)

    def __le__(self, other: Any) -> bool:

        if not isinstance(other, (Arrow, dt_datetime)):
            return NotImplemented

@@ -1865,7 +1849,6 @@ class Arrow:
    def _get_iteration_params(cls, end: Any, limit: Optional[int]) -> Tuple[Any, int]:
        """Sets default end and limit values for range method."""
        if end is None:

            if limit is None:
                raise ValueError("One of 'end' or 'limit' is required.")

@@ -267,11 +267,9 @@ class ArrowFactory:
            raise TypeError(f"Cannot parse single argument of type {type(arg)!r}.")

        elif arg_count == 2:

            arg_1, arg_2 = args[0], args[1]

            if isinstance(arg_1, datetime):

                # (datetime, tzinfo/str) -> fromdatetime @ tzinfo
                if isinstance(arg_2, (dt_tzinfo, str)):
                    return self.type.fromdatetime(arg_1, tzinfo=arg_2)

@@ -281,7 +279,6 @@ class ArrowFactory:
                    )

            elif isinstance(arg_1, date):

                # (date, tzinfo/str) -> fromdate @ tzinfo
                if isinstance(arg_2, (dt_tzinfo, str)):
                    return self.type.fromdate(arg_1, tzinfo=arg_2)

@@ -29,7 +29,6 @@ FORMAT_W3C: Final[str] = "YYYY-MM-DD HH:mm:ssZZ"


class DateTimeFormatter:

    # This pattern matches characters enclosed in square brackets are matched as
    # an atomic group. For more info on atomic groups and how to they are
    # emulated in Python's re library, see https://stackoverflow.com/a/13577411/2701578

@@ -41,18 +40,15 @@ class DateTimeFormatter:
    locale: locales.Locale

    def __init__(self, locale: str = DEFAULT_LOCALE) -> None:

        self.locale = locales.get_locale(locale)

    def format(cls, dt: datetime, fmt: str) -> str:

        # FIXME: _format_token() is nullable
        return cls._FORMAT_RE.sub(
            lambda m: cast(str, cls._format_token(dt, m.group(0))), fmt
        )

    def _format_token(self, dt: datetime, token: Optional[str]) -> Optional[str]:

        if token and token.startswith("[") and token.endswith("]"):
            return token[1:-1]

@@ -129,7 +129,6 @@ class Locale:
        _locale_map[locale_name.lower().replace("_", "-")] = cls

    def __init__(self) -> None:

        self._month_name_to_ordinal = None

    def describe(

@@ -174,7 +173,7 @@ class Locale:
        # Needed to determine the correct relative string to use
        timeframe_value = 0

        for _unit_name, unit_value in timeframes:
        for _, unit_value in timeframes:
            if trunc(unit_value) != 0:
                timeframe_value = trunc(unit_value)
                break

@@ -285,7 +284,6 @@ class Locale:
        timeframe: TimeFrameLiteral,
        delta: Union[float, int],
    ) -> str:

        if timeframe == "now":
            return humanized

@@ -425,7 +423,7 @@ class ItalianLocale(Locale):
        "hours": "{0} ore",
        "day": "un giorno",
        "days": "{0} giorni",
        "week": "una settimana,",
        "week": "una settimana",
        "weeks": "{0} settimane",
        "month": "un mese",
        "months": "{0} mesi",

@@ -867,14 +865,16 @@ class FinnishLocale(Locale):

    timeframes: ClassVar[Mapping[TimeFrameLiteral, Union[str, Mapping[str, str]]]] = {
        "now": "juuri nyt",
        "second": "sekunti",
        "seconds": {"past": "{0} muutama sekunti", "future": "{0} muutaman sekunnin"},
        "second": {"past": "sekunti", "future": "sekunnin"},
        "seconds": {"past": "{0} sekuntia", "future": "{0} sekunnin"},
        "minute": {"past": "minuutti", "future": "minuutin"},
        "minutes": {"past": "{0} minuuttia", "future": "{0} minuutin"},
        "hour": {"past": "tunti", "future": "tunnin"},
        "hours": {"past": "{0} tuntia", "future": "{0} tunnin"},
        "day": "päivä",
        "day": {"past": "päivä", "future": "päivän"},
        "days": {"past": "{0} päivää", "future": "{0} päivän"},
        "week": {"past": "viikko", "future": "viikon"},
        "weeks": {"past": "{0} viikkoa", "future": "{0} viikon"},
        "month": {"past": "kuukausi", "future": "kuukauden"},
        "months": {"past": "{0} kuukautta", "future": "{0} kuukauden"},
        "year": {"past": "vuosi", "future": "vuoden"},

@@ -1887,7 +1887,7 @@ class GermanBaseLocale(Locale):
    future = "in {0}"
    and_word = "und"

    timeframes = {
    timeframes: ClassVar[Dict[TimeFrameLiteral, str]] = {
        "now": "gerade eben",
        "second": "einer Sekunde",
        "seconds": "{0} Sekunden",

@@ -1982,7 +1982,9 @@ class GermanBaseLocale(Locale):
            return super().describe(timeframe, delta, only_distance)

        # German uses a different case without 'in' or 'ago'
        humanized = self.timeframes_only_distance[timeframe].format(trunc(abs(delta)))
        humanized: str = self.timeframes_only_distance[timeframe].format(
            trunc(abs(delta))
        )

        return humanized

@@ -2547,6 +2549,8 @@ class ArabicLocale(Locale):
        "hours": {"2": "ساعتين", "ten": "{0} ساعات", "higher": "{0} ساعة"},
        "day": "يوم",
        "days": {"2": "يومين", "ten": "{0} أيام", "higher": "{0} يوم"},
        "week": "اسبوع",
        "weeks": {"2": "اسبوعين", "ten": "{0} أسابيع", "higher": "{0} اسبوع"},
        "month": "شهر",
        "months": {"2": "شهرين", "ten": "{0} أشهر", "higher": "{0} شهر"},
        "year": "سنة",

@@ -3709,6 +3713,8 @@ class HungarianLocale(Locale):
        "hours": {"past": "{0} órával", "future": "{0} óra"},
        "day": {"past": "egy nappal", "future": "egy nap"},
        "days": {"past": "{0} nappal", "future": "{0} nap"},
        "week": {"past": "egy héttel", "future": "egy hét"},
        "weeks": {"past": "{0} héttel", "future": "{0} hét"},
        "month": {"past": "egy hónappal", "future": "egy hónap"},
        "months": {"past": "{0} hónappal", "future": "{0} hónap"},
        "year": {"past": "egy évvel", "future": "egy év"},

@@ -3934,7 +3940,6 @@ class ThaiLocale(Locale):


class LaotianLocale(Locale):

    names = ["lo", "lo-la"]

    past = "{0} ກ່ອນຫນ້ານີ້"

@@ -4119,6 +4124,7 @@ class BengaliLocale(Locale):
            return f"{n}র্থ"
        if n == 6:
            return f"{n}ষ্ঠ"
        return ""


class RomanshLocale(Locale):

@@ -4137,6 +4143,8 @@ class RomanshLocale(Locale):
        "hours": "{0} ura",
        "day": "in di",
        "days": "{0} dis",
        "week": "in'emna",
        "weeks": "{0} emnas",
        "month": "in mais",
        "months": "{0} mais",
        "year": "in onn",

@@ -5399,7 +5407,7 @@ class LuxembourgishLocale(Locale):
    future = "an {0}"
    and_word = "an"

    timeframes = {
    timeframes: ClassVar[Dict[TimeFrameLiteral, str]] = {
        "now": "just elo",
        "second": "enger Sekonn",
        "seconds": "{0} Sekonnen",

@@ -5487,7 +5495,9 @@ class LuxembourgishLocale(Locale):
            return super().describe(timeframe, delta, only_distance)

        # Luxembourgish uses a different case without 'in' or 'ago'
        humanized = self.timeframes_only_distance[timeframe].format(trunc(abs(delta)))
        humanized: str = self.timeframes_only_distance[timeframe].format(
            trunc(abs(delta))
        )

        return humanized

@@ -159,7 +159,6 @@ class DateTimeParser:
    _input_re_map: Dict[_FORMAT_TYPE, Pattern[str]]

    def __init__(self, locale: str = DEFAULT_LOCALE, cache_size: int = 0) -> None:

        self.locale = locales.get_locale(locale)
        self._input_re_map = self._BASE_INPUT_RE_MAP.copy()
        self._input_re_map.update(

@@ -196,7 +195,6 @@ class DateTimeParser:
    def parse_iso(
        self, datetime_string: str, normalize_whitespace: bool = False
    ) -> datetime:

        if normalize_whitespace:
            datetime_string = re.sub(r"\s+", " ", datetime_string.strip())

@@ -236,13 +234,14 @@ class DateTimeParser:
        ]

        if has_time:

            if has_space_divider:
                date_string, time_string = datetime_string.split(" ", 1)
            else:
                date_string, time_string = datetime_string.split("T", 1)

            time_parts = re.split(r"[\+\-Z]", time_string, 1, re.IGNORECASE)
            time_parts = re.split(
                r"[\+\-Z]", time_string, maxsplit=1, flags=re.IGNORECASE
            )

            time_components: Optional[Match[str]] = self._TIME_RE.match(time_parts[0])

@@ -303,7 +302,6 @@ class DateTimeParser:
        fmt: Union[List[str], str],
        normalize_whitespace: bool = False,
    ) -> datetime:

        if normalize_whitespace:
            datetime_string = re.sub(r"\s+", " ", datetime_string)

@@ -341,12 +339,11 @@ class DateTimeParser:
                    f"Unable to find a match group for the specified token {token!r}."
                )

            self._parse_token(token, value, parts)  # type: ignore
            self._parse_token(token, value, parts)  # type: ignore[arg-type]

        return self._build_datetime(parts)

    def _generate_pattern_re(self, fmt: str) -> Tuple[List[_FORMAT_TYPE], Pattern[str]]:

        # fmt is a string of tokens like 'YYYY-MM-DD'
        # we construct a new string by replacing each
        # token by its pattern:

@@ -498,7 +495,6 @@ class DateTimeParser:
        value: Any,
        parts: _Parts,
    ) -> None:

        if token == "YYYY":
            parts["year"] = int(value)

@@ -508,7 +504,7 @@ class DateTimeParser:

        elif token in ["MMMM", "MMM"]:
            # FIXME: month_number() is nullable
            parts["month"] = self.locale.month_number(value.lower())  # type: ignore
            parts["month"] = self.locale.month_number(value.lower())  # type: ignore[typeddict-item]

        elif token in ["MM", "M"]:
            parts["month"] = int(value)

@@ -588,7 +584,6 @@ class DateTimeParser:
        weekdate = parts.get("weekdate")

        if weekdate is not None:

            year, week = int(weekdate[0]), int(weekdate[1])

            if weekdate[2] is not None:

@@ -712,7 +707,6 @@ class DateTimeParser:
        )

    def _parse_multiformat(self, string: str, formats: Iterable[str]) -> datetime:

        _datetime: Optional[datetime] = None

        for fmt in formats:

@@ -740,12 +734,11 @@ class DateTimeParser:

class TzinfoParser:
    _TZINFO_RE: ClassVar[Pattern[str]] = re.compile(
        r"^([\+\-])?(\d{2})(?:\:?(\d{2}))?$"
        r"^(?:\(UTC)*([\+\-])?(\d{2})(?:\:?(\d{2}))?"
    )

    @classmethod
    def parse(cls, tzinfo_string: str) -> dt_tzinfo:

        tzinfo: Optional[dt_tzinfo] = None

        if tzinfo_string == "local":

@@ -755,7 +748,6 @@ class TzinfoParser:
            tzinfo = tz.tzutc()

        else:

            iso_match = cls._TZINFO_RE.match(tzinfo_string)

            if iso_match:

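To see what the widened _TZINFO_RE in the hunk above accepts, a standalone sketch with the pattern copied from the new line:

import re

# New pattern: tolerates a leading "(UTC" and no longer anchors at the end.
TZINFO_RE = re.compile(r"^(?:\(UTC)*([\+\-])?(\d{2})(?:\:?(\d{2}))?")

assert TZINFO_RE.match("+05:30").groups() == ("+", "05", "30")
assert TZINFO_RE.match("(UTC+05:30) Chennai").groups() == ("+", "05", "30")
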
@@ -20,7 +20,7 @@ from functools import wraps
from inspect import signature


def _launch_forever_coro(coro, args, kwargs, loop):
async def _run_forever_coro(coro, args, kwargs, loop):
    '''
    This helper function launches an async main function that was tagged with
    forever=True. There are two possibilities:

@@ -48,7 +48,7 @@ def _launch_forever_coro(coro, args, kwargs, loop):
    # forever=True feature from autoasync at some point in the future.
    thing = coro(*args, **kwargs)
    if iscoroutine(thing):
        loop.create_task(thing)
        await thing


def autoasync(coro=None, *, loop=None, forever=False, pass_loop=False):

@@ -127,7 +127,9 @@ def autoasync(coro=None, *, loop=None, forever=False, pass_loop=False):
            args, kwargs = bound_args.args, bound_args.kwargs

            if forever:
                _launch_forever_coro(coro, args, kwargs, local_loop)
                local_loop.create_task(_run_forever_coro(
                    coro, args, kwargs, local_loop
                ))
                local_loop.run_forever()
            else:
                return local_loop.run_until_complete(coro(*args, **kwargs))

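A hedged sketch of the decorator this patch touches; the import path is assumed from the vendored layout:

import asyncio

from autocommand.autoasync import autoasync  # assumed import path

@autoasync(forever=True)
async def main():
    while True:
        await asyncio.sleep(1)

# Calling main() now schedules _run_forever_coro via create_task and then
# blocks in loop.run_forever(), per the hunk above.
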
@@ -26,6 +26,12 @@ def update_wrapper(


class _HashedSeq(list):
    """This class guarantees that hash() will be called no more than once
    per element. This is important because the lru_cache() will hash
    the key multiple times on a cache miss.

    """

    __slots__ = 'hashvalue'

    def __init__(self, tup, hash=hash):

@@ -41,45 +47,57 @@ def _make_key(
    kwds,
    typed,
    kwd_mark=(object(),),
    fasttypes=set([int, str, frozenset, type(None)]),
    sorted=sorted,
    fasttypes={int, str},
    tuple=tuple,
    type=type,
    len=len,
):
    'Make a cache key from optionally typed positional and keyword arguments'
    """Make a cache key from optionally typed positional and keyword arguments

    The key is constructed in a way that is flat as possible rather than
    as a nested structure that would take more memory.

    If there is only a single argument and its data type is known to cache
    its hash value, then that argument is returned without a wrapper. This
    saves space and improves lookup speed.

    """
    # All of code below relies on kwds preserving the order input by the user.
    # Formerly, we sorted() the kwds before looping.  The new way is *much*
    # faster; however, it means that f(x=1, y=2) will now be treated as a
    # distinct call from f(y=2, x=1) which will be cached separately.
    key = args
    if kwds:
        sorted_items = sorted(kwds.items())
        key += kwd_mark
        for item in sorted_items:
        for item in kwds.items():
            key += item
    if typed:
        key += tuple(type(v) for v in args)
        if kwds:
            key += tuple(type(v) for k, v in sorted_items)
            key += tuple(type(v) for v in kwds.values())
    elif len(key) == 1 and type(key[0]) in fasttypes:
        return key[0]
    return _HashedSeq(key)


def lru_cache(maxsize=100, typed=False):  # noqa: C901
def lru_cache(maxsize=128, typed=False):
    """Least-recently-used cache decorator.

    If *maxsize* is set to None, the LRU features are disabled and the cache
    can grow without bound.

    If *typed* is True, arguments of different types will be cached separately.
    For example, f(3.0) and f(3) will be treated as distinct calls with
    distinct results.
    For example, f(decimal.Decimal("3.0")) and f(3.0) will be treated as
    distinct calls with distinct results. Some types such as str and int may
    be cached separately even when typed is false.

    Arguments to the cached function must be hashable.

    View the cache statistics named tuple (hits, misses, maxsize, currsize) with
    f.cache_info().  Clear the cache and statistics with f.cache_clear().
    View the cache statistics named tuple (hits, misses, maxsize, currsize)
    with f.cache_info().  Clear the cache and statistics with f.cache_clear().
    Access the underlying function with f.__wrapped__.

    See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
    See: https://en.wikipedia.org/wiki/Cache_replacement_policies#Least_recently_used_(LRU)

    """

@@ -88,108 +106,138 @@ def lru_cache(maxsize=100, typed=False):  # noqa: C901
    # The internals of the lru_cache are encapsulated for thread safety and
    # to allow the implementation to change (including a possible C version).

    if isinstance(maxsize, int):
        # Negative maxsize is treated as 0
        if maxsize < 0:
            maxsize = 0
    elif callable(maxsize) and isinstance(typed, bool):
        # The user_function was passed in directly via the maxsize argument
        user_function, maxsize = maxsize, 128
        wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
        wrapper.cache_parameters = lambda: {'maxsize': maxsize, 'typed': typed}
        return update_wrapper(wrapper, user_function)
    elif maxsize is not None:
        raise TypeError('Expected first argument to be an integer, a callable, or None')

    def decorating_function(user_function):
        cache = dict()
        stats = [0, 0]  # make statistics updateable non-locally
        HITS, MISSES = 0, 1  # names for the stats fields
        make_key = _make_key
        cache_get = cache.get  # bound method to lookup key or return None
        _len = len  # localize the global len() function
        lock = RLock()  # because linkedlist updates aren't threadsafe
        root = []  # root of the circular doubly linked list
        root[:] = [root, root, None, None]  # initialize by pointing to self
        nonlocal_root = [root]  # make updateable non-locally
        PREV, NEXT, KEY, RESULT = 0, 1, 2, 3  # names for the link fields

        if maxsize == 0:

            def wrapper(*args, **kwds):
                # no caching, just do a statistics update after a successful call
                result = user_function(*args, **kwds)
                stats[MISSES] += 1
                return result

        elif maxsize is None:

            def wrapper(*args, **kwds):
                # simple caching without ordering or size limit
                key = make_key(args, kwds, typed)
                result = cache_get(
                    key, root
                )  # root used here as a unique not-found sentinel
                if result is not root:
                    stats[HITS] += 1
                    return result
                result = user_function(*args, **kwds)
                cache[key] = result
                stats[MISSES] += 1
                return result

        else:

            def wrapper(*args, **kwds):
                # size limited caching that tracks accesses by recency
                key = make_key(args, kwds, typed) if kwds or typed else args
                with lock:
                    link = cache_get(key)
                    if link is not None:
                        # record recent use of the key by moving it
                        # to the front of the list
                        (root,) = nonlocal_root
                        link_prev, link_next, key, result = link
                        link_prev[NEXT] = link_next
                        link_next[PREV] = link_prev
                        last = root[PREV]
                        last[NEXT] = root[PREV] = link
                        link[PREV] = last
                        link[NEXT] = root
                        stats[HITS] += 1
                        return result
                result = user_function(*args, **kwds)
                with lock:
                    (root,) = nonlocal_root
                    if key in cache:
                        # getting here means that this same key was added to the
                        # cache while the lock was released.  since the link
                        # update is already done, we need only return the
                        # computed result and update the count of misses.
                        pass
                    elif _len(cache) >= maxsize:
                        # use the old root to store the new key and result
                        oldroot = root
                        oldroot[KEY] = key
                        oldroot[RESULT] = result
                        # empty the oldest link and make it the new root
                        root = nonlocal_root[0] = oldroot[NEXT]
                        oldkey = root[KEY]
                        root[KEY] = root[RESULT] = None
                        # now update the cache dictionary for the new links
                        del cache[oldkey]
                        cache[key] = oldroot
                    else:
                        # put result in a new link at the front of the list
                        last = root[PREV]
                        link = [last, root, key, result]
                        last[NEXT] = root[PREV] = cache[key] = link
                    stats[MISSES] += 1
                return result

        def cache_info():
            """Report cache statistics"""
            with lock:
                return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache))

        def cache_clear():
            """Clear the cache and cache statistics"""
            with lock:
                cache.clear()
                root = nonlocal_root[0]
                root[:] = [root, root, None, None]
                stats[:] = [0, 0]

        wrapper.__wrapped__ = user_function
        wrapper.cache_info = cache_info
        wrapper.cache_clear = cache_clear
        wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
        wrapper.cache_parameters = lambda: {'maxsize': maxsize, 'typed': typed}
        return update_wrapper(wrapper, user_function)

    return decorating_function


def _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo):
    # Constants shared by all lru cache instances:
    sentinel = object()  # unique object used to signal cache misses
    make_key = _make_key  # build a key from the function arguments
    PREV, NEXT, KEY, RESULT = 0, 1, 2, 3  # names for the link fields

    cache = {}
    hits = misses = 0
    full = False
    cache_get = cache.get  # bound method to lookup a key or return None
    cache_len = cache.__len__  # get cache size without calling len()
    lock = RLock()  # because linkedlist updates aren't threadsafe
    root = []  # root of the circular doubly linked list
    root[:] = [root, root, None, None]  # initialize by pointing to self

    if maxsize == 0:

        def wrapper(*args, **kwds):
            # No caching -- just a statistics update
            nonlocal misses
            misses += 1
            result = user_function(*args, **kwds)
            return result

    elif maxsize is None:

        def wrapper(*args, **kwds):
            # Simple caching without ordering or size limit
            nonlocal hits, misses
            key = make_key(args, kwds, typed)
            result = cache_get(key, sentinel)
            if result is not sentinel:
                hits += 1
                return result
            misses += 1
            result = user_function(*args, **kwds)
            cache[key] = result
            return result

    else:

        def wrapper(*args, **kwds):
            # Size limited caching that tracks accesses by recency
            nonlocal root, hits, misses, full
            key = make_key(args, kwds, typed)
            with lock:
                link = cache_get(key)
                if link is not None:
                    # Move the link to the front of the circular queue
                    link_prev, link_next, _key, result = link
                    link_prev[NEXT] = link_next
                    link_next[PREV] = link_prev
                    last = root[PREV]
                    last[NEXT] = root[PREV] = link
                    link[PREV] = last
                    link[NEXT] = root
                    hits += 1
                    return result
                misses += 1
            result = user_function(*args, **kwds)
            with lock:
                if key in cache:
                    # Getting here means that this same key was added to the
                    # cache while the lock was released.  Since the link
                    # update is already done, we need only return the
                    # computed result and update the count of misses.
                    pass
                elif full:
                    # Use the old root to store the new key and result.
                    oldroot = root
                    oldroot[KEY] = key
                    oldroot[RESULT] = result
                    # Empty the oldest link and make it the new root.
                    # Keep a reference to the old key and old result to
                    # prevent their ref counts from going to zero during the
                    # update. That will prevent potentially arbitrary object
                    # clean-up code (i.e. __del__) from running while we're
                    # still adjusting the links.
                    root = oldroot[NEXT]
                    oldkey = root[KEY]
                    root[KEY] = root[RESULT] = None
                    # Now update the cache dictionary.
                    del cache[oldkey]
                    # Save the potentially reentrant cache[key] assignment
                    # for last, after the root and links have been put in
                    # a consistent state.
                    cache[key] = oldroot
                else:
                    # Put result in a new link at the front of the queue.
                    last = root[PREV]
                    link = [last, root, key, result]
                    last[NEXT] = root[PREV] = cache[key] = link
                    # Use the cache_len bound method instead of the len() function
                    # which could potentially be wrapped in an lru_cache itself.
                    full = cache_len() >= maxsize
            return result

    def cache_info():
        """Report cache statistics"""
        with lock:
            return _CacheInfo(hits, misses, maxsize, cache_len())

    def cache_clear():
        """Clear the cache and cache statistics"""
        nonlocal hits, misses, full
        with lock:
            cache.clear()
            root[:] = [root, root, None, None]
            hits = misses = 0
            full = False

    wrapper.cache_info = cache_info
    wrapper.cache_clear = cache_clear
    return wrapper

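The rewritten backport mirrors functools.lru_cache; a minimal usage sketch, assuming the module is importable as shown:

from backports.functools_lru_cache import lru_cache  # assumed import path

@lru_cache(maxsize=2)
def square(x):
    return x * x

square(2); square(3); square(2)
print(square.cache_info())        # CacheInfo(hits=1, misses=2, maxsize=2, currsize=2)
print(square.cache_parameters())  # {'maxsize': 2, 'typed': False}
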
@@ -11,9 +11,9 @@ from bleach.sanitizer import (


# yyyymmdd
__releasedate__ = "20230123"
__releasedate__ = "20231006"
# x.y.z or x.y.z.dev0 -- semver
__version__ = "6.0.0"
__version__ = "6.1.0"


__all__ = ["clean", "linkify"]

@@ -395,10 +395,17 @@ class BleachHTMLTokenizer(HTMLTokenizer):
                # followed by a series of characters. It's treated as a tag
                # name that abruptly ends, but we should treat that like
                # character data
                yield {
                    "type": TAG_TOKEN_TYPE_CHARACTERS,
                    "data": "<" + self.currentToken["name"],
                }
                yield {"type": TAG_TOKEN_TYPE_CHARACTERS, "data": self.stream.get_tag()}
            elif last_error_token["data"] in (
                "eof-in-attribute-name",
                "eof-in-attribute-value-no-quotes",
            ):
                # Handle the case where the text being parsed ends with <
                # followed by a series of characters and then space and then
                # more characters. It's treated as a tag name followed by an
                # attribute that abruptly ends, but we should treat that like
                # character data.
                yield {"type": TAG_TOKEN_TYPE_CHARACTERS, "data": self.stream.get_tag()}
            else:
                yield last_error_token

@@ -45,8 +45,8 @@ def build_url_re(tlds=TLDS, protocols=html5lib_shim.allowed_protocols):
        r"""\(*  # Match any opening parentheses.
        \b(?<![@.])(?:(?:{0}):/{{0,3}}(?:(?:\w+:)?\w+@)?)?  # http://
        ([\w-]+\.)+(?:{1})(?:\:[0-9]+)?(?!\.\w)\b  # xx.yy.tld(:##)?
        (?:[/?][^\s\{{\}}\|\\\^\[\]`<>"]*)?
            # /path/zz (excluding "unsafe" chars from RFC 1738,
        (?:[/?][^\s\{{\}}\|\\\^`<>"]*)?
            # /path/zz (excluding "unsafe" chars from RFC 3986,
            # except for # and ~, which happen in practice)
        """.format(
            "|".join(sorted(protocols)), "|".join(sorted(tlds))
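A sketch of the public helper this hunk edits, rebuilt with a custom TLD list (output is approximate):

from bleach.linkifier import Linker, build_url_re

url_re = build_url_re(tlds=["com", "net"])  # restrict matching to two TLDs
linker = Linker(url_re=url_re)
print(linker.linkify("see example.com"))
# roughly: see <a href="http://example.com" rel="nofollow">example.com</a>
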
@@ -591,7 +591,7 @@ class LinkifyFilter(html5lib_shim.Filter):
                    in_a = False
                    token_buffer = []
                else:
                    token_buffer.append(token)
                    token_buffer.extend(list(self.extract_entities(token)))
                continue

            if token["type"] in ["StartTag", "EmptyTag"]:

@@ -1,4 +1,4 @@
from .core import contents, where

__all__ = ["contents", "where"]
__version__ = "2023.07.22"
__version__ = "2024.02.02"

@@ -245,34 +245,6 @@ mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK
4SVhM7JZG+Ju1zdXtg2pEto=
-----END CERTIFICATE-----

# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1
# Subject: O=SECOM Trust.net OU=Security Communication RootCA1
# Label: "Security Communication Root CA"
# Serial: 0
# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a
# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7
# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c
-----BEGIN CERTIFICATE-----
MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY
MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t
dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5
WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD
VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3
DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8
9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ
DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9
Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N
QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ
xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G
A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T
AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG
kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr
Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5
Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU
JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot
RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw==
-----END CERTIFICATE-----

# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
# Label: "XRamp Global CA Root"

@@ -881,49 +853,6 @@ Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
WD9f
-----END CERTIFICATE-----

# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
# Serial: 6047274297262753887
# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3
# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa
# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef
-----BEGIN CERTIFICATE-----
MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE
BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy
MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD
VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD
VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp
cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv
ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl
AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF
661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9
am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1
ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481
PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS
3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k
SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF
3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM
ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g
StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz
Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB
jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V
-----END CERTIFICATE-----

# Issuer: CN=Izenpe.com O=IZENPE S.A.
# Subject: CN=Izenpe.com O=IZENPE S.A.
# Label: "Izenpe.com"
@@ -4633,3 +4562,253 @@ o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5
dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE
oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ==
-----END CERTIFICATE-----

# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc.
# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc.
# Label: "TrustAsia Global Root CA G3"
# Serial: 576386314500428537169965010905813481816650257167
# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04
# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7
# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08
-----BEGIN CERTIFICATE-----
MIIFpTCCA42gAwIBAgIUZPYOZXdhaqs7tOqFhLuxibhxkw8wDQYJKoZIhvcNAQEM
BQAwWjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dp
ZXMsIEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHMzAe
Fw0yMTA1MjAwMjEwMTlaFw00NjA1MTkwMjEwMTlaMFoxCzAJBgNVBAYTAkNOMSUw
IwYDVQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtU
cnVzdEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4IC
DwAwggIKAoICAQDAMYJhkuSUGwoqZdC+BqmHO1ES6nBBruL7dOoKjbmzTNyPtxNS
T1QY4SxzlZHFZjtqz6xjbYdT8PfxObegQ2OwxANdV6nnRM7EoYNl9lA+sX4WuDqK
AtCWHwDNBSHvBm3dIZwZQ0WhxeiAysKtQGIXBsaqvPPW5vxQfmZCHzyLpnl5hkA1
nyDvP+uLRx+PjsXUjrYsyUQE49RDdT/VP68czH5GX6zfZBCK70bwkPAPLfSIC7Ep
qq+FqklYqL9joDiR5rPmd2jE+SoZhLsO4fWvieylL1AgdB4SQXMeJNnKziyhWTXA
yB1GJ2Faj/lN03J5Zh6fFZAhLf3ti1ZwA0pJPn9pMRJpxx5cynoTi+jm9WAPzJMs
hH/x/Gr8m0ed262IPfN2dTPXS6TIi/n1Q1hPy8gDVI+lhXgEGvNz8teHHUGf59gX
zhqcD0r83ERoVGjiQTz+LISGNzzNPy+i2+f3VANfWdP3kXjHi3dqFuVJhZBFcnAv
kV34PmVACxmZySYgWmjBNb9Pp1Hx2BErW+Canig7CjoKH8GB5S7wprlppYiU5msT
f9FkPz2ccEblooV7WIQn3MSAPmeamseaMQ4w7OYXQJXZRe0Blqq/DPNL0WP3E1jA
uPP6Z92bfW1K/zJMtSU7/xxnD4UiWQWRkUF3gdCFTIcQcf+eQxuulXUtgQIDAQAB
o2MwYTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEDk5PIj7zjKsK5Xf/Ih
MBY027ySMB0GA1UdDgQWBBRA5OTyI+84yrCuV3/yITAWNNu8kjAOBgNVHQ8BAf8E
BAMCAQYwDQYJKoZIhvcNAQEMBQADggIBACY7UeFNOPMyGLS0XuFlXsSUT9SnYaP4
wM8zAQLpw6o1D/GUE3d3NZ4tVlFEbuHGLige/9rsR82XRBf34EzC4Xx8MnpmyFq2
XFNFV1pF1AWZLy4jVe5jaN/TG3inEpQGAHUNcoTpLrxaatXeL1nHo+zSh2bbt1S1
JKv0Q3jbSwTEb93mPmY+KfJLaHEih6D4sTNjduMNhXJEIlU/HHzp/LgV6FL6qj6j
ITk1dImmasI5+njPtqzn59ZW/yOSLlALqbUHM/Q4X6RJpstlcHboCoWASzY9M/eV
VHUl2qzEc4Jl6VL1XP04lQJqaTDFHApXB64ipCz5xUG3uOyfT0gA+QEEVcys+TIx
xHWVBqB/0Y0n3bOppHKH/lmLmnp0Ft0WpWIp6zqW3IunaFnT63eROfjXy9mPX1on
AX1daBli2MjN9LdyR75bl87yraKZk62Uy5P2EgmVtqvXO9A/EcswFi55gORngS1d
7XB4tmBZrOFdRWOPyN9yaFvqHbgB8X7754qz41SgOAngPN5C8sLtLpvzHzW2Ntjj
gKGLzZlkD8Kqq7HK9W+eQ42EVJmzbsASZthwEPEGNTNDqJwuuhQxzhB/HIbjj9LV
+Hfsm6vxL2PZQl/gZ4FkkfGXL/xuJvYz+NO1+MRiqzFRJQJ6+N1rZdVtTTDIZbpo
FGWsJwt0ivKH
-----END CERTIFICATE-----

# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc.
# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc.
# Label: "TrustAsia Global Root CA G4"
# Serial: 451799571007117016466790293371524403291602933463
# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb
# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a
# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c
-----BEGIN CERTIFICATE-----
MIICVTCCAdygAwIBAgIUTyNkuI6XY57GU4HBdk7LKnQV1tcwCgYIKoZIzj0EAwMw
WjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs
IEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHNDAeFw0y
MTA1MjAwMjEwMjJaFw00NjA1MTkwMjEwMjJaMFoxCzAJBgNVBAYTAkNOMSUwIwYD
VQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtUcnVz
dEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATx
s8045CVD5d4ZCbuBeaIVXxVjAd7Cq92zphtnS4CDr5nLrBfbK5bKfFJV4hrhPVbw
LxYI+hW8m7tH5j/uqOFMjPXTNvk4XatwmkcN4oFBButJ+bAp3TPsUKV/eSm4IJij
YzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUpbtKl86zK3+kMd6Xg1mD
pm9xy94wHQYDVR0OBBYEFKW7SpfOsyt/pDHel4NZg6ZvccveMA4GA1UdDwEB/wQE
AwIBBjAKBggqhkjOPQQDAwNnADBkAjBe8usGzEkxn0AAbbd+NvBNEU/zy4k6LHiR
UKNbwMp1JvK/kF0LgoxgKJ/GcJpo5PECMFxYDlZ2z1jD1xCMuo6u47xkdUfFVZDj
/bpV6wfEU6s3qe4hsiFbYI89MvHVI5TWWA==
-----END CERTIFICATE-----

# Issuer: CN=CommScope Public Trust ECC Root-01 O=CommScope
# Subject: CN=CommScope Public Trust ECC Root-01 O=CommScope
# Label: "CommScope Public Trust ECC Root-01"
# Serial: 385011430473757362783587124273108818652468453534
# MD5 Fingerprint: 3a:40:a7:fc:03:8c:9c:38:79:2f:3a:a2:6c:b6:0a:16
# SHA1 Fingerprint: 07:86:c0:d8:dd:8e:c0:80:98:06:98:d0:58:7a:ef:de:a6:cc:a2:5d
# SHA256 Fingerprint: 11:43:7c:da:7b:b4:5e:41:36:5f:45:b3:9a:38:98:6b:0d:e0:0d:ef:34:8e:0c:7b:b0:87:36:33:80:0b:c3:8b
-----BEGIN CERTIFICATE-----
MIICHTCCAaOgAwIBAgIUQ3CCd89NXTTxyq4yLzf39H91oJ4wCgYIKoZIzj0EAwMw
TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t
bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMTAeFw0yMTA0MjgxNzM1NDNa
Fw00NjA0MjgxNzM1NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv
cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDEw
djAQBgcqhkjOPQIBBgUrgQQAIgNiAARLNumuV16ocNfQj3Rid8NeeqrltqLxeP0C
flfdkXmcbLlSiFS8LwS+uM32ENEp7LXQoMPwiXAZu1FlxUOcw5tjnSCDPgYLpkJE
hRGnSjot6dZoL0hOUysHP029uax3OVejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD
VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSOB2LAUN3GGQYARnQE9/OufXVNMDAKBggq
hkjOPQQDAwNoADBlAjEAnDPfQeMjqEI2Jpc1XHvr20v4qotzVRVcrHgpD7oh2MSg
2NED3W3ROT3Ek2DS43KyAjB8xX6I01D1HiXo+k515liWpDVfG2XqYZpwI7UNo5uS
Um9poIyNStDuiw7LR47QjRE=
-----END CERTIFICATE-----

# Issuer: CN=CommScope Public Trust ECC Root-02 O=CommScope
# Subject: CN=CommScope Public Trust ECC Root-02 O=CommScope
# Label: "CommScope Public Trust ECC Root-02"
# Serial: 234015080301808452132356021271193974922492992893
# MD5 Fingerprint: 59:b0:44:d5:65:4d:b8:5c:55:19:92:02:b6:d1:94:b2
# SHA1 Fingerprint: 3c:3f:ef:57:0f:fe:65:93:86:9e:a0:fe:b0:f6:ed:8e:d1:13:c7:e5
# SHA256 Fingerprint: 2f:fb:7f:81:3b:bb:b3:c8:9a:b4:e8:16:2d:0f:16:d7:15:09:a8:30:cc:9d:73:c2:62:e5:14:08:75:d1:ad:4a
-----BEGIN CERTIFICATE-----
MIICHDCCAaOgAwIBAgIUKP2ZYEFHpgE6yhR7H+/5aAiDXX0wCgYIKoZIzj0EAwMw
TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t
bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMjAeFw0yMTA0MjgxNzQ0NTRa
Fw00NjA0MjgxNzQ0NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv
cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDIw
djAQBgcqhkjOPQIBBgUrgQQAIgNiAAR4MIHoYx7l63FRD/cHB8o5mXxO1Q/MMDAL
j2aTPs+9xYa9+bG3tD60B8jzljHz7aRP+KNOjSkVWLjVb3/ubCK1sK9IRQq9qEmU
v4RDsNuESgMjGWdqb8FuvAY5N9GIIvejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD
VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTmGHX/72DehKT1RsfeSlXjMjZ59TAKBggq
hkjOPQQDAwNnADBkAjAmc0l6tqvmSfR9Uj/UQQSugEODZXW5hYA4O9Zv5JOGq4/n
ich/m35rChJVYaoR4HkCMHfoMXGsPHED1oQmHhS48zs73u1Z/GtMMH9ZzkXpc2AV
mkzw5l4lIhVtwodZ0LKOag==
-----END CERTIFICATE-----

# Issuer: CN=CommScope Public Trust RSA Root-01 O=CommScope
# Subject: CN=CommScope Public Trust RSA Root-01 O=CommScope
# Label: "CommScope Public Trust RSA Root-01"
# Serial: 354030733275608256394402989253558293562031411421
# MD5 Fingerprint: 0e:b4:15:bc:87:63:5d:5d:02:73:d4:26:38:68:73:d8
# SHA1 Fingerprint: 6d:0a:5f:f7:b4:23:06:b4:85:b3:b7:97:64:fc:ac:75:f5:33:f2:93
# SHA256 Fingerprint: 02:bd:f9:6e:2a:45:dd:9b:f1:8f:c7:e1:db:df:21:a0:37:9b:a3:c9:c2:61:03:44:cf:d8:d6:06:fe:c1:ed:81
-----BEGIN CERTIFICATE-----
MIIFbDCCA1SgAwIBAgIUPgNJgXUWdDGOTKvVxZAplsU5EN0wDQYJKoZIhvcNAQEL
BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi
Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMTAeFw0yMTA0MjgxNjQ1
NTRaFw00NjA0MjgxNjQ1NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t
U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt
MDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCwSGWjDR1C45FtnYSk
YZYSwu3D2iM0GXb26v1VWvZVAVMP8syMl0+5UMuzAURWlv2bKOx7dAvnQmtVzslh
suitQDy6uUEKBU8bJoWPQ7VAtYXR1HHcg0Hz9kXHgKKEUJdGzqAMxGBWBB0HW0al
DrJLpA6lfO741GIDuZNqihS4cPgugkY4Iw50x2tBt9Apo52AsH53k2NC+zSDO3Oj
WiE260f6GBfZumbCk6SP/F2krfxQapWsvCQz0b2If4b19bJzKo98rwjyGpg/qYFl
P8GMicWWMJoKz/TUyDTtnS+8jTiGU+6Xn6myY5QXjQ/cZip8UlF1y5mO6D1cv547
KI2DAg+pn3LiLCuz3GaXAEDQpFSOm117RTYm1nJD68/A6g3czhLmfTifBSeolz7p
UcZsBSjBAg/pGG3svZwG1KdJ9FQFa2ww8esD1eo9anbCyxooSU1/ZOD6K9pzg4H/
kQO9lLvkuI6cMmPNn7togbGEW682v3fuHX/3SZtS7NJ3Wn2RnU3COS3kuoL4b/JO
Hg9O5j9ZpSPcPYeoKFgo0fEbNttPxP/hjFtyjMcmAyejOQoBqsCyMWCDIqFPEgkB
Ea801M/XrmLTBQe0MXXgDW1XT2mH+VepuhX2yFJtocucH+X8eKg1mp9BFM6ltM6U
CBwJrVbl2rZJmkrqYxhTnCwuwwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUN12mmnQywsL5x6YVEFm45P3luG0wDQYJ
KoZIhvcNAQELBQADggIBAK+nz97/4L1CjU3lIpbfaOp9TSp90K09FlxD533Ahuh6
NWPxzIHIxgvoLlI1pKZJkGNRrDSsBTtXAOnTYtPZKdVUvhwQkZyybf5Z/Xn36lbQ
nmhUQo8mUuJM3y+Xpi/SB5io82BdS5pYV4jvguX6r2yBS5KPQJqTRlnLX3gWsWc+
QgvfKNmwrZggvkN80V4aCRckjXtdlemrwWCrWxhkgPut4AZ9HcpZuPN4KWfGVh2v
trV0KnahP/t1MJ+UXjulYPPLXAziDslg+MkfFoom3ecnf+slpoq9uC02EJqxWE2a
aE9gVOX2RhOOiKy8IUISrcZKiX2bwdgt6ZYD9KJ0DLwAHb/WNyVntHKLr4W96ioD
j8z7PEQkguIBpQtZtjSNMgsSDesnwv1B10A8ckYpwIzqug/xBpMu95yo9GA+o/E4
Xo4TwbM6l4c/ksp4qRyv0LAbJh6+cOx69TOY6lz/KwsETkPdY34Op054A5U+1C0w
lREQKC6/oAI+/15Z0wUOlV9TRe9rh9VIzRamloPh37MG88EU26fsHItdkJANclHn
YfkUyq+Dj7+vsQpZXdxc1+SWrVtgHdqul7I52Qb1dgAT+GhMIbA1xNxVssnBQVoc
icCMb3SgazNNtQEo/a2tiRc7ppqEvOuM6sRxJKi6KfkIsidWNTJf6jn7MZrVGczw
-----END CERTIFICATE-----

# Issuer: CN=CommScope Public Trust RSA Root-02 O=CommScope
# Subject: CN=CommScope Public Trust RSA Root-02 O=CommScope
# Label: "CommScope Public Trust RSA Root-02"
# Serial: 480062499834624527752716769107743131258796508494
# MD5 Fingerprint: e1:29:f9:62:7b:76:e2:96:6d:f3:d4:d7:0f:ae:1f:aa
# SHA1 Fingerprint: ea:b0:e2:52:1b:89:93:4c:11:68:f2:d8:9a:ac:22:4c:a3:8a:57:ae
# SHA256 Fingerprint: ff:e9:43:d7:93:42:4b:4f:7c:44:0c:1c:3d:64:8d:53:63:f3:4b:82:dc:87:aa:7a:9f:11:8f:c5:de:e1:01:f1
-----BEGIN CERTIFICATE-----
MIIFbDCCA1SgAwIBAgIUVBa/O345lXGN0aoApYYNK496BU4wDQYJKoZIhvcNAQEL
BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi
Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMjAeFw0yMTA0MjgxNzE2
NDNaFw00NjA0MjgxNzE2NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t
U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt
MDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDh+g77aAASyE3VrCLE
NQE7xVTlWXZjpX/rwcRqmL0yjReA61260WI9JSMZNRTpf4mnG2I81lDnNJUDMrG0
kyI9p+Kx7eZ7Ti6Hmw0zdQreqjXnfuU2mKKuJZ6VszKWpCtYHu8//mI0SFHRtI1C
rWDaSWqVcN3SAOLMV2MCe5bdSZdbkk6V0/nLKR8YSvgBKtJjCW4k6YnS5cciTNxz
hkcAqg2Ijq6FfUrpuzNPDlJwnZXjfG2WWy09X6GDRl224yW4fKcZgBzqZUPckXk2
LHR88mcGyYnJ27/aaL8j7dxrrSiDeS/sOKUNNwFnJ5rpM9kzXzehxfCrPfp4sOcs
n/Y+n2Dg70jpkEUeBVF4GiwSLFworA2iI540jwXmojPOEXcT1A6kHkIfhs1w/tku
FT0du7jyU1fbzMZ0KZwYszZ1OC4PVKH4kh+Jlk+71O6d6Ts2QrUKOyrUZHk2EOH5
kQMreyBUzQ0ZGshBMjTRsJnhkB4BQDa1t/qp5Xd1pCKBXbCL5CcSD1SIxtuFdOa3
wNemKfrb3vOTlycEVS8KbzfFPROvCgCpLIscgSjX74Yxqa7ybrjKaixUR9gqiC6v
wQcQeKwRoi9C8DfF8rhW3Q5iLc4tVn5V8qdE9isy9COoR+jUKgF4z2rDN6ieZdIs
5fq6M8EGRPbmz6UNp2YINIos8wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUR9DnsSL/nSz12Vdgs7GxcJXvYXowDQYJ
KoZIhvcNAQELBQADggIBAIZpsU0v6Z9PIpNojuQhmaPORVMbc0RTAIFhzTHjCLqB
KCh6krm2qMhDnscTJk3C2OVVnJJdUNjCK9v+5qiXz1I6JMNlZFxHMaNlNRPDk7n3
+VGXu6TwYofF1gbTl4MgqX67tiHCpQ2EAOHyJxCDut0DgdXdaMNmEMjRdrSzbyme
APnCKfWxkxlSaRosTKCL4BWaMS/TiJVZbuXEs1DIFAhKm4sTg7GkcrI7djNB3Nyq
pgdvHSQSn8h2vS/ZjvQs7rfSOBAkNlEv41xdgSGn2rtO/+YHqP65DSdsu3BaVXoT
6fEqSWnHX4dXTEN5bTpl6TBcQe7rd6VzEojov32u5cSoHw2OHG1QAk8mGEPej1WF
sQs3BWDJVTkSBKEqz3EWnzZRSb9wO55nnPt7eck5HHisd5FUmrh1CoFSl+NmYWvt
PjgelmFV4ZFUjO2MJB+ByRCac5krFk5yAD9UG/iNuovnFNa2RU9g7Jauwy8CTl2d
lklyALKrdVwPaFsdZcJfMw8eD/A7hvWwTruc9+olBdytoptLFwG+Qt81IR2tq670
v64fG9PiO/yzcnMcmyiQiRM9HcEARwmWmjgb3bHPDcK0RPOWlc4yOo80nOAXx17O
rg3bhzjlP1v9mxnhMUF6cKojawHhRUzNlM47ni3niAIi9G7oyOzWPPO5std3eqx7
-----END CERTIFICATE-----

# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH
# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH
# Label: "Telekom Security TLS ECC Root 2020"
# Serial: 72082518505882327255703894282316633856
# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd
# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec
# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1
-----BEGIN CERTIFICATE-----
MIICQjCCAcmgAwIBAgIQNjqWjMlcsljN0AFdxeVXADAKBggqhkjOPQQDAzBjMQsw
CQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0eSBH
bWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBFQ0MgUm9vdCAyMDIw
MB4XDTIwMDgyNTA3NDgyMFoXDTQ1MDgyNTIzNTk1OVowYzELMAkGA1UEBhMCREUx
JzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkGA1UE
AwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgRUNDIFJvb3QgMjAyMDB2MBAGByqGSM49
AgEGBSuBBAAiA2IABM6//leov9Wq9xCazbzREaK9Z0LMkOsVGJDZos0MKiXrPk/O
tdKPD/M12kOLAoC+b1EkHQ9rK8qfwm9QMuU3ILYg/4gND21Ju9sGpIeQkpT0CdDP
f8iAC8GXs7s1J8nCG6NCMEAwHQYDVR0OBBYEFONyzG6VmUex5rNhTNHLq+O6zd6f
MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA
MGQCMHVSi7ekEE+uShCLsoRbQuHmKjYC2qBuGT8lv9pZMo7k+5Dck2TOrbRBR2Di
z6fLHgIwN0GMZt9Ba9aDAEH9L1r3ULRn0SyocddDypwnJJGDSA3PzfdUga/sf+Rn
27iQ7t0l
-----END CERTIFICATE-----

# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH
# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH
# Label: "Telekom Security TLS RSA Root 2023"
# Serial: 44676229530606711399881795178081572759
# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2
# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93
# SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46
-----BEGIN CERTIFICATE-----
MIIFszCCA5ugAwIBAgIQIZxULej27HF3+k7ow3BXlzANBgkqhkiG9w0BAQwFADBj
MQswCQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0
eSBHbWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBSU0EgUm9vdCAy
MDIzMB4XDTIzMDMyODEyMTY0NVoXDTQ4MDMyNzIzNTk1OVowYzELMAkGA1UEBhMC
REUxJzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkG
A1UEAwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgUlNBIFJvb3QgMjAyMzCCAiIwDQYJ
KoZIhvcNAQEBBQADggIPADCCAgoCggIBAO01oYGA88tKaVvC+1GDrib94W7zgRJ9
cUD/h3VCKSHtgVIs3xLBGYSJwb3FKNXVS2xE1kzbB5ZKVXrKNoIENqil/Cf2SfHV
cp6R+SPWcHu79ZvB7JPPGeplfohwoHP89v+1VmLhc2o0mD6CuKyVU/QBoCcHcqMA
U6DksquDOFczJZSfvkgdmOGjup5czQRxUX11eKvzWarE4GC+j4NSuHUaQTXtvPM6
Y+mpFEXX5lLRbtLevOP1Czvm4MS9Q2QTps70mDdsipWol8hHD/BeEIvnHRz+sTug
BTNoBUGCwQMrAcjnj02r6LX2zWtEtefdi+zqJbQAIldNsLGyMcEWzv/9FIS3R/qy
8XDe24tsNlikfLMR0cN3f1+2JeANxdKz+bi4d9s3cXFH42AYTyS2dTd4uaNir73J
co4vzLuu2+QVUhkHM/tqty1LkCiCc/4YizWN26cEar7qwU02OxY2kTLvtkCJkUPg
8qKrBC7m8kwOFjQgrIfBLX7JZkcXFBGk8/ehJImr2BrIoVyxo/eMbcgByU/J7MT8
rFEz0ciD0cmfHdRHNCk+y7AO+oMLKFjlKdw/fKifybYKu6boRhYPluV75Gp6SG12
mAWl3G0eQh5C2hrgUve1g8Aae3g1LDj1H/1Joy7SWWO/gLCMk3PLNaaZlSJhZQNg
+y+TS/qanIA7AgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtqeX
gj10hZv3PJ+TmpV5dVKMbUcwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS2
p5eCPXSFm/c8n5OalXl1UoxtRzANBgkqhkiG9w0BAQwFAAOCAgEAqMxhpr51nhVQ
pGv7qHBFfLp+sVr8WyP6Cnf4mHGCDG3gXkaqk/QeoMPhk9tLrbKmXauw1GLLXrtm
9S3ul0A8Yute1hTWjOKWi0FpkzXmuZlrYrShF2Y0pmtjxrlO8iLpWA1WQdH6DErw
M807u20hOq6OcrXDSvvpfeWxm4bu4uB9tPcy/SKE8YXJN3nptT+/XOR0so8RYgDd
GGah2XsjX/GO1WfoVNpbOms2b/mBsTNHM3dA+VKq3dSDz4V4mZqTuXNnQkYRIer+
CqkbGmVps4+uFrb2S1ayLfmlyOw7YqPta9BO1UAJpB+Y1zqlklkg5LB9zVtzaL1t
xKITDmcZuI1CfmwMmm6gJC3VRRvcxAIU/oVbZZfKTpBQCHpCNfnqwmbU+AGuHrS+
w6jv/naaoqYfRvaE7fzbzsQCzndILIyy7MMAo+wsVRjBfhnu4S/yrYObnqsZ38aK
L4x35bcF7DvB7L6Gs4a8wPfc5+pbrrLMtTWGS9DiP7bY+A4A7l3j941Y/8+LN+lj
X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q
ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm
dTdmQRCsu/WU48IxK63nI1bMNSWSs1A=
-----END CERTIFICATE-----
@@ -5,6 +5,10 @@ certifi.py
This module returns the installation location of cacert.pem or its contents.
"""
import sys
import atexit


def exit_cacert_ctx() -> None:
    _CACERT_CTX.__exit__(None, None, None)  # type: ignore[union-attr]


if sys.version_info >= (3, 11):

@@ -35,6 +39,7 @@ if sys.version_info >= (3, 11):
        # we will also store that at the global level as well.
        _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem"))
        _CACERT_PATH = str(_CACERT_CTX.__enter__())
        atexit.register(exit_cacert_ctx)

    return _CACERT_PATH

@@ -70,6 +75,7 @@ elif sys.version_info >= (3, 7):
        # we will also store that at the global level as well.
        _CACERT_CTX = get_path("certifi", "cacert.pem")
        _CACERT_PATH = str(_CACERT_CTX.__enter__())
        atexit.register(exit_cacert_ctx)

    return _CACERT_PATH
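The hunks above register an atexit hook so the importlib.resources context manager opened by where() is properly exited at interpreter shutdown. A minimal standalone sketch of the same pattern, using only the stdlib and assuming certifi is installed:

import atexit
from importlib.resources import as_file, files

_ctx = as_file(files("certifi").joinpath("cacert.pem"))
path = str(_ctx.__enter__())  # may extract the bundle to a temporary file
# Ensure the temporary file (if any) is cleaned up when Python exits.
atexit.register(_ctx.__exit__, None, None, None)
print(path)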
@@ -452,6 +452,6 @@ class WSGIErrorHandler(logging.Handler):

class LazyRfc3339UtcTime(object):
    def __str__(self):
        """Return now() in RFC3339 UTC Format."""
        now = datetime.datetime.now()
        return now.isoformat('T') + 'Z'
        """Return utcnow() in RFC3339 UTC Format."""
        iso_formatted_now = datetime.datetime.utcnow().isoformat('T')
        return f'{iso_formatted_now!s}Z'
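The fix replaces the naive local-time now() with utcnow(), so the trailing "Z" suffix is truthful. A quick standalone illustration using only the stdlib:

import datetime

old_style = datetime.datetime.now().isoformat('T') + 'Z'        # local time mislabeled as UTC
new_style = f"{datetime.datetime.utcnow().isoformat('T')!s}Z"    # actually UTC
# On a UTC+8 host, old_style and new_style differ by eight hours.
print(old_style, new_style)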
@@ -622,13 +622,15 @@ def autovary(ignore=None, debug=False):


def convert_params(exception=ValueError, error=400):
    """Convert request params based on function annotations, with error handling.
    """Convert request params based on function annotations.

    exception
        Exception class to catch.
    This function also processes errors that are subclasses of ``exception``.

    status
        The HTTP error code to return to the client on failure.
    :param BaseException exception: Exception class to catch.
    :type exception: BaseException

    :param error: The HTTP status code to return to the client on failure.
    :type error: int
    """
    request = cherrypy.serving.request
    types = request.handler.callable.__annotations__
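For context, a simplified, hypothetical model of what the annotation-driven conversion does (the real tool reads request.params and the handler's __annotations__ as shown above):

# Hypothetical, simplified sketch of convert_params' core loop:
def convert(params: dict, annotations: dict):
    converted = dict(params)
    for name, coerce in annotations.items():
        if name in converted:
            # e.g. int("3") -> 3; int("x") raises ValueError, which the real
            # tool translates into the configured HTTP error status.
            converted[name] = coerce(converted[name])
    return converted

print(convert({"a": "3", "b": "4"}, {"a": int, "b": int}))  # {'a': 3, 'b': 4}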
@@ -47,7 +47,9 @@ try:
    import pstats

    def new_func_strip_path(func_name):
        """Make profiler output more readable by adding `__init__` modules' parents
        """Add ``__init__`` modules' parents.

        This makes the profiler output more readable.
        """
        filename, line, name = func_name
        if filename.endswith('__init__.py'):

@@ -188,7 +188,7 @@ class Parser(configparser.ConfigParser):

    def dict_from_file(self, file):
        if hasattr(file, 'read'):
            self.readfp(file)
            self.read_file(file)
        else:
            self.read(file)
        return self.as_dict()
@@ -1,19 +1,18 @@
"""Module with helpers for serving static files."""

import mimetypes
import os
import platform
import re
import stat
import mimetypes
import urllib.parse
import unicodedata

import urllib.parse
from email.generator import _make_boundary as make_boundary
from io import UnsupportedOperation

import cherrypy
from cherrypy._cpcompat import ntob
from cherrypy.lib import cptools, httputil, file_generator_limited
from cherrypy.lib import cptools, file_generator_limited, httputil


def _setup_mimetypes():

@@ -185,7 +184,10 @@ def serve_fileobj(fileobj, content_type=None, disposition=None, name=None,


def _serve_fileobj(fileobj, content_type, content_length, debug=False):
    """Internal. Set response.body to the given file object, perhaps ranged."""
    """Set ``response.body`` to the given file object, perhaps ranged.

    Internal helper.
    """
    response = cherrypy.serving.response

    # HTTP/1.0 didn't have Range/Accept-Ranges headers, or the 206 code

@@ -494,7 +494,7 @@ class Bus(object):
                "Cannot reconstruct command from '-c'. "
                'Ref: https://github.com/cherrypy/cherrypy/issues/1545')
        except AttributeError:
            """It looks Py_GetArgcArgv is completely absent in some environments
            """It looks Py_GetArgcArgv's completely absent in some environments

            It is known, that there's no Py_GetArgcArgv in MS Windows and
            ``ctypes`` module is completely absent in Google AppEngine
@@ -136,6 +136,9 @@ class HTTPTests(helper.CPWebCase):
        self.assertStatus(200)
        self.assertBody(b'Hello world!')

        response.close()
        c.close()

        # Now send a message that has no Content-Length, but does send a body.
        # Verify that CP times out the socket and responds
        # with 411 Length Required.

@@ -159,6 +162,9 @@ class HTTPTests(helper.CPWebCase):
        self.status = str(response.status)
        self.assertStatus(411)

        response.close()
        c.close()

    def test_post_multipart(self):
        alphabet = 'abcdefghijklmnopqrstuvwxyz'
        # generate file contents for a large post

@@ -184,6 +190,9 @@ class HTTPTests(helper.CPWebCase):
        parts = ['%s * 65536' % ch for ch in alphabet]
        self.assertBody(', '.join(parts))

        response.close()
        c.close()

    def test_post_filename_with_special_characters(self):
        """Testing that we can handle filenames with special characters.

@@ -217,6 +226,9 @@ class HTTPTests(helper.CPWebCase):
        self.assertStatus(200)
        self.assertBody(fname)

        response.close()
        c.close()

    def test_malformed_request_line(self):
        if getattr(cherrypy.server, 'using_apache', False):
            return self.skip('skipped due to known Apache differences...')

@@ -264,6 +276,9 @@ class HTTPTests(helper.CPWebCase):
        self.body = response.fp.read(20)
        self.assertBody('Illegal header line.')

        response.close()
        c.close()

    def test_http_over_https(self):
        if self.scheme != 'https':
            return self.skip('skipped (not running HTTPS)... ')

@@ -150,6 +150,8 @@ class IteratorTest(helper.CPWebCase):
        self.assertStatus(200)
        self.assertBody('0')

        itr_conn.close()

        # Now we do the same check with streaming - some classes will
        # be automatically closed, while others cannot.
        stream_counts = {}

@@ -1,5 +1,6 @@
"""Basic tests for the CherryPy core: request handling."""

import datetime
import logging

from cheroot.test import webtest
@@ -197,6 +198,33 @@ def test_custom_log_format(log_tracker, monkeypatch, server):
    )


def test_utc_in_timez(monkeypatch):
    """Test that ``LazyRfc3339UtcTime`` is rendered as ``str`` using UTC timestamp."""
    utcoffset8_local_time_in_naive_utc = (
        datetime.datetime(
            year=2020,
            month=1,
            day=1,
            hour=1,
            minute=23,
            second=45,
            tzinfo=datetime.timezone(datetime.timedelta(hours=8)),
        )
        .astimezone(datetime.timezone.utc)
        .replace(tzinfo=None)
    )

    class mock_datetime:
        @classmethod
        def utcnow(cls):
            return utcoffset8_local_time_in_naive_utc

    monkeypatch.setattr('datetime.datetime', mock_datetime)
    rfc3339_utc_time = str(cherrypy._cplogging.LazyRfc3339UtcTime())
    expected_time = '2019-12-31T17:23:45Z'
    assert rfc3339_utc_time == expected_time


def test_timez_log_format(log_tracker, monkeypatch, server):
    """Test a customized access_log_format string, which is a
    feature of _cplogging.LogManager.access()."""
6
lib/dateutil-stubs/METADATA.toml
Normal file

@@ -0,0 +1,6 @@
version = "2.9.*"
upstream_repository = "https://github.com/dateutil/dateutil"
partial_stub = true

[tool.stubtest]
ignore_missing_stub = true
0
lib/dateutil-stubs/__init__.pyi
Normal file
9
lib/dateutil-stubs/_common.pyi
Normal file

@@ -0,0 +1,9 @@
from typing_extensions import Self

class weekday:
    def __init__(self, weekday: int, n: int | None = None) -> None: ...
    def __call__(self, n: int) -> Self: ...
    def __eq__(self, other: object) -> bool: ...
    def __hash__(self) -> int: ...
    weekday: int
    n: int
8
lib/dateutil-stubs/easter.pyi
Normal file

@@ -0,0 +1,8 @@
from datetime import date
from typing import Literal

EASTER_JULIAN: Literal[1]
EASTER_ORTHODOX: Literal[2]
EASTER_WESTERN: Literal[3]

def easter(year: int, method: Literal[1, 2, 3] = 3) -> date: ...
67
lib/dateutil-stubs/parser/__init__.pyi
Normal file

@@ -0,0 +1,67 @@
from collections.abc import Callable, Mapping
from datetime import datetime, tzinfo
from typing import IO, Any
from typing_extensions import TypeAlias

from .isoparser import isoparse as isoparse, isoparser as isoparser

_FileOrStr: TypeAlias = bytes | str | IO[str] | IO[Any]
_TzData: TypeAlias = tzinfo | int | str | None
_TzInfo: TypeAlias = Mapping[str, _TzData] | Callable[[str, int], _TzData]

class parserinfo:
    JUMP: list[str]
    WEEKDAYS: list[tuple[str, ...]]
    MONTHS: list[tuple[str, ...]]
    HMS: list[tuple[str, str, str]]
    AMPM: list[tuple[str, str]]
    UTCZONE: list[str]
    PERTAIN: list[str]
    TZOFFSET: dict[str, int]
    def __init__(self, dayfirst: bool = False, yearfirst: bool = False) -> None: ...
    def jump(self, name: str) -> bool: ...
    def weekday(self, name: str) -> int | None: ...
    def month(self, name: str) -> int | None: ...
    def hms(self, name: str) -> int | None: ...
    def ampm(self, name: str) -> int | None: ...
    def pertain(self, name: str) -> bool: ...
    def utczone(self, name: str) -> bool: ...
    def tzoffset(self, name: str) -> int | None: ...
    def convertyear(self, year: int) -> int: ...
    def validate(self, res: datetime) -> bool: ...

class parser:
    def __init__(self, info: parserinfo | None = None) -> None: ...
    def parse(
        self,
        timestr: _FileOrStr,
        default: datetime | None = None,
        ignoretz: bool = False,
        tzinfos: _TzInfo | None = None,
        *,
        dayfirst: bool | None = ...,
        yearfirst: bool | None = ...,
        fuzzy: bool = ...,
        fuzzy_with_tokens: bool = ...,
    ) -> datetime: ...

DEFAULTPARSER: parser

def parse(
    timestr: _FileOrStr,
    parserinfo: parserinfo | None = None,
    *,
    dayfirst: bool | None = ...,
    yearfirst: bool | None = ...,
    ignoretz: bool = ...,
    fuzzy: bool = ...,
    fuzzy_with_tokens: bool = ...,
    default: datetime | None = ...,
    tzinfos: _TzInfo | None = ...,
) -> datetime: ...

class _tzparser: ...

DEFAULTTZPARSER: _tzparser

class ParserError(ValueError): ...
15
lib/dateutil-stubs/parser/isoparser.pyi
Normal file

@@ -0,0 +1,15 @@
from _typeshed import SupportsRead
from datetime import date, datetime, time, tzinfo
from typing_extensions import TypeAlias

_Readable: TypeAlias = SupportsRead[str | bytes]
_TakesAscii: TypeAlias = str | bytes | _Readable

class isoparser:
    def __init__(self, sep: str | bytes | None = None): ...
    def isoparse(self, dt_str: _TakesAscii) -> datetime: ...
    def parse_isodate(self, datestr: _TakesAscii) -> date: ...
    def parse_isotime(self, timestr: _TakesAscii) -> time: ...
    def parse_tzstr(self, tzstr: _TakesAscii, zero_as_utc: bool = True) -> tzinfo: ...

def isoparse(dt_str: _TakesAscii) -> datetime: ...
1
lib/dateutil-stubs/py.typed
Normal file

@@ -0,0 +1 @@
partial
97
lib/dateutil-stubs/relativedelta.pyi
Normal file

@@ -0,0 +1,97 @@
from datetime import date, timedelta
from typing import SupportsFloat, TypeVar, overload
from typing_extensions import Self, TypeAlias

# See #9817 for why we reexport this here
from ._common import weekday as weekday

_DateT = TypeVar("_DateT", bound=date)
# Work around attribute and type having the same name.
_Weekday: TypeAlias = weekday

MO: weekday
TU: weekday
WE: weekday
TH: weekday
FR: weekday
SA: weekday
SU: weekday

class relativedelta:
    years: int
    months: int
    days: int
    leapdays: int
    hours: int
    minutes: int
    seconds: int
    microseconds: int
    year: int | None
    month: int | None
    weekday: _Weekday | None
    day: int | None
    hour: int | None
    minute: int | None
    second: int | None
    microsecond: int | None
    def __init__(
        self,
        dt1: date | None = None,
        dt2: date | None = None,
        years: int | None = 0,
        months: int | None = 0,
        days: int | None = 0,
        leapdays: int | None = 0,
        weeks: int | None = 0,
        hours: int | None = 0,
        minutes: int | None = 0,
        seconds: int | None = 0,
        microseconds: int | None = 0,
        year: int | None = None,
        month: int | None = None,
        day: int | None = None,
        weekday: int | _Weekday | None = None,
        yearday: int | None = None,
        nlyearday: int | None = None,
        hour: int | None = None,
        minute: int | None = None,
        second: int | None = None,
        microsecond: int | None = None,
    ) -> None: ...
    @property
    def weeks(self) -> int: ...
    @weeks.setter
    def weeks(self, value: int) -> None: ...
    def normalized(self) -> Self: ...
    # TODO: use Union when mypy will handle it properly in overloaded operator
    # methods (#2129, #1442, #1264 in mypy)
    @overload
    def __add__(self, other: relativedelta) -> Self: ...
    @overload
    def __add__(self, other: timedelta) -> Self: ...
    @overload
    def __add__(self, other: _DateT) -> _DateT: ...
    @overload
    def __radd__(self, other: relativedelta) -> Self: ...
    @overload
    def __radd__(self, other: timedelta) -> Self: ...
    @overload
    def __radd__(self, other: _DateT) -> _DateT: ...
    @overload
    def __rsub__(self, other: relativedelta) -> Self: ...
    @overload
    def __rsub__(self, other: timedelta) -> Self: ...
    @overload
    def __rsub__(self, other: _DateT) -> _DateT: ...
    def __sub__(self, other: relativedelta) -> Self: ...
    def __neg__(self) -> Self: ...
    def __bool__(self) -> bool: ...
    def __nonzero__(self) -> bool: ...
    def __mul__(self, other: SupportsFloat) -> Self: ...
    def __rmul__(self, other: SupportsFloat) -> Self: ...
    def __eq__(self, other: object) -> bool: ...
    def __ne__(self, other: object) -> bool: ...
    def __div__(self, other: SupportsFloat) -> Self: ...
    def __truediv__(self, other: SupportsFloat) -> Self: ...
    def __abs__(self) -> Self: ...
    def __hash__(self) -> int: ...
111
lib/dateutil-stubs/rrule.pyi
Normal file

@@ -0,0 +1,111 @@
import datetime
from _typeshed import Incomplete
from collections.abc import Iterable, Iterator, Sequence
from typing_extensions import TypeAlias

from ._common import weekday as weekdaybase

YEARLY: int
MONTHLY: int
WEEKLY: int
DAILY: int
HOURLY: int
MINUTELY: int
SECONDLY: int

class weekday(weekdaybase): ...

weekdays: tuple[weekday, weekday, weekday, weekday, weekday, weekday, weekday]
MO: weekday
TU: weekday
WE: weekday
TH: weekday
FR: weekday
SA: weekday
SU: weekday

class rrulebase:
    def __init__(self, cache: bool = False) -> None: ...
    def __iter__(self) -> Iterator[datetime.datetime]: ...
    def __getitem__(self, item): ...
    def __contains__(self, item): ...
    def count(self): ...
    def before(self, dt, inc: bool = False): ...
    def after(self, dt, inc: bool = False): ...
    def xafter(self, dt, count: Incomplete | None = None, inc: bool = False): ...
    def between(self, after, before, inc: bool = False, count: int = 1): ...

class rrule(rrulebase):
    def __init__(
        self,
        freq,
        dtstart: datetime.date | None = None,
        interval: int = 1,
        wkst: weekday | int | None = None,
        count: int | None = None,
        until: datetime.date | int | None = None,
        bysetpos: int | Iterable[int] | None = None,
        bymonth: int | Iterable[int] | None = None,
        bymonthday: int | Iterable[int] | None = None,
        byyearday: int | Iterable[int] | None = None,
        byeaster: int | Iterable[int] | None = None,
        byweekno: int | Iterable[int] | None = None,
        byweekday: int | weekday | Iterable[int] | Iterable[weekday] | None = None,
        byhour: int | Iterable[int] | None = None,
        byminute: int | Iterable[int] | None = None,
        bysecond: int | Iterable[int] | None = None,
        cache: bool = False,
    ) -> None: ...
    def replace(self, **kwargs): ...

_RRule: TypeAlias = rrule

class _iterinfo:
    rrule: _RRule
    def __init__(self, rrule: _RRule) -> None: ...
    yearlen: int | None
    nextyearlen: int | None
    yearordinal: int | None
    yearweekday: int | None
    mmask: Sequence[int] | None
    mdaymask: Sequence[int] | None
    nmdaymask: Sequence[int] | None
    wdaymask: Sequence[int] | None
    mrange: Sequence[int] | None
    wnomask: Sequence[int] | None
    nwdaymask: Sequence[int] | None
    eastermask: Sequence[int] | None
    lastyear: int | None
    lastmonth: int | None
    def rebuild(self, year, month): ...
    def ydayset(self, year, month, day): ...
    def mdayset(self, year, month, day): ...
    def wdayset(self, year, month, day): ...
    def ddayset(self, year, month, day): ...
    def htimeset(self, hour, minute, second): ...
    def mtimeset(self, hour, minute, second): ...
    def stimeset(self, hour, minute, second): ...

class rruleset(rrulebase):
    class _genitem:
        dt: Incomplete
        genlist: list[Incomplete]
        gen: Incomplete
        def __init__(self, genlist, gen) -> None: ...
        def __next__(self) -> None: ...
        next = __next__
        def __lt__(self, other) -> bool: ...
        def __gt__(self, other) -> bool: ...
        def __eq__(self, other) -> bool: ...
        def __ne__(self, other) -> bool: ...

    def __init__(self, cache: bool = False) -> None: ...
    def rrule(self, rrule: _RRule): ...
    def rdate(self, rdate): ...
    def exrule(self, exrule): ...
    def exdate(self, exdate): ...

class _rrulestr:
    def __call__(self, s, **kwargs) -> rrule | rruleset: ...

rrulestr: _rrulestr
15
lib/dateutil-stubs/tz/__init__.pyi
Normal file

@@ -0,0 +1,15 @@
from .tz import (
    datetime_ambiguous as datetime_ambiguous,
    datetime_exists as datetime_exists,
    gettz as gettz,
    resolve_imaginary as resolve_imaginary,
    tzfile as tzfile,
    tzical as tzical,
    tzlocal as tzlocal,
    tzoffset as tzoffset,
    tzrange as tzrange,
    tzstr as tzstr,
    tzutc as tzutc,
)

UTC: tzutc
28
lib/dateutil-stubs/tz/_common.pyi
Normal file

@@ -0,0 +1,28 @@
import abc
from datetime import datetime, timedelta, tzinfo
from typing import ClassVar

def tzname_in_python2(namefunc): ...
def enfold(dt: datetime, fold: int = 1): ...

class _DatetimeWithFold(datetime):
    @property
    def fold(self): ...

# Doesn't actually have ABCMeta as the metaclass at runtime,
# but mypy complains if we don't have it in the stub.
# See discussion in #8908
class _tzinfo(tzinfo, metaclass=abc.ABCMeta):
    def is_ambiguous(self, dt: datetime) -> bool: ...
    def fromutc(self, dt: datetime) -> datetime: ...

class tzrangebase(_tzinfo):
    def __init__(self) -> None: ...
    def utcoffset(self, dt: datetime | None) -> timedelta | None: ...
    def dst(self, dt: datetime | None) -> timedelta | None: ...
    def tzname(self, dt: datetime | None) -> str: ...
    def fromutc(self, dt: datetime) -> datetime: ...
    def is_ambiguous(self, dt: datetime) -> bool: ...
    __hash__: ClassVar[None]  # type: ignore[assignment]
    def __ne__(self, other): ...
    __reduce__ = object.__reduce__
115
lib/dateutil-stubs/tz/tz.pyi
Normal file

@@ -0,0 +1,115 @@
import datetime
from _typeshed import Incomplete
from typing import ClassVar, Literal, Protocol, TypeVar

from ..relativedelta import relativedelta
from ._common import _tzinfo as _tzinfo, enfold as enfold, tzname_in_python2 as tzname_in_python2, tzrangebase as tzrangebase

_DT = TypeVar("_DT", bound=datetime.datetime)

ZERO: datetime.timedelta
EPOCH: datetime.datetime
EPOCHORDINAL: int

class tzutc(datetime.tzinfo):
    def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
    def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
    def tzname(self, dt: datetime.datetime | None) -> str: ...
    def is_ambiguous(self, dt: datetime.datetime | None) -> bool: ...
    def fromutc(self, dt: _DT) -> _DT: ...
    def __eq__(self, other): ...
    __hash__: ClassVar[None]  # type: ignore[assignment]
    def __ne__(self, other): ...
    __reduce__ = object.__reduce__

class tzoffset(datetime.tzinfo):
    def __init__(self, name, offset) -> None: ...
    def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
    def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
    def is_ambiguous(self, dt: datetime.datetime | None) -> bool: ...
    def tzname(self, dt: datetime.datetime | None) -> str: ...
    def fromutc(self, dt: _DT) -> _DT: ...
    def __eq__(self, other): ...
    __hash__: ClassVar[None]  # type: ignore[assignment]
    def __ne__(self, other): ...
    __reduce__ = object.__reduce__
    @classmethod
    def instance(cls, name, offset) -> tzoffset: ...

class tzlocal(_tzinfo):
    def __init__(self) -> None: ...
    def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
    def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
    def tzname(self, dt: datetime.datetime | None) -> str: ...
    def is_ambiguous(self, dt: datetime.datetime | None) -> bool: ...
    def __eq__(self, other): ...
    __hash__: ClassVar[None]  # type: ignore[assignment]
    def __ne__(self, other): ...
    __reduce__ = object.__reduce__

class _ttinfo:
    def __init__(self) -> None: ...
    def __eq__(self, other): ...
    __hash__: ClassVar[None]  # type: ignore[assignment]
    def __ne__(self, other): ...

class _TZFileReader(Protocol):
    # optional attribute:
    # name: str
    def read(self, size: int, /) -> bytes: ...
    def seek(self, target: int, whence: Literal[1], /) -> object: ...

class tzfile(_tzinfo):
    def __init__(self, fileobj: str | _TZFileReader, filename: str | None = None) -> None: ...
    def is_ambiguous(self, dt: datetime.datetime | None, idx: int | None = None) -> bool: ...
    def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
    def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
    def tzname(self, dt: datetime.datetime | None) -> str: ...
    def __eq__(self, other): ...
    __hash__: ClassVar[None]  # type: ignore[assignment]
    def __ne__(self, other): ...
    def __reduce__(self): ...
    def __reduce_ex__(self, protocol): ...

class tzrange(tzrangebase):
    hasdst: bool
    def __init__(
        self,
        stdabbr: str,
        stdoffset: int | datetime.timedelta | None = None,
        dstabbr: str | None = None,
        dstoffset: int | datetime.timedelta | None = None,
        start: relativedelta | None = None,
        end: relativedelta | None = None,
    ) -> None: ...
    def transitions(self, year: int) -> tuple[datetime.datetime, datetime.datetime]: ...
    def __eq__(self, other): ...

class tzstr(tzrange):
    hasdst: bool
    def __init__(self, s: str, posix_offset: bool = False) -> None: ...
    @classmethod
    def instance(cls, name, offset) -> tzoffset: ...

class _ICalReader(Protocol):
    # optional attribute:
    # name: str
    def read(self) -> str: ...

class tzical:
    def __init__(self, fileobj: str | _ICalReader) -> None: ...
    def keys(self): ...
    def get(self, tzid: Incomplete | None = None): ...

TZFILES: list[str]
TZPATHS: list[str]

def datetime_exists(dt: datetime.datetime, tz: datetime.tzinfo | None = None) -> bool: ...
def datetime_ambiguous(dt: datetime.datetime, tz: datetime.tzinfo | None = None) -> bool: ...
def resolve_imaginary(dt: datetime.datetime) -> datetime.datetime: ...

class _GetTZ:
    def __call__(self, name: str | None = ...) -> datetime.tzinfo | None: ...
    def nocache(self, name: str | None) -> datetime.tzinfo | None: ...

gettz: _GetTZ
5
lib/dateutil-stubs/utils.pyi
Normal file

@@ -0,0 +1,5 @@
from datetime import datetime, timedelta, tzinfo

def default_tzinfo(dt: datetime, tzinfo: tzinfo) -> datetime: ...
def today(tzinfo: tzinfo | None = None) -> datetime: ...
def within_delta(dt1: datetime, dt2: datetime, delta: timedelta) -> bool: ...
17
lib/dateutil-stubs/zoneinfo/__init__.pyi
Normal file

@@ -0,0 +1,17 @@
from _typeshed import Incomplete
from typing import IO
from typing_extensions import TypeAlias

__all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata"]

_MetadataType: TypeAlias = dict[str, Incomplete]

class ZoneInfoFile:
    zones: dict[Incomplete, Incomplete]
    metadata: _MetadataType | None
    def __init__(self, zonefile_stream: IO[bytes] | None = None) -> None: ...
    def get(self, name, default: Incomplete | None = None): ...

def get_zonefile_instance(new_instance: bool = False) -> ZoneInfoFile: ...
def gettz(name): ...
def gettz_db_metadata() -> _MetadataType: ...
11
lib/dateutil-stubs/zoneinfo/rebuild.pyi
Normal file

@@ -0,0 +1,11 @@
from _typeshed import Incomplete, StrOrBytesPath
from collections.abc import Sequence
from tarfile import TarInfo

def rebuild(
    filename: StrOrBytesPath,
    tag: Incomplete | None = None,
    format: str = "gz",
    zonegroups: Sequence[str | TarInfo] = [],
    metadata: Incomplete | None = None,
) -> None: ...
@@ -1,4 +1,6 @@
# -*- coding: utf-8 -*-
import sys

try:
    from ._version import version as __version__
except ImportError:

@@ -6,3 +8,17 @@ except ImportError:

__all__ = ['easter', 'parser', 'relativedelta', 'rrule', 'tz',
           'utils', 'zoneinfo']


def __getattr__(name):
    import importlib

    if name in __all__:
        return importlib.import_module("." + name, __name__)
    raise AttributeError(
        "module {!r} has not attribute {!r}".format(__name__, name)
    )


def __dir__():
    # __dir__ should include all the lazy-importable modules as well.
    return [x for x in globals() if x not in sys.modules] + __all__
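The hunk above makes dateutil's submodules lazily importable via module-level __getattr__/__dir__ (PEP 562). A standalone sketch of the same pattern for a hypothetical package named mypkg (everything here is illustrative, not dateutil's code):

# mypkg/__init__.py -- hypothetical package mirroring the pattern above
import importlib
import sys

__all__ = ['alpha', 'beta']  # hypothetical submodules

def __getattr__(name):
    # Called only when normal attribute lookup fails (PEP 562),
    # so submodules are imported on first access.
    if name in __all__:
        return importlib.import_module("." + name, __name__)
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")

def __dir__():
    # Advertise lazily importable submodules that are not loaded yet.
    return [x for x in globals() if x not in sys.modules] + __all__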
@@ -1,5 +1,4 @@
# coding: utf-8
# file generated by setuptools_scm
# don't change, don't track in version control
version = '2.8.2'
version_tuple = (2, 8, 2)
__version__ = version = '2.9.0.post0'
__version_tuple__ = version_tuple = (2, 9, 0)
@@ -72,7 +72,7 @@ class isoparser(object):
        Common:

        - ``YYYY``
        - ``YYYY-MM`` or ``YYYYMM``
        - ``YYYY-MM``
        - ``YYYY-MM-DD`` or ``YYYYMMDD``

        Uncommon:
@@ -182,7 +182,7 @@ class rrulebase(object):
    # __len__() introduces a large performance penalty.
    def count(self):
        """ Returns the number of recurrences in this set. It will have go
        trough the whole recurrence, if this hasn't been done before. """
        through the whole recurrence, if this hasn't been done before. """
        if self._len is None:
            for x in self:
                pass
@@ -34,7 +34,7 @@ except ImportError:
from warnings import warn

ZERO = datetime.timedelta(0)
EPOCH = datetime.datetime.utcfromtimestamp(0)
EPOCH = datetime.datetime(1970, 1, 1, 0, 0)
EPOCHORDINAL = EPOCH.toordinal()
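The EPOCH change sidesteps datetime.datetime.utcfromtimestamp, which is deprecated as of Python 3.12; both expressions produce the same naive datetime, as this quick stdlib-only check (not part of the diff) shows:

import datetime

# Both are the naive Unix epoch; the literal avoids the 3.12 deprecation warning.
assert datetime.datetime(1970, 1, 1, 0, 0) == datetime.datetime.utcfromtimestamp(0)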
Binary file not shown.
@@ -1,5 +1,5 @@
#!/usr/bin/env python
# Copyright 2015,2016,2017 Nir Cohen
# Copyright 2015-2021 Nir Cohen
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -55,7 +55,7 @@ except ImportError:
    # Python 3.7
    TypedDict = dict

__version__ = "1.8.0"
__version__ = "1.9.0"


class VersionDict(TypedDict):

@@ -125,6 +125,7 @@ _DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$")
# Base file names to be looked up for if _UNIXCONFDIR is not readable.
_DISTRO_RELEASE_BASENAMES = [
    "SuSE-release",
    "altlinux-release",
    "arch-release",
    "base-release",
    "centos-release",

@@ -151,6 +152,8 @@ _DISTRO_RELEASE_IGNORE_BASENAMES = (
    "system-release",
    "plesk-release",
    "iredmail-release",
    "board-release",
    "ec2_version",
)


@@ -243,6 +246,7 @@ def id() -> str:
    "rocky"        Rocky Linux
    "aix"          AIX
    "guix"         Guix System
    "altlinux"     ALT Linux
    ============== =========================================

    If you have a need to get distros for reliable IDs added into this set,

@@ -991,10 +995,10 @@ class LinuxDistribution:

        For details, see :func:`distro.info`.
        """
        return dict(
        return InfoDict(
            id=self.id(),
            version=self.version(pretty, best),
            version_parts=dict(
            version_parts=VersionDict(
                major=self.major_version(best),
                minor=self.minor_version(best),
                build_number=self.build_number(best),
@@ -7,7 +7,9 @@ import socket
import sys

import dns._asyncbackend
import dns._features
import dns.exception
import dns.inet

_is_win32 = sys.platform == "win32"

@@ -121,7 +123,7 @@ class StreamSocket(dns._asyncbackend.StreamSocket):
        return self.writer.get_extra_info("peercert")


try:
if dns._features.have("doh"):
    import anyio
    import httpcore
    import httpcore._backends.anyio

@@ -205,7 +207,7 @@ try:
                resolver, local_port, bootstrap_address, family
            )

except ImportError:
else:
    _HTTPTransport = dns._asyncbackend.NullTransport  # type: ignore


@@ -224,14 +226,12 @@ class Backend(dns._asyncbackend.Backend):
        ssl_context=None,
        server_hostname=None,
    ):
        if destination is None and socktype == socket.SOCK_DGRAM and _is_win32:
            raise NotImplementedError(
                "destinationless datagram sockets "
                "are not supported by asyncio "
                "on Windows"
            )
        loop = _get_running_loop()
        if socktype == socket.SOCK_DGRAM:
            if _is_win32 and source is None:
                # Win32 wants explicit binding before recvfrom(). This is the
                # proper fix for [#637].
                source = (dns.inet.any_for_af(af), 0)
            transport, protocol = await loop.create_datagram_endpoint(
                _DatagramProtocol,
                source,

@@ -266,7 +266,7 @@ class Backend(dns._asyncbackend.Backend):
            await asyncio.sleep(interval)

    def datagram_connection_required(self):
        return _is_win32
        return False

    def get_transport_class(self):
        return _HTTPTransport
92 lib/dns/_features.py Normal file
@@ -0,0 +1,92 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+import importlib.metadata
+import itertools
+import string
+from typing import Dict, List, Tuple
+
+
+def _tuple_from_text(version: str) -> Tuple:
+    text_parts = version.split(".")
+    int_parts = []
+    for text_part in text_parts:
+        digit_prefix = "".join(
+            itertools.takewhile(lambda x: x in string.digits, text_part)
+        )
+        try:
+            int_parts.append(int(digit_prefix))
+        except Exception:
+            break
+    return tuple(int_parts)
+
+
+def _version_check(
+    requirement: str,
+) -> bool:
+    """Is the requirement fulfilled?
+
+    The requirement must be of the form
+
+    package>=version
+    """
+    package, minimum = requirement.split(">=")
+    try:
+        version = importlib.metadata.version(package)
+    except Exception:
+        return False
+    t_version = _tuple_from_text(version)
+    t_minimum = _tuple_from_text(minimum)
+    if t_version < t_minimum:
+        return False
+    return True
+
+
+_cache: Dict[str, bool] = {}
+
+
+def have(feature: str) -> bool:
+    """Is *feature* available?
+
+    This tests if all optional packages needed for the
+    feature are available and recent enough.
+
+    Returns ``True`` if the feature is available,
+    and ``False`` if it is not or if metadata is
+    missing.
+    """
+    value = _cache.get(feature)
+    if value is not None:
+        return value
+    requirements = _requirements.get(feature)
+    if requirements is None:
+        # we make a cache entry here for consistency not performance
+        _cache[feature] = False
+        return False
+    ok = True
+    for requirement in requirements:
+        if not _version_check(requirement):
+            ok = False
+            break
+    _cache[feature] = ok
+    return ok
+
+
+def force(feature: str, enabled: bool) -> None:
+    """Force the status of *feature* to be *enabled*.
+
+    This method is provided as a workaround for any cases
+    where importlib.metadata is ineffective, or for testing.
+    """
+    _cache[feature] = enabled
+
+
+_requirements: Dict[str, List[str]] = {
+    ### BEGIN generated requirements
+    "dnssec": ["cryptography>=41"],
+    "doh": ["httpcore>=1.0.0", "httpx>=0.26.0", "h2>=4.1.0"],
+    "doq": ["aioquic>=0.9.25"],
+    "idna": ["idna>=3.6"],
+    "trio": ["trio>=0.23"],
+    "wmi": ["wmi>=1.5.1"],
+    ### END generated requirements
+}
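
The new module replaces scattered try/except ImportError probes with a single requirements table plus a cache. A minimal sketch of how callers use it, based only on the file above (illustrative usage, not part of the patch):

    import dns._features

    # First call checks installed package versions via importlib.metadata;
    # the result is cached, so later calls are cheap.
    if dns._features.have("doh"):
        print("DNS-over-HTTPS deps (httpx/httpcore/h2) are available")

    # force() overrides the cache, e.g. for tests or when metadata is missing.
    dns._features.force("doh", False)
    assert not dns._features.have("doh")
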
@@ -8,9 +8,13 @@ import trio
 import trio.socket  # type: ignore
 
 import dns._asyncbackend
+import dns._features
 import dns.exception
 import dns.inet
 
+if not dns._features.have("trio"):
+    raise ImportError("trio not found or too old")
+
 
 def _maybe_timeout(timeout):
     if timeout is not None:
@@ -95,7 +99,7 @@ class StreamSocket(dns._asyncbackend.StreamSocket):
         raise NotImplementedError
 
 
-try:
+if dns._features.have("doh"):
     import httpcore
     import httpcore._backends.trio
     import httpx
@@ -177,7 +181,7 @@ try:
                 resolver, local_port, bootstrap_address, family
             )
 
-except ImportError:
+else:
     _HTTPTransport = dns._asyncbackend.NullTransport  # type: ignore
@@ -32,7 +32,7 @@ def get_backend(name: str) -> Backend:
     *name*, a ``str``, the name of the backend.  Currently the "trio"
     and "asyncio" backends are available.
 
-    Raises NotImplementError if an unknown backend name is specified.
+    Raises NotImplementedError if an unknown backend name is specified.
     """
     # pylint: disable=import-outside-toplevel,redefined-outer-name
    backend = _backends.get(name)
@@ -41,7 +41,7 @@ from dns.query import (
     NoDOQ,
     UDPMode,
     _compute_times,
-    _have_http2,
+    _make_dot_ssl_context,
     _matches_destination,
     _remaining,
     have_doh,
@@ -120,6 +120,8 @@ async def receive_udp(
     request_mac: Optional[bytes] = b"",
     ignore_trailing: bool = False,
     raise_on_truncation: bool = False,
+    ignore_errors: bool = False,
+    query: Optional[dns.message.Message] = None,
 ) -> Any:
     """Read a DNS message from a UDP socket.
 
@@ -133,22 +135,40 @@ async def receive_udp(
     """
 
     wire = b""
-    while 1:
+    while True:
         (wire, from_address) = await sock.recvfrom(65535, _timeout(expiration))
-        if _matches_destination(
+        if not _matches_destination(
             sock.family, from_address, destination, ignore_unexpected
         ):
-            break
-    received_time = time.time()
-    r = dns.message.from_wire(
-        wire,
-        keyring=keyring,
-        request_mac=request_mac,
-        one_rr_per_rrset=one_rr_per_rrset,
-        ignore_trailing=ignore_trailing,
-        raise_on_truncation=raise_on_truncation,
-    )
-    return (r, received_time, from_address)
+            continue
+        received_time = time.time()
+        try:
+            r = dns.message.from_wire(
+                wire,
+                keyring=keyring,
+                request_mac=request_mac,
+                one_rr_per_rrset=one_rr_per_rrset,
+                ignore_trailing=ignore_trailing,
+                raise_on_truncation=raise_on_truncation,
+            )
+        except dns.message.Truncated as e:
+            # See the comment in query.py for details.
+            if (
+                ignore_errors
+                and query is not None
+                and not query.is_response(e.message())
+            ):
+                continue
+            else:
+                raise
+        except Exception:
+            if ignore_errors:
+                continue
+            else:
+                raise
+        if ignore_errors and query is not None and not query.is_response(r):
+            continue
+        return (r, received_time, from_address)
 
 
 async def udp(
@@ -164,6 +184,7 @@ async def udp(
     raise_on_truncation: bool = False,
     sock: Optional[dns.asyncbackend.DatagramSocket] = None,
     backend: Optional[dns.asyncbackend.Backend] = None,
+    ignore_errors: bool = False,
 ) -> dns.message.Message:
     """Return the response obtained after sending a query via UDP.
 
@@ -205,9 +226,13 @@ async def udp(
             q.mac,
             ignore_trailing,
             raise_on_truncation,
+            ignore_errors,
+            q,
         )
         r.time = received_time - begin_time
-        if not q.is_response(r):
+        # We don't need to check q.is_response() if we are in ignore_errors mode
+        # as receive_udp() will have checked it.
+        if not (ignore_errors or q.is_response(r)):
             raise BadResponse
         return r
@@ -225,6 +250,7 @@ async def udp_with_fallback(
     udp_sock: Optional[dns.asyncbackend.DatagramSocket] = None,
     tcp_sock: Optional[dns.asyncbackend.StreamSocket] = None,
     backend: Optional[dns.asyncbackend.Backend] = None,
+    ignore_errors: bool = False,
 ) -> Tuple[dns.message.Message, bool]:
     """Return the response to the query, trying UDP first and falling back
     to TCP if UDP results in a truncated response.
 
@@ -260,6 +286,7 @@ async def udp_with_fallback(
             True,
             udp_sock,
             backend,
+            ignore_errors,
         )
         return (response, False)
     except dns.message.Truncated:
@@ -292,14 +319,12 @@ async def send_tcp(
     """
 
     if isinstance(what, dns.message.Message):
-        wire = what.to_wire()
+        tcpmsg = what.to_wire(prepend_length=True)
     else:
-        wire = what
-    l = len(wire)
-    # copying the wire into tcpmsg is inefficient, but lets us
-    # avoid writev() or doing a short write that would get pushed
-    # onto the net
-    tcpmsg = struct.pack("!H", l) + wire
+        # copying the wire into tcpmsg is inefficient, but lets us
+        # avoid writev() or doing a short write that would get pushed
+        # onto the net
+        tcpmsg = len(what).to_bytes(2, "big") + what
     sent_time = time.time()
     await sock.sendall(tcpmsg, _timeout(expiration, sent_time))
     return (len(tcpmsg), sent_time)
@@ -418,6 +443,7 @@ async def tls(
     backend: Optional[dns.asyncbackend.Backend] = None,
     ssl_context: Optional[ssl.SSLContext] = None,
     server_hostname: Optional[str] = None,
+    verify: Union[bool, str] = True,
 ) -> dns.message.Message:
     """Return the response obtained after sending a query via TLS.
 
@@ -439,11 +465,7 @@ async def tls(
         cm: contextlib.AbstractAsyncContextManager = NullContext(sock)
     else:
         if ssl_context is None:
-            # See the comment about ssl.create_default_context() in query.py
-            ssl_context = ssl.create_default_context()  # lgtm[py/insecure-protocol]
-            ssl_context.minimum_version = ssl.TLSVersion.TLSv1_2
-            if server_hostname is None:
-                ssl_context.check_hostname = False
+            ssl_context = _make_dot_ssl_context(server_hostname, verify)
         af = dns.inet.af_for_address(where)
         stuple = _source_tuple(af, source, source_port)
         dtuple = (where, port)
@@ -538,7 +560,7 @@ async def https(
     transport = backend.get_transport_class()(
         local_address=local_address,
         http1=True,
-        http2=_have_http2,
+        http2=True,
         verify=verify,
         local_port=local_port,
         bootstrap_address=bootstrap_address,
@@ -550,7 +572,7 @@ async def https(
         cm: contextlib.AbstractAsyncContextManager = NullContext(client)
     else:
         cm = httpx.AsyncClient(
-            http1=True, http2=_have_http2, verify=verify, transport=transport
+            http1=True, http2=True, verify=verify, transport=transport
         )
 
     async with cm as the_client:
@@ -27,6 +27,7 @@ import time
 from datetime import datetime
 from typing import Callable, Dict, List, Optional, Set, Tuple, Union, cast
 
+import dns._features
 import dns.exception
 import dns.name
 import dns.node
@@ -1169,7 +1170,7 @@ def _need_pyca(*args, **kwargs):
     )  # pragma: no cover
 
 
-try:
+if dns._features.have("dnssec"):
     from cryptography.exceptions import InvalidSignature
     from cryptography.hazmat.primitives.asymmetric import dsa  # pylint: disable=W0611
     from cryptography.hazmat.primitives.asymmetric import ec  # pylint: disable=W0611
@@ -1184,20 +1185,20 @@ try:
         get_algorithm_cls_from_dnskey,
     )
     from dns.dnssecalgs.base import GenericPrivateKey, GenericPublicKey
-except ImportError:  # pragma: no cover
-    validate = _need_pyca
-    validate_rrsig = _need_pyca
-    sign = _need_pyca
-    make_dnskey = _need_pyca
-    make_cdnskey = _need_pyca
-    _have_pyca = False
-else:
 
     validate = _validate  # type: ignore
     validate_rrsig = _validate_rrsig  # type: ignore
     sign = _sign
     make_dnskey = _make_dnskey
     make_cdnskey = _make_cdnskey
     _have_pyca = True
+else:  # pragma: no cover
+    validate = _need_pyca
+    validate_rrsig = _need_pyca
+    sign = _need_pyca
+    make_dnskey = _need_pyca
+    make_cdnskey = _need_pyca
+    _have_pyca = False
 
 ### BEGIN generated Algorithm constants
@@ -1,9 +1,12 @@
 from typing import Dict, Optional, Tuple, Type, Union
 
 import dns.name
+from dns.dnssecalgs.base import GenericPrivateKey
+from dns.dnssectypes import Algorithm
+from dns.exception import UnsupportedAlgorithm
+from dns.rdtypes.ANY.DNSKEY import DNSKEY
 
-try:
-    from dns.dnssecalgs.base import GenericPrivateKey
+if dns._features.have("dnssec"):
     from dns.dnssecalgs.dsa import PrivateDSA, PrivateDSANSEC3SHA1
     from dns.dnssecalgs.ecdsa import PrivateECDSAP256SHA256, PrivateECDSAP384SHA384
     from dns.dnssecalgs.eddsa import PrivateED448, PrivateED25519
@@ -16,13 +19,9 @@ try:
     )
 
     _have_cryptography = True
-except ImportError:
+else:
     _have_cryptography = False
 
-from dns.dnssectypes import Algorithm
-from dns.exception import UnsupportedAlgorithm
-from dns.rdtypes.ANY.DNSKEY import DNSKEY
-
 AlgorithmPrefix = Optional[Union[bytes, dns.name.Name]]
 
 algorithms: Dict[Tuple[Algorithm, AlgorithmPrefix], Type[GenericPrivateKey]] = {}
@@ -17,6 +17,7 @@
 
 """EDNS Options"""
 
+import binascii
 import math
 import socket
 import struct
@@ -58,7 +59,6 @@ class OptionType(dns.enum.IntEnum):
 
 
 class Option:
-
     """Base class for all EDNS option types."""
 
     def __init__(self, otype: Union[OptionType, str]):
@@ -76,6 +76,9 @@ class Option:
         """
         raise NotImplementedError  # pragma: no cover
 
+    def to_text(self) -> str:
+        raise NotImplementedError  # pragma: no cover
+
     @classmethod
     def from_wire_parser(cls, otype: OptionType, parser: "dns.wire.Parser") -> "Option":
         """Build an EDNS option object from wire format.
@@ -141,7 +144,6 @@ class Option:
 
 
 class GenericOption(Option):  # lgtm[py/missing-equals]
-
     """Generic Option Class
 
     This class is used for EDNS option types for which we have no better
@@ -343,6 +345,8 @@ class EDECode(dns.enum.IntEnum):
 class EDEOption(Option):  # lgtm[py/missing-equals]
     """Extended DNS Error (EDE, RFC8914)"""
 
+    _preserve_case = {"DNSKEY", "DS", "DNSSEC", "RRSIGs", "NSEC", "NXDOMAIN"}
+
     def __init__(self, code: Union[EDECode, str], text: Optional[str] = None):
         """*code*, a ``dns.edns.EDECode`` or ``str``, the info code of the
         extended error.
@@ -360,6 +364,13 @@ class EDEOption(Option):  # lgtm[py/missing-equals]
 
     def to_text(self) -> str:
         output = f"EDE {self.code}"
+        if self.code in EDECode:
+            desc = EDECode.to_text(self.code)
+            desc = " ".join(
+                word if word in self._preserve_case else word.title()
+                for word in desc.split("_")
+            )
+            output += f" ({desc})"
         if self.text is not None:
             output += f": {self.text}"
         return output
@@ -392,9 +403,37 @@ class EDEOption(Option):  # lgtm[py/missing-equals]
         return cls(code, btext)
 
 
+class NSIDOption(Option):
+    def __init__(self, nsid: bytes):
+        super().__init__(OptionType.NSID)
+        self.nsid = nsid
+
+    def to_wire(self, file: Any = None) -> Optional[bytes]:
+        if file:
+            file.write(self.nsid)
+            return None
+        else:
+            return self.nsid
+
+    def to_text(self) -> str:
+        if all(c >= 0x20 and c <= 0x7E for c in self.nsid):
+            # All ASCII printable, so it's probably a string.
+            value = self.nsid.decode()
+        else:
+            value = binascii.hexlify(self.nsid).decode()
+        return f"NSID {value}"
+
+    @classmethod
+    def from_wire_parser(
+        cls, otype: Union[OptionType, str], parser: dns.wire.Parser
+    ) -> Option:
+        return cls(parser.get_remaining())
+
+
 _type_to_class: Dict[OptionType, Any] = {
     OptionType.ECS: ECSOption,
     OptionType.EDE: EDEOption,
+    OptionType.NSID: NSIDOption,
 }
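
With NSIDOption registered in _type_to_class, NSID responses now parse into a dedicated class whose to_text() renders printable ASCII directly and falls back to hex. A short sketch of requesting NSID (RFC 5001); the resolver address is illustrative and the server may simply omit the option:

    import dns.edns
    import dns.message
    import dns.query

    # An empty NSID option asks the server to identify itself.
    q = dns.message.make_query(
        "example.com.", "A", use_edns=0,
        options=[dns.edns.GenericOption(dns.edns.OptionType.NSID, b"")],
    )
    r = dns.query.udp(q, "9.9.9.9", timeout=2)
    for opt in r.options:
        if opt.otype == dns.edns.OptionType.NSID:
            print(opt.to_text())  # e.g. "NSID res100...." or hex digits
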
@@ -1,24 +1,30 @@
 # Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
 
 import collections.abc
-from typing import Any
+from typing import Any, Callable
 
 from dns._immutable_ctx import immutable
 
 
 @immutable
 class Dict(collections.abc.Mapping):  # lgtm[py/missing-equals]
-    def __init__(self, dictionary: Any, no_copy: bool = False):
+    def __init__(
+        self,
+        dictionary: Any,
+        no_copy: bool = False,
+        map_factory: Callable[[], collections.abc.MutableMapping] = dict,
+    ):
         """Make an immutable dictionary from the specified dictionary.
 
         If *no_copy* is `True`, then *dictionary* will be wrapped instead
         of copied.  Only set this if you are sure there will be no external
         references to the dictionary.
         """
-        if no_copy and isinstance(dictionary, dict):
+        if no_copy and isinstance(dictionary, collections.abc.MutableMapping):
            self._odict = dictionary
         else:
-            self._odict = dict(dictionary)
+            self._odict = map_factory()
+            self._odict.update(dictionary)
         self._hash = None
 
     def __getitem__(self, key):
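
The widened no_copy check and the new map_factory hook let an immutable view wrap or copy into any MutableMapping, not just dict. A small sketch under those semantics:

    import collections
    import dns.immutable

    od = collections.OrderedDict(a=1, b=2)
    # no_copy now accepts any MutableMapping; map_factory controls the copy type.
    frozen = dns.immutable.Dict(od, no_copy=True)
    copied = dns.immutable.Dict({"a": 1, "b": 2}, map_factory=collections.OrderedDict)
    assert frozen["a"] == copied["a"] == 1
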
@@ -178,3 +178,20 @@ def any_for_af(af):
     elif af == socket.AF_INET6:
         return "::"
     raise NotImplementedError(f"unknown address family {af}")
+
+
+def canonicalize(text: str) -> str:
+    """Verify that *address* is a valid text form IPv4 or IPv6 address and return its
+    canonical text form.  IPv6 addresses with scopes are rejected.
+
+    *text*, a ``str``, the address in textual form.
+
+    Raises ``ValueError`` if the text is not valid.
+    """
+    try:
+        return dns.ipv6.canonicalize(text)
+    except Exception:
+        try:
+            return dns.ipv4.canonicalize(text)
+        except Exception:
+            raise ValueError
@@ -62,3 +62,16 @@ def inet_aton(text: Union[str, bytes]) -> bytes:
         return struct.pack("BBBB", *b)
     except Exception:
         raise dns.exception.SyntaxError
+
+
+def canonicalize(text: Union[str, bytes]) -> str:
+    """Verify that *address* is a valid text form IPv4 address and return its
+    canonical text form.
+
+    *text*, a ``str`` or ``bytes``, the IPv4 address in textual form.
+
+    Raises ``dns.exception.SyntaxError`` if the text is not valid.
+    """
+    # Note that inet_aton() only accepts canonial form, but we still run through
+    # inet_ntoa() to ensure the output is a str.
+    return dns.ipv4.inet_ntoa(dns.ipv4.inet_aton(text))
@@ -104,7 +104,7 @@ _colon_colon_end = re.compile(rb".*::$")
 def inet_aton(text: Union[str, bytes], ignore_scope: bool = False) -> bytes:
     """Convert an IPv6 address in text form to binary form.
 
-    *text*, a ``str``, the IPv6 address in textual form.
+    *text*, a ``str`` or ``bytes``, the IPv6 address in textual form.
 
     *ignore_scope*, a ``bool``.  If ``True``, a scope will be ignored.
     If ``False``, the default, it is an error for a scope to be present.
@@ -206,3 +206,14 @@ def is_mapped(address: bytes) -> bool:
     """
 
     return address.startswith(_mapped_prefix)
+
+
+def canonicalize(text: Union[str, bytes]) -> str:
+    """Verify that *address* is a valid text form IPv6 address and return its
+    canonical text form.  Addresses with scopes are rejected.
+
+    *text*, a ``str`` or ``bytes``, the IPv6 address in textual form.
+
+    Raises ``dns.exception.SyntaxError`` if the text is not valid.
+    """
+    return dns.ipv6.inet_ntoa(dns.ipv6.inet_aton(text))
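
The three canonicalize() helpers layer naturally: dns.inet tries IPv6 first, then IPv4. For example:

    import dns.inet

    # IPv6 text collapses to its canonical compressed form.
    assert dns.inet.canonicalize("2001:0db8:0000:0000:0000:0000:0000:0001") == "2001:db8::1"
    # Canonical IPv4 passes through (inet_aton only accepts canonical form).
    assert dns.inet.canonicalize("192.0.2.1") == "192.0.2.1"
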
@@ -393,7 +393,7 @@ class Message:
             section_number = section
             section = self.section_from_number(section_number)
         elif isinstance(section, str):
-            section_number = MessageSection.from_text(section)
+            section_number = self._section_enum.from_text(section)
             section = self.section_from_number(section_number)
         else:
             section_number = self.section_number(section)
@@ -489,6 +489,34 @@ class Message:
             rrset = None
         return rrset
 
+    def section_count(self, section: SectionType) -> int:
+        """Returns the number of records in the specified section.
+
+        *section*, an ``int`` section number, a ``str`` section name, or one of
+        the section attributes of this message.  This specifies the
+        the section of the message to count.  For example::
+
+            my_message.section_count(my_message.answer)
+            my_message.section_count(dns.message.ANSWER)
+            my_message.section_count("ANSWER")
+        """
+
+        if isinstance(section, int):
+            section_number = section
+            section = self.section_from_number(section_number)
+        elif isinstance(section, str):
+            section_number = self._section_enum.from_text(section)
+            section = self.section_from_number(section_number)
+        else:
+            section_number = self.section_number(section)
+        count = sum(max(1, len(rrs)) for rrs in section)
+        if section_number == MessageSection.ADDITIONAL:
+            if self.opt is not None:
+                count += 1
+            if self.tsig is not None:
+                count += 1
+        return count
+
     def _compute_opt_reserve(self) -> int:
         """Compute the size required for the OPT RR, padding excluded"""
         if not self.opt:
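
All three addressing styles from the docstring resolve to the same section; a quick sketch:

    import dns.message

    q = dns.message.make_query("example.com.", "A")
    assert q.section_count("QUESTION") == 1
    assert q.section_count(dns.message.QUESTION) == 1
    assert q.section_count(q.question) == 1
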
@@ -527,6 +555,8 @@ class Message:
         max_size: int = 0,
         multi: bool = False,
         tsig_ctx: Optional[Any] = None,
+        prepend_length: bool = False,
+        prefer_truncation: bool = False,
         **kw: Dict[str, Any],
     ) -> bytes:
         """Return a string containing the message in DNS compressed wire
 
@@ -549,6 +579,15 @@ class Message:
         *tsig_ctx*, a ``dns.tsig.HMACTSig`` or ``dns.tsig.GSSTSig`` object, the
         ongoing TSIG context, used when signing zone transfers.
 
+        *prepend_length*, a ``bool``, should be set to ``True`` if the caller
+        wants the message length prepended to the message itself.  This is
+        useful for messages sent over TCP, TLS (DoT), or QUIC (DoQ).
+
+        *prefer_truncation*, a ``bool``, should be set to ``True`` if the caller
+        wants the message to be truncated if it would otherwise exceed the
+        maximum length.  If the truncation occurs before the additional section,
+        the TC bit will be set.
+
         Raises ``dns.exception.TooBig`` if *max_size* was exceeded.
 
         Returns a ``bytes``.
@@ -570,14 +609,21 @@ class Message:
         r.reserve(opt_reserve)
         tsig_reserve = self._compute_tsig_reserve()
         r.reserve(tsig_reserve)
-        for rrset in self.question:
-            r.add_question(rrset.name, rrset.rdtype, rrset.rdclass)
-        for rrset in self.answer:
-            r.add_rrset(dns.renderer.ANSWER, rrset, **kw)
-        for rrset in self.authority:
-            r.add_rrset(dns.renderer.AUTHORITY, rrset, **kw)
-        for rrset in self.additional:
-            r.add_rrset(dns.renderer.ADDITIONAL, rrset, **kw)
+        try:
+            for rrset in self.question:
+                r.add_question(rrset.name, rrset.rdtype, rrset.rdclass)
+            for rrset in self.answer:
+                r.add_rrset(dns.renderer.ANSWER, rrset, **kw)
+            for rrset in self.authority:
+                r.add_rrset(dns.renderer.AUTHORITY, rrset, **kw)
+            for rrset in self.additional:
+                r.add_rrset(dns.renderer.ADDITIONAL, rrset, **kw)
+        except dns.exception.TooBig:
+            if prefer_truncation:
+                if r.section < dns.renderer.ADDITIONAL:
+                    r.flags |= dns.flags.TC
+            else:
+                raise
         r.release_reserved()
         if self.opt is not None:
             r.add_opt(self.opt, self.pad, opt_reserve, tsig_reserve)
 
@@ -598,7 +644,10 @@ class Message:
         r.write_header()
         if multi:
             self.tsig_ctx = ctx
-        return r.get_wire()
+        wire = r.get_wire()
+        if prepend_length:
+            wire = len(wire).to_bytes(2, "big") + wire
+        return wire
 
     @staticmethod
     def _make_tsig(
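
prepend_length moves the two-byte TCP/DoT/DoQ length prefix into to_wire() itself, which is what the send_tcp() changes later in this patch rely on. For example:

    import dns.message

    q = dns.message.make_query("example.com.", "A")
    wire = q.to_wire(prepend_length=True)
    # The first two bytes carry the length of the rest of the message.
    assert int.from_bytes(wire[:2], "big") == len(wire) - 2
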
@@ -777,6 +826,8 @@ class Message:
         if request_payload is None:
             request_payload = payload
         self.request_payload = request_payload
+        if pad < 0:
+            raise ValueError("pad must be non-negative")
         self.pad = pad
 
     @property
@@ -826,7 +877,7 @@ class Message:
         if wanted:
             self.ednsflags |= dns.flags.DO
         elif self.opt:
-            self.ednsflags &= ~dns.flags.DO
+            self.ednsflags &= ~int(dns.flags.DO)
 
     def rcode(self) -> dns.rcode.Rcode:
         """Return the rcode.
@@ -1035,7 +1086,6 @@ def _message_factory_from_opcode(opcode):
 
 
 class _WireReader:
-
     """Wire format reader.
 
     parser: the binary parser
 
@@ -1335,7 +1385,6 @@ def from_wire(
 
 
 class _TextReader:
-
     """Text format reader.
 
     tok: the tokenizer.
@@ -1768,30 +1817,34 @@ def make_response(
     our_payload: int = 8192,
     fudge: int = 300,
     tsig_error: int = 0,
+    pad: Optional[int] = None,
 ) -> Message:
     """Make a message which is a response for the specified query.
-    The message returned is really a response skeleton; it has all
-    of the infrastructure required of a response, but none of the
-    content.
+    The message returned is really a response skeleton; it has all of the infrastructure
+    required of a response, but none of the content.
 
-    The response's question section is a shallow copy of the query's
-    question section, so the query's question RRsets should not be
-    changed.
+    The response's question section is a shallow copy of the query's question section,
+    so the query's question RRsets should not be changed.
 
     *query*, a ``dns.message.Message``, the query to respond to.
 
     *recursion_available*, a ``bool``, should RA be set in the response?
 
-    *our_payload*, an ``int``, the payload size to advertise in EDNS
-    responses.
+    *our_payload*, an ``int``, the payload size to advertise in EDNS responses.
 
     *fudge*, an ``int``, the TSIG time fudge.
 
     *tsig_error*, an ``int``, the TSIG error.
 
-    Returns a ``dns.message.Message`` object whose specific class is
-    appropriate for the query.  For example, if query is a
-    ``dns.update.UpdateMessage``, response will be too.
+    *pad*, a non-negative ``int`` or ``None``.  If 0, the default, do not pad; otherwise
+    if not ``None`` add padding bytes to make the message size a multiple of *pad*.
+    Note that if padding is non-zero, an EDNS PADDING option will always be added to the
+    message.  If ``None``, add padding following RFC 8467, namely if the request is
+    padded, pad the response to 468 otherwise do not pad.
+
+    Returns a ``dns.message.Message`` object whose specific class is appropriate for the
+    query.  For example, if query is a ``dns.update.UpdateMessage``, response will be
+    too.
     """
 
     if query.flags & dns.flags.QR:
@@ -1804,7 +1857,13 @@ def make_response(
     response.set_opcode(query.opcode())
     response.question = list(query.question)
     if query.edns >= 0:
-        response.use_edns(0, 0, our_payload, query.payload)
+        if pad is None:
+            # Set response padding per RFC 8467
+            pad = 0
+            for option in query.options:
+                if option.otype == dns.edns.OptionType.PADDING:
+                    pad = 468
+        response.use_edns(0, 0, our_payload, query.payload, pad=pad)
     if query.had_tsig:
         response.use_tsig(
             query.keyring,
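
With pad=None the response mirrors the query's padding policy per RFC 8467: a padded query gets a response padded to a multiple of 468 bytes. A sketch of that behavior:

    import dns.edns
    import dns.message

    padding = dns.edns.GenericOption(dns.edns.OptionType.PADDING, b"\x00" * 16)
    query = dns.message.make_query("example.com.", "A", use_edns=0, options=[padding])
    response = dns.message.make_response(query)
    assert len(response.to_wire()) % 468 == 0
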
217 lib/dns/name.py
@@ -20,21 +20,23 @@
 
 import copy
 import encodings.idna  # type: ignore
+import functools
 import struct
-from typing import Any, Dict, Iterable, Optional, Tuple, Union
-
-try:
-    import idna  # type: ignore
-
-    have_idna_2008 = True
-except ImportError:  # pragma: no cover
-    have_idna_2008 = False
+from typing import Any, Callable, Dict, Iterable, Optional, Tuple, Union
 
+import dns._features
 import dns.enum
 import dns.exception
 import dns.immutable
 import dns.wire
 
+if dns._features.have("idna"):
+    import idna  # type: ignore
+
+    have_idna_2008 = True
+else:  # pragma: no cover
+    have_idna_2008 = False
+
 CompressType = Dict["Name", int]
@@ -128,6 +130,10 @@ class IDNAException(dns.exception.DNSException):
         super().__init__(*args, **kwargs)
 
 
+class NeedSubdomainOfOrigin(dns.exception.DNSException):
+    """An absolute name was provided that is not a subdomain of the specified origin."""
+
+
 _escaped = b'"().;\\@$'
 _escaped_text = '"().;\\@$'
@@ -350,7 +356,6 @@ def _maybe_convert_to_binary(label: Union[bytes, str]) -> bytes:
 
 @dns.immutable.immutable
 class Name:
-
     """A DNS name.
 
     The dns.name.Name class represents a DNS name as a tuple of
@@ -843,6 +848,42 @@ class Name:
             raise NoParent
         return Name(self.labels[1:])
 
+    def predecessor(self, origin: "Name", prefix_ok: bool = True) -> "Name":
+        """Return the maximal predecessor of *name* in the DNSSEC ordering in the zone
+        whose origin is *origin*, or return the longest name under *origin* if the
+        name is origin (i.e. wrap around to the longest name, which may still be
+        *origin* due to length considerations.
+
+        The relativity of the name is preserved, so if this name is relative
+        then the method will return a relative name, and likewise if this name
+        is absolute then the predecessor will be absolute.
+
+        *prefix_ok* indicates if prefixing labels is allowed, and
+        defaults to ``True``.  Normally it is good to allow this, but if computing
+        a maximal predecessor at a zone cut point then ``False`` must be specified.
+        """
+        return _handle_relativity_and_call(
+            _absolute_predecessor, self, origin, prefix_ok
+        )
+
+    def successor(self, origin: "Name", prefix_ok: bool = True) -> "Name":
+        """Return the minimal successor of *name* in the DNSSEC ordering in the zone
+        whose origin is *origin*, or return *origin* if the successor cannot be
+        computed due to name length limitations.
+
+        Note that *origin* is returned in the "too long" cases because wrapping
+        around to the origin is how NSEC records express "end of the zone".
+
+        The relativity of the name is preserved, so if this name is relative
+        then the method will return a relative name, and likewise if this name
+        is absolute then the successor will be absolute.
+
+        *prefix_ok* indicates if prefixing a new minimal label is allowed, and
+        defaults to ``True``.  Normally it is good to allow this, but if computing
+        a minimal successor at a zone cut point then ``False`` must be specified.
+        """
+        return _handle_relativity_and_call(_absolute_successor, self, origin, prefix_ok)
+
 
 #: The root name, '.'
 root = Name([b""])
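
The two methods wrap the RFC 4471 helpers added below; in the common case the successor just prefixes a minimal \000 label. A small sketch:

    import dns.name

    origin = dns.name.from_text("example.com.")
    name = dns.name.from_text("www.example.com.")
    print(name.successor(origin))    # \000.www.example.com.
    # The predecessor decrements the last octet of the least significant
    # label and pads the result out to the maximal name length.
    print(name.predecessor(origin))
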
@@ -1082,3 +1123,161 @@ def from_wire(message: bytes, current: int) -> Tuple[Name, int]:
     parser = dns.wire.Parser(message, current)
     name = from_wire_parser(parser)
     return (name, parser.current - current)
+
+
+# RFC 4471 Support
+
+_MINIMAL_OCTET = b"\x00"
+_MINIMAL_OCTET_VALUE = ord(_MINIMAL_OCTET)
+_SUCCESSOR_PREFIX = Name([_MINIMAL_OCTET])
+_MAXIMAL_OCTET = b"\xff"
+_MAXIMAL_OCTET_VALUE = ord(_MAXIMAL_OCTET)
+_AT_SIGN_VALUE = ord("@")
+_LEFT_SQUARE_BRACKET_VALUE = ord("[")
+
+
+def _wire_length(labels):
+    return functools.reduce(lambda v, x: v + len(x) + 1, labels, 0)
+
+
+def _pad_to_max_name(name):
+    needed = 255 - _wire_length(name.labels)
+    new_labels = []
+    while needed > 64:
+        new_labels.append(_MAXIMAL_OCTET * 63)
+        needed -= 64
+    if needed >= 2:
+        new_labels.append(_MAXIMAL_OCTET * (needed - 1))
+    # Note we're already maximal in the needed == 1 case as while we'd like
+    # to add one more byte as a new label, we can't, as adding a new non-empty
+    # label requires at least 2 bytes.
+    new_labels = list(reversed(new_labels))
+    new_labels.extend(name.labels)
+    return Name(new_labels)
+
+
+def _pad_to_max_label(label, suffix_labels):
+    length = len(label)
+    # We have to subtract one here to account for the length byte of label.
+    remaining = 255 - _wire_length(suffix_labels) - length - 1
+    if remaining <= 0:
+        # Shouldn't happen!
+        return label
+    needed = min(63 - length, remaining)
+    return label + _MAXIMAL_OCTET * needed
+
+
+def _absolute_predecessor(name: Name, origin: Name, prefix_ok: bool) -> Name:
+    # This is the RFC 4471 predecessor algorithm using the "absolute method" of section
+    # 3.1.1.
+    #
+    # Our caller must ensure that the name and origin are absolute, and that name is a
+    # subdomain of origin.
+    if name == origin:
+        return _pad_to_max_name(name)
+    least_significant_label = name[0]
+    if least_significant_label == _MINIMAL_OCTET:
+        return name.parent()
+    least_octet = least_significant_label[-1]
+    suffix_labels = name.labels[1:]
+    if least_octet == _MINIMAL_OCTET_VALUE:
+        new_labels = [least_significant_label[:-1]]
+    else:
+        octets = bytearray(least_significant_label)
+        octet = octets[-1]
+        if octet == _LEFT_SQUARE_BRACKET_VALUE:
+            octet = _AT_SIGN_VALUE
+        else:
+            octet -= 1
+        octets[-1] = octet
+        least_significant_label = bytes(octets)
+        new_labels = [_pad_to_max_label(least_significant_label, suffix_labels)]
+    new_labels.extend(suffix_labels)
+    name = Name(new_labels)
+    if prefix_ok:
+        return _pad_to_max_name(name)
+    else:
+        return name
+
+
+def _absolute_successor(name: Name, origin: Name, prefix_ok: bool) -> Name:
+    # This is the RFC 4471 successor algorithm using the "absolute method" of section
+    # 3.1.2.
+    #
+    # Our caller must ensure that the name and origin are absolute, and that name is a
+    # subdomain of origin.
+    if prefix_ok:
+        # Try prefixing \000 as new label
+        try:
+            return _SUCCESSOR_PREFIX.concatenate(name)
+        except NameTooLong:
+            pass
+    while name != origin:
+        # Try extending the least significant label.
+        least_significant_label = name[0]
+        if len(least_significant_label) < 63:
+            # We may be able to extend the least label with a minimal additional byte.
+            # This is only "may" because we could have a maximal length name even though
+            # the least significant label isn't maximally long.
+            new_labels = [least_significant_label + _MINIMAL_OCTET]
+            new_labels.extend(name.labels[1:])
+            try:
+                return dns.name.Name(new_labels)
+            except dns.name.NameTooLong:
+                pass
+        # We can't extend the label either, so we'll try to increment the least
+        # signficant non-maximal byte in it.
+        octets = bytearray(least_significant_label)
+        # We do this reversed iteration with an explicit indexing variable because
+        # if we find something to increment, we're going to want to truncate everything
+        # to the right of it.
+        for i in range(len(octets) - 1, -1, -1):
+            octet = octets[i]
+            if octet == _MAXIMAL_OCTET_VALUE:
+                # We can't increment this, so keep looking.
+                continue
+            # Finally, something we can increment.  We have to apply a special rule for
+            # incrementing "@", sending it to "[", because RFC 4034 6.1 says that when
+            # comparing names, uppercase letters compare as if they were their
+            # lower-case equivalents.  If we increment "@" to "A", then it would compare
+            # as "a", which is after "[", "\", "]", "^", "_", and "`", so we would have
+            # skipped the most minimal successor, namely "[".
+            if octet == _AT_SIGN_VALUE:
+                octet = _LEFT_SQUARE_BRACKET_VALUE
+            else:
+                octet += 1
+            octets[i] = octet
+            # We can now truncate all of the maximal values we skipped (if any)
+            new_labels = [bytes(octets[: i + 1])]
+            new_labels.extend(name.labels[1:])
+            # We haven't changed the length of the name, so the Name constructor will
+            # always work.
+            return Name(new_labels)
+        # We couldn't increment, so chop off the least significant label and try
+        # again.
+        name = name.parent()
+
+    # We couldn't increment at all, so return the origin, as wrapping around is the
+    # DNSSEC way.
+    return origin
+
+
+def _handle_relativity_and_call(
+    function: Callable[[Name, Name, bool], Name],
+    name: Name,
+    origin: Name,
+    prefix_ok: bool,
+) -> Name:
+    # Make "name" absolute if needed, ensure that the origin is absolute,
+    # call function(), and then relativize the result if needed.
+    if not origin.is_absolute():
+        raise NeedAbsoluteNameOrOrigin
+    relative = not name.is_absolute()
+    if relative:
+        name = name.derelativize(origin)
+    elif not name.is_subdomain(origin):
+        raise NeedSubdomainOfOrigin
+    result_name = function(name, origin, prefix_ok)
+    if relative:
+        result_name = result_name.relativize(origin)
+    return result_name
@@ -115,6 +115,8 @@ class Do53Nameserver(AddressAndPortNameserver):
             raise_on_truncation=True,
             one_rr_per_rrset=one_rr_per_rrset,
             ignore_trailing=ignore_trailing,
+            ignore_errors=True,
+            ignore_unexpected=True,
         )
         return response
@@ -153,15 +155,25 @@ class Do53Nameserver(AddressAndPortNameserver):
             backend=backend,
             one_rr_per_rrset=one_rr_per_rrset,
             ignore_trailing=ignore_trailing,
+            ignore_errors=True,
+            ignore_unexpected=True,
         )
         return response
 
 
 class DoHNameserver(Nameserver):
-    def __init__(self, url: str, bootstrap_address: Optional[str] = None):
+    def __init__(
+        self,
+        url: str,
+        bootstrap_address: Optional[str] = None,
+        verify: Union[bool, str] = True,
+        want_get: bool = False,
+    ):
         super().__init__()
         self.url = url
         self.bootstrap_address = bootstrap_address
+        self.verify = verify
+        self.want_get = want_get
 
     def kind(self):
         return "DoH"
 
@@ -195,9 +207,13 @@ class DoHNameserver(Nameserver):
             request,
             self.url,
             timeout=timeout,
+            source=source,
+            source_port=source_port,
             bootstrap_address=self.bootstrap_address,
             one_rr_per_rrset=one_rr_per_rrset,
             ignore_trailing=ignore_trailing,
+            verify=self.verify,
+            post=(not self.want_get),
         )
 
     async def async_query(
 
@@ -215,15 +231,27 @@ class DoHNameserver(Nameserver):
             request,
             self.url,
             timeout=timeout,
+            source=source,
+            source_port=source_port,
+            bootstrap_address=self.bootstrap_address,
             one_rr_per_rrset=one_rr_per_rrset,
             ignore_trailing=ignore_trailing,
+            verify=self.verify,
+            post=(not self.want_get),
         )
 
 
 class DoTNameserver(AddressAndPortNameserver):
-    def __init__(self, address: str, port: int = 853, hostname: Optional[str] = None):
+    def __init__(
+        self,
+        address: str,
+        port: int = 853,
+        hostname: Optional[str] = None,
+        verify: Union[bool, str] = True,
+    ):
         super().__init__(address, port)
         self.hostname = hostname
+        self.verify = verify
 
     def kind(self):
         return "DoT"
 
@@ -246,6 +274,7 @@ class DoTNameserver(AddressAndPortNameserver):
             one_rr_per_rrset=one_rr_per_rrset,
             ignore_trailing=ignore_trailing,
             server_hostname=self.hostname,
+            verify=self.verify,
        )
 
     async def async_query(
 
@@ -267,6 +296,7 @@ class DoTNameserver(AddressAndPortNameserver):
             one_rr_per_rrset=one_rr_per_rrset,
             ignore_trailing=ignore_trailing,
             server_hostname=self.hostname,
+            verify=self.verify,
         )
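
The expanded constructors surface verify and want_get at the resolver layer. A hedged sketch (the URL is illustrative, and network access is assumed):

    import dns.nameserver
    import dns.resolver

    res = dns.resolver.Resolver(configure=False)
    # want_get=True switches DoH from POST to GET (RFC 8484 permits both).
    res.nameservers = [
        dns.nameserver.DoHNameserver("https://dns.google/dns-query", want_get=True)
    ]
    print(res.resolve("example.com", "A")[0])
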
@@ -70,7 +70,6 @@ class NodeKind(enum.Enum):
 
 
 class Node:
-
     """A Node is a set of rdatasets.
 
     A node is either a CNAME node or an "other data" node.  A CNAME
195 lib/dns/query.py
@@ -22,12 +22,14 @@ import contextlib
 import enum
 import errno
 import os
+import os.path
 import selectors
 import socket
 import struct
 import time
 from typing import Any, Dict, Optional, Tuple, Union
 
+import dns._features
 import dns.exception
 import dns.inet
 import dns.message
@@ -57,24 +59,14 @@ def _expiration_for_this_attempt(timeout, expiration):
     return min(time.time() + timeout, expiration)
 
 
-_have_httpx = False
-_have_http2 = False
-try:
-    import httpcore
+_have_httpx = dns._features.have("doh")
+if _have_httpx:
     import httpcore._backends.sync
     import httpx
 
     _CoreNetworkBackend = httpcore.NetworkBackend
     _CoreSyncStream = httpcore._backends.sync.SyncStream
 
-    _have_httpx = True
-    try:
-        # See if http2 support is available.
-        with httpx.Client(http2=True):
-            _have_http2 = True
-    except Exception:
-        pass
-
     class _NetworkBackend(_CoreNetworkBackend):
         def __init__(self, resolver, local_port, bootstrap_address, family):
             super().__init__()
@@ -147,7 +139,7 @@ try:
                 resolver, local_port, bootstrap_address, family
             )
 
-except ImportError:  # pragma: no cover
+else:
 
     class _HTTPTransport:  # type: ignore
         def connect_tcp(self, host, port, timeout, local_address):
 
@@ -161,6 +153,8 @@ try:
 except ImportError:  # pragma: no cover
 
     class ssl:  # type: ignore
+        CERT_NONE = 0
+
         class WantReadException(Exception):
             pass
@@ -459,7 +453,7 @@ def https(
     transport = _HTTPTransport(
         local_address=local_address,
         http1=True,
-        http2=_have_http2,
+        http2=True,
         verify=verify,
         local_port=local_port,
         bootstrap_address=bootstrap_address,
 
@@ -470,9 +464,7 @@ def https(
     if session:
         cm: contextlib.AbstractContextManager = contextlib.nullcontext(session)
     else:
-        cm = httpx.Client(
-            http1=True, http2=_have_http2, verify=verify, transport=transport
-        )
+        cm = httpx.Client(http1=True, http2=True, verify=verify, transport=transport)
     with cm as session:
         # see https://tools.ietf.org/html/rfc8484#section-4.1.1 for DoH
         # GET and POST examples
@@ -577,6 +569,8 @@ def receive_udp(
     request_mac: Optional[bytes] = b"",
     ignore_trailing: bool = False,
     raise_on_truncation: bool = False,
+    ignore_errors: bool = False,
+    query: Optional[dns.message.Message] = None,
 ) -> Any:
     """Read a DNS message from a UDP socket.
@@ -617,28 +611,58 @@ def receive_udp(
     ``(dns.message.Message, float, tuple)``
     tuple of the received message, the received time, and the address where
     the message arrived from.
+
+    *ignore_errors*, a ``bool``.  If various format errors or response
+    mismatches occur, ignore them and keep listening for a valid response.
+    The default is ``False``.
+
+    *query*, a ``dns.message.Message`` or ``None``.  If not ``None`` and
+    *ignore_errors* is ``True``, check that the received message is a response
+    to this query, and if not keep listening for a valid response.
     """
 
     wire = b""
     while True:
         (wire, from_address) = _udp_recv(sock, 65535, expiration)
-        if _matches_destination(
+        if not _matches_destination(
             sock.family, from_address, destination, ignore_unexpected
         ):
-            break
-    received_time = time.time()
-    r = dns.message.from_wire(
-        wire,
-        keyring=keyring,
-        request_mac=request_mac,
-        one_rr_per_rrset=one_rr_per_rrset,
-        ignore_trailing=ignore_trailing,
-        raise_on_truncation=raise_on_truncation,
-    )
-    if destination:
-        return (r, received_time)
-    else:
-        return (r, received_time, from_address)
+            continue
+        received_time = time.time()
+        try:
+            r = dns.message.from_wire(
+                wire,
+                keyring=keyring,
+                request_mac=request_mac,
+                one_rr_per_rrset=one_rr_per_rrset,
+                ignore_trailing=ignore_trailing,
+                raise_on_truncation=raise_on_truncation,
+            )
+        except dns.message.Truncated as e:
+            # If we got Truncated and not FORMERR, we at least got the header with TC
+            # set, and very likely the question section, so we'll re-raise if the
+            # message seems to be a response as we need to know when truncation happens.
+            # We need to check that it seems to be a response as we don't want a random
+            # injected message with TC set to cause us to bail out.
+            if (
+                ignore_errors
+                and query is not None
+                and not query.is_response(e.message())
+            ):
+                continue
+            else:
+                raise
+        except Exception:
+            if ignore_errors:
+                continue
+            else:
+                raise
+        if ignore_errors and query is not None and not query.is_response(r):
+            continue
+        if destination:
+            return (r, received_time)
+        else:
+            return (r, received_time, from_address)
 
 
 def udp(
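
In ignore_errors mode, receive_udp() keeps listening through malformed datagrams and responses that do not match *query*, instead of raising. From the caller's side (the resolver address is illustrative):

    import dns.message
    import dns.query

    q = dns.message.make_query("example.com.", "A")
    # Malformed or mismatched datagrams are skipped until a valid
    # response to q arrives (or the timeout expires).
    r = dns.query.udp(q, "8.8.8.8", timeout=2, ignore_errors=True)
    print(r.rcode())
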
@@ -653,6 +677,7 @@ def udp(
     ignore_trailing: bool = False,
     raise_on_truncation: bool = False,
     sock: Optional[Any] = None,
+    ignore_errors: bool = False,
 ) -> dns.message.Message:
     """Return the response obtained after sending a query via UDP.
 
@@ -689,6 +714,10 @@ def udp(
     if a socket is provided, it must be a nonblocking datagram socket,
     and the *source* and *source_port* are ignored.
 
+    *ignore_errors*, a ``bool``.  If various format errors or response
+    mismatches occur, ignore them and keep listening for a valid response.
+    The default is ``False``.
+
     Returns a ``dns.message.Message``.
     """
 
@@ -713,9 +742,13 @@ def udp(
             q.mac,
             ignore_trailing,
             raise_on_truncation,
+            ignore_errors,
+            q,
         )
         r.time = received_time - begin_time
-        if not q.is_response(r):
+        # We don't need to check q.is_response() if we are in ignore_errors mode
+        # as receive_udp() will have checked it.
+        if not (ignore_errors or q.is_response(r)):
             raise BadResponse
         return r
     assert (
@@ -735,48 +768,50 @@ def udp_with_fallback(
     ignore_trailing: bool = False,
     udp_sock: Optional[Any] = None,
     tcp_sock: Optional[Any] = None,
+    ignore_errors: bool = False,
 ) -> Tuple[dns.message.Message, bool]:
     """Return the response to the query, trying UDP first and falling back
     to TCP if UDP results in a truncated response.
 
     *q*, a ``dns.message.Message``, the query to send
 
-    *where*, a ``str`` containing an IPv4 or IPv6 address, where
-    to send the message.
+    *where*, a ``str`` containing an IPv4 or IPv6 address, where to send the message.
 
-    *timeout*, a ``float`` or ``None``, the number of seconds to wait before the
-    query times out. If ``None``, the default, wait forever.
+    *timeout*, a ``float`` or ``None``, the number of seconds to wait before the query
+    times out.  If ``None``, the default, wait forever.
 
     *port*, an ``int``, the port send the message to.  The default is 53.
 
-    *source*, a ``str`` containing an IPv4 or IPv6 address, specifying
-    the source address.  The default is the wildcard address.
+    *source*, a ``str`` containing an IPv4 or IPv6 address, specifying the source
+    address.  The default is the wildcard address.
 
-    *source_port*, an ``int``, the port from which to send the message.
-    The default is 0.
+    *source_port*, an ``int``, the port from which to send the message.  The default is
+    0.
 
-    *ignore_unexpected*, a ``bool``.  If ``True``, ignore responses from
-    unexpected sources.
+    *ignore_unexpected*, a ``bool``.  If ``True``, ignore responses from unexpected
+    sources.
 
-    *one_rr_per_rrset*, a ``bool``.  If ``True``, put each RR into its own
-    RRset.
+    *one_rr_per_rrset*, a ``bool``.  If ``True``, put each RR into its own RRset.
 
-    *ignore_trailing*, a ``bool``.  If ``True``, ignore trailing
-    junk at end of the received message.
+    *ignore_trailing*, a ``bool``.  If ``True``, ignore trailing junk at end of the
+    received message.
 
-    *udp_sock*, a ``socket.socket``, or ``None``, the socket to use for the
-    UDP query.  If ``None``, the default, a socket is created.  Note that
-    if a socket is provided, it must be a nonblocking datagram socket,
-    and the *source* and *source_port* are ignored for the UDP query.
+    *udp_sock*, a ``socket.socket``, or ``None``, the socket to use for the UDP query.
+    If ``None``, the default, a socket is created.  Note that if a socket is provided,
+    it must be a nonblocking datagram socket, and the *source* and *source_port* are
+    ignored for the UDP query.
 
     *tcp_sock*, a ``socket.socket``, or ``None``, the connected socket to use for the
-    TCP query.  If ``None``, the default, a socket is created.  Note that
-    if a socket is provided, it must be a nonblocking connected stream
-    socket, and *where*, *source* and *source_port* are ignored for the TCP
-    query.
+    TCP query.  If ``None``, the default, a socket is created.  Note that if a socket is
+    provided, it must be a nonblocking connected stream socket, and *where*, *source*
+    and *source_port* are ignored for the TCP query.
 
-    Returns a (``dns.message.Message``, tcp) tuple where tcp is ``True``
-    if and only if TCP was used.
+    *ignore_errors*, a ``bool``.  If various format errors or response mismatches occur
+    while listening for UDP, ignore them and keep listening for a valid response.  The
+    default is ``False``.
+
+    Returns a (``dns.message.Message``, tcp) tuple where tcp is ``True`` if and only if
+    TCP was used.
     """
     try:
         response = udp(
 
@@ -791,6 +826,7 @@ def udp_with_fallback(
             ignore_trailing,
             True,
             udp_sock,
+            ignore_errors,
         )
         return (response, False)
     except dns.message.Truncated:
@@ -864,14 +900,12 @@ def send_tcp(
     """
 
     if isinstance(what, dns.message.Message):
-        wire = what.to_wire()
+        tcpmsg = what.to_wire(prepend_length=True)
     else:
-        wire = what
-    l = len(wire)
-    # copying the wire into tcpmsg is inefficient, but lets us
-    # avoid writev() or doing a short write that would get pushed
-    # onto the net
-    tcpmsg = struct.pack("!H", l) + wire
+        # copying the wire into tcpmsg is inefficient, but lets us
+        # avoid writev() or doing a short write that would get pushed
+        # onto the net
+        tcpmsg = len(what).to_bytes(2, "big") + what
     sent_time = time.time()
     _net_write(sock, tcpmsg, expiration)
     return (len(tcpmsg), sent_time)
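
The old struct.pack("!H", ...) prefix and the new int.to_bytes() spelling produce identical bytes; the change just folds the prefix into to_wire() for Message inputs:

    import struct

    n = 512
    assert struct.pack("!H", n) == n.to_bytes(2, "big")
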
@@ -1014,6 +1048,28 @@ def _tls_handshake(s, expiration):
             _wait_for_writable(s, expiration)
 
 
+def _make_dot_ssl_context(
+    server_hostname: Optional[str], verify: Union[bool, str]
+) -> ssl.SSLContext:
+    cafile: Optional[str] = None
+    capath: Optional[str] = None
+    if isinstance(verify, str):
+        if os.path.isfile(verify):
+            cafile = verify
+        elif os.path.isdir(verify):
+            capath = verify
+        else:
+            raise ValueError("invalid verify string")
+    ssl_context = ssl.create_default_context(cafile=cafile, capath=capath)
+    ssl_context.minimum_version = ssl.TLSVersion.TLSv1_2
+    if server_hostname is None:
+        ssl_context.check_hostname = False
+    ssl_context.set_alpn_protocols(["dot"])
+    if verify is False:
+        ssl_context.verify_mode = ssl.CERT_NONE
+    return ssl_context
+
+
 def tls(
     q: dns.message.Message,
     where: str,
@@ -1026,6 +1082,7 @@ def tls(
     sock: Optional[ssl.SSLSocket] = None,
     ssl_context: Optional[ssl.SSLContext] = None,
     server_hostname: Optional[str] = None,
+    verify: Union[bool, str] = True,
 ) -> dns.message.Message:
     """Return the response obtained after sending a query via TLS.
 
@@ -1065,6 +1122,11 @@ def tls(
     default is ``None``, which means that no hostname is known, and if an
     SSL context is created, hostname checking will be disabled.
 
+    *verify*, a ``bool`` or ``str``.  If a ``True``, then TLS certificate verification
+    of the server is done using the default CA bundle; if ``False``, then no
+    verification is done; if a `str` then it specifies the path to a certificate file or
+    directory which will be used for verification.
+
     Returns a ``dns.message.Message``.
 
     """
@@ -1091,10 +1153,7 @@ def tls(
             where, port, source, source_port
         )
     if ssl_context is None and not sock:
-        ssl_context = ssl.create_default_context()
-        ssl_context.minimum_version = ssl.TLSVersion.TLSv1_2
-        if server_hostname is None:
-            ssl_context.check_hostname = False
+        ssl_context = _make_dot_ssl_context(server_hostname, verify)
 
     with _make_socket(
         af,
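
_make_dot_ssl_context() centralizes DoT context setup (TLS 1.2 floor, "dot" ALPN, optional CA file/dir) for both the sync and async paths. A sketch of the new verify knob (server address and hostname are illustrative):

    import dns.message
    import dns.query

    q = dns.message.make_query("example.com.", "A")
    # verify may be True (default CA bundle), False (no verification),
    # or a path to a CA file/directory.
    r = dns.query.tls(q, "1.1.1.1", server_hostname="cloudflare-dns.com", verify=True)
    print(r.rcode())
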
@@ -1,9 +1,11 @@
 # Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
 
-try:
+import dns._features
+import dns.asyncbackend
+
+if dns._features.have("doq"):
     import aioquic.quic.configuration  # type: ignore
 
-    import dns.asyncbackend
     from dns._asyncbackend import NullContext
     from dns.quic._asyncio import (
         AsyncioQuicConnection,
 
@@ -17,7 +19,7 @@ try:
 
     def null_factory(
         *args,  # pylint: disable=unused-argument
-        **kwargs  # pylint: disable=unused-argument
+        **kwargs,  # pylint: disable=unused-argument
     ):
         return NullContext(None)
 
@@ -31,7 +33,7 @@ try:
 
     _async_factories = {"asyncio": (null_factory, _asyncio_manager_factory)}
 
-    try:
+    if dns._features.have("trio"):
         import trio
 
         from dns.quic._trio import (  # pylint: disable=ungrouped-imports
 
@@ -47,15 +49,13 @@ try:
             return TrioQuicManager(context, *args, **kwargs)
 
         _async_factories["trio"] = (_trio_context_factory, _trio_manager_factory)
-    except ImportError:
-        pass
 
     def factories_for_backend(backend=None):
         if backend is None:
             backend = dns.asyncbackend.get_default_backend()
         return _async_factories[backend.name()]
 
-except ImportError:
+else:  # pragma: no cover
     have_quic = False
 
     from typing import Any
@@ -101,9 +101,7 @@ class AsyncioQuicConnection(AsyncQuicConnection):
                 )
                 if address[0] != self._peer[0] or address[1] != self._peer[1]:
                     continue
-                self._connection.receive_datagram(
-                    datagram, self._peer[0], time.time()
-                )
+                self._connection.receive_datagram(datagram, address, time.time())
                 # Wake up the timer in case the sender is sleeping, as there may be
                 # stuff to send now.
                 async with self._wake_timer:
 
@@ -125,7 +123,7 @@ class AsyncioQuicConnection(AsyncQuicConnection):
             while not self._done:
                 datagrams = self._connection.datagrams_to_send(time.time())
                 for datagram, address in datagrams:
-                    assert address == self._peer[0]
+                    assert address == self._peer
                     await self._socket.sendto(datagram, self._peer, None)
                 (expiration, interval) = self._get_timer_values()
                 try:
 
@@ -147,11 +145,14 @@ class AsyncioQuicConnection(AsyncQuicConnection):
                     await stream._add_input(event.data, event.end_stream)
                 elif isinstance(event, aioquic.quic.events.HandshakeCompleted):
                     self._handshake_complete.set()
-                elif isinstance(
-                    event, aioquic.quic.events.ConnectionTerminated
-                ) or isinstance(event, aioquic.quic.events.StreamReset):
+                elif isinstance(event, aioquic.quic.events.ConnectionTerminated):
                     self._done = True
                     self._receiver_task.cancel()
+                elif isinstance(event, aioquic.quic.events.StreamReset):
+                    stream = self._streams.get(event.stream_id)
+                    if stream:
+                        await stream._add_input(b"", True)
+
                 count += 1
                 if count > 10:
                     # yield
@@ -188,7 +189,6 @@ class AsyncioQuicConnection(AsyncQuicConnection):
             self._connection.close()
             # sender might be blocked on this, so set it
             self._socket_created.set()
-            await self._socket.close()
             async with self._wake_timer:
                 self._wake_timer.notify_all()
             try:
 
@@ -199,14 +199,19 @@ class AsyncioQuicConnection(AsyncQuicConnection):
                 await self._sender_task
             except asyncio.CancelledError:
                 pass
+            await self._socket.close()
 
 
 class AsyncioQuicManager(AsyncQuicManager):
     def __init__(self, conf=None, verify_mode=ssl.CERT_REQUIRED, server_name=None):
         super().__init__(conf, verify_mode, AsyncioQuicConnection, server_name)
 
-    def connect(self, address, port=853, source=None, source_port=0):
-        (connection, start) = self._connect(address, port, source, source_port)
+    def connect(
+        self, address, port=853, source=None, source_port=0, want_session_ticket=True
+    ):
+        (connection, start) = self._connect(
+            address, port, source, source_port, want_session_ticket
+        )
         if start:
             connection.run()
         return connection
@@ -1,5 +1,7 @@
 # Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
 
+import copy
+import functools
 import socket
 import struct
 import time
 
@@ -11,6 +13,10 @@ import aioquic.quic.connection  # type: ignore
 import dns.inet
 
 QUIC_MAX_DATAGRAM = 2048
+MAX_SESSION_TICKETS = 8
+# If we hit the max sessions limit we will delete this many of the oldest connections.
+# The value must be a integer > 0 and <= MAX_SESSION_TICKETS.
+SESSIONS_TO_DELETE = MAX_SESSION_TICKETS // 4
 
 
 class UnexpectedEOF(Exception):
@@ -79,7 +85,10 @@ class BaseQuicStream:
 
     def _common_add_input(self, data, is_end):
         self._buffer.put(data, is_end)
-        return self._expecting > 0 and self._buffer.have(self._expecting)
+        try:
+            return self._expecting > 0 and self._buffer.have(self._expecting)
+        except UnexpectedEOF:
+            return True
 
     def _close(self):
         self._connection.close_stream(self._stream_id)

@ -142,6 +151,7 @@ class BaseQuicManager:
    def __init__(self, conf, verify_mode, connection_factory, server_name=None):
        self._connections = {}
        self._connection_factory = connection_factory
        self._session_tickets = {}
        if conf is None:
            verify_path = None
            if isinstance(verify_mode, str):

@ -156,12 +166,35 @@ class BaseQuicManager:
                conf.load_verify_locations(verify_path)
        self._conf = conf

    def _connect(self, address, port=853, source=None, source_port=0):
    def _connect(
        self, address, port=853, source=None, source_port=0, want_session_ticket=True
    ):
        connection = self._connections.get((address, port))
        if connection is not None:
            return (connection, False)
        qconn = aioquic.quic.connection.QuicConnection(configuration=self._conf)
        qconn.connect(address, time.time())
        conf = self._conf
        if want_session_ticket:
            try:
                session_ticket = self._session_tickets.pop((address, port))
                # We found a session ticket, so make a configuration that uses it.
                conf = copy.copy(conf)
                conf.session_ticket = session_ticket
            except KeyError:
                # No session ticket.
                pass
            # Whether or not we found a session ticket, we want a handler to save
            # one.
            session_ticket_handler = functools.partial(
                self.save_session_ticket, address, port
            )
        else:
            session_ticket_handler = None
        qconn = aioquic.quic.connection.QuicConnection(
            configuration=conf,
            session_ticket_handler=session_ticket_handler,
        )
        lladdress = dns.inet.low_level_address_tuple((address, port))
        qconn.connect(lladdress, time.time())
        connection = self._connection_factory(
            qconn, address, port, source, source_port, self
        )

@ -174,6 +207,17 @@ class BaseQuicManager:
        except KeyError:
            pass

    def save_session_ticket(self, address, port, ticket):
        # We rely on dictionaries keys() being in insertion order here. We
        # can't just popitem() as that would be LIFO which is the opposite of
        # what we want.
        l = len(self._session_tickets)
        if l >= MAX_SESSION_TICKETS:
            keys_to_delete = list(self._session_tickets.keys())[0:SESSIONS_TO_DELETE]
            for key in keys_to_delete:
                del self._session_tickets[key]
        self._session_tickets[(address, port)] = ticket


class AsyncQuicManager(BaseQuicManager):
    def connect(self, address, port=853, source=None, source_port=0):
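
The save_session_ticket() method added above is a small FIFO cache: it relies on dict insertion order and evicts the oldest quarter of entries once MAX_SESSION_TICKETS is reached. A standalone sketch of the same policy (module-level names here are illustrative, not library API):

    MAX_SESSION_TICKETS = 8
    # Evict this many of the oldest entries when full; must be an integer
    # > 0 and <= MAX_SESSION_TICKETS.
    SESSIONS_TO_DELETE = MAX_SESSION_TICKETS // 4

    session_tickets = {}

    def save_session_ticket(address, port, ticket):
        # Dicts iterate in insertion order, so the first keys are the oldest;
        # popitem() would remove the newest entry (LIFO), the opposite of
        # what a cache wants here.
        if len(session_tickets) >= MAX_SESSION_TICKETS:
            for key in list(session_tickets.keys())[:SESSIONS_TO_DELETE]:
                del session_tickets[key]
        session_tickets[(address, port)] = ticket
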

@ -82,10 +82,6 @@ class SyncQuicConnection(BaseQuicConnection):
    def __init__(self, connection, address, port, source, source_port, manager):
        super().__init__(connection, address, port, source, source_port, manager)
        self._socket = socket.socket(self._af, socket.SOCK_DGRAM, 0)
        self._socket.connect(self._peer)
        (self._send_wakeup, self._receive_wakeup) = socket.socketpair()
        self._receive_wakeup.setblocking(False)
        self._socket.setblocking(False)
        if self._source is not None:
            try:
                self._socket.bind(

@ -94,6 +90,10 @@ class SyncQuicConnection(BaseQuicConnection):
            except Exception:
                self._socket.close()
                raise
        self._socket.connect(self._peer)
        (self._send_wakeup, self._receive_wakeup) = socket.socketpair()
        self._receive_wakeup.setblocking(False)
        self._socket.setblocking(False)
        self._handshake_complete = threading.Event()
        self._worker_thread = None
        self._lock = threading.Lock()

@ -107,7 +107,7 @@ class SyncQuicConnection(BaseQuicConnection):
        except BlockingIOError:
            return
        with self._lock:
            self._connection.receive_datagram(datagram, self._peer[0], time.time())
            self._connection.receive_datagram(datagram, self._peer, time.time())

    def _drain_wakeup(self):
        while True:

@ -128,6 +128,8 @@ class SyncQuicConnection(BaseQuicConnection):
                        key.data()
                with self._lock:
                    self._handle_timer(expiration)
                self._handle_events()
                with self._lock:
                    datagrams = self._connection.datagrams_to_send(time.time())
                for datagram, _ in datagrams:
                    try:

@ -135,7 +137,6 @@ class SyncQuicConnection(BaseQuicConnection):
                    except BlockingIOError:
                        # we let QUIC handle any lossage
                        pass
                self._handle_events()
        finally:
            with self._lock:
                self._done = True

@ -155,11 +156,14 @@ class SyncQuicConnection(BaseQuicConnection):
                    stream._add_input(event.data, event.end_stream)
            elif isinstance(event, aioquic.quic.events.HandshakeCompleted):
                self._handshake_complete.set()
            elif isinstance(
                event, aioquic.quic.events.ConnectionTerminated
            ) or isinstance(event, aioquic.quic.events.StreamReset):
            elif isinstance(event, aioquic.quic.events.ConnectionTerminated):
                with self._lock:
                    self._done = True
            elif isinstance(event, aioquic.quic.events.StreamReset):
                with self._lock:
                    stream = self._streams.get(event.stream_id)
                if stream:
                    stream._add_input(b"", True)

    def write(self, stream, data, is_end=False):
        with self._lock:

@ -203,9 +207,13 @@ class SyncQuicManager(BaseQuicManager):
        super().__init__(conf, verify_mode, SyncQuicConnection, server_name)
        self._lock = threading.Lock()

    def connect(self, address, port=853, source=None, source_port=0):
    def connect(
        self, address, port=853, source=None, source_port=0, want_session_ticket=True
    ):
        with self._lock:
            (connection, start) = self._connect(address, port, source, source_port)
            (connection, start) = self._connect(
                address, port, source, source_port, want_session_ticket
            )
            if start:
                connection.run()
            return connection

@ -214,6 +222,10 @@ class SyncQuicManager(BaseQuicManager):
        with self._lock:
            super().closed(address, port)

    def save_session_ticket(self, address, port, ticket):
        with self._lock:
            super().save_session_ticket(address, port, ticket)

    def __enter__(self):
        return self
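
With __enter__ added above, SyncQuicManager becomes usable as a context manager (the matching __exit__ falls outside this hunk). A hedged sketch, assuming the class is importable as shown and that the manager's __exit__ tears down its connections:

    from dns.quic import SyncQuicManager  # assumed import path

    with SyncQuicManager(server_name="dns.example.net") as manager:
        # Opt out of resumption: no session_ticket_handler is installed and
        # no ticket is cached for this peer.
        conn = manager.connect("203.0.113.1", 853, want_session_ticket=False)
        # ... open streams on conn via the manager's lock-protected API ...
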

@ -76,30 +76,43 @@ class TrioQuicConnection(AsyncQuicConnection):
    def __init__(self, connection, address, port, source, source_port, manager=None):
        super().__init__(connection, address, port, source, source_port, manager)
        self._socket = trio.socket.socket(self._af, socket.SOCK_DGRAM, 0)
        if self._source:
            trio.socket.bind(dns.inet.low_level_address_tuple(self._source, self._af))
        self._handshake_complete = trio.Event()
        self._run_done = trio.Event()
        self._worker_scope = None
        self._send_pending = False

    async def _worker(self):
        try:
            if self._source:
                await self._socket.bind(
                    dns.inet.low_level_address_tuple(self._source, self._af)
                )
            await self._socket.connect(self._peer)
            while not self._done:
                (expiration, interval) = self._get_timer_values(False)
                if self._send_pending:
                    # Do not block forever if sends are pending. Even though we
                    # have a wake-up mechanism if we've already started the blocking
                    # read, the possibility of context switching in send means that
                    # more writes can happen while we have no wake up context, so
                    # we need self._send_pending to avoid (effectively) a "lost wakeup"
                    # race.
                    interval = 0.0
                with trio.CancelScope(
                    deadline=trio.current_time() + interval
                ) as self._worker_scope:
                    datagram = await self._socket.recv(QUIC_MAX_DATAGRAM)
                    self._connection.receive_datagram(
                        datagram, self._peer[0], time.time()
                    )
                    self._connection.receive_datagram(datagram, self._peer, time.time())
                self._worker_scope = None
                self._handle_timer(expiration)
                await self._handle_events()
                # We clear this now, before sending anything, as sending can cause
                # context switches that do more sends. We want to know if that
                # happens so we don't block a long time on the recv() above.
                self._send_pending = False
                datagrams = self._connection.datagrams_to_send(time.time())
                for datagram, _ in datagrams:
                    await self._socket.send(datagram)
                await self._handle_events()
        finally:
            self._done = True
            self._handshake_complete.set()

@ -116,11 +129,13 @@ class TrioQuicConnection(AsyncQuicConnection):
                    await stream._add_input(event.data, event.end_stream)
            elif isinstance(event, aioquic.quic.events.HandshakeCompleted):
                self._handshake_complete.set()
            elif isinstance(
                event, aioquic.quic.events.ConnectionTerminated
            ) or isinstance(event, aioquic.quic.events.StreamReset):
            elif isinstance(event, aioquic.quic.events.ConnectionTerminated):
                self._done = True
                self._socket.close()
            elif isinstance(event, aioquic.quic.events.StreamReset):
                stream = self._streams.get(event.stream_id)
                if stream:
                    await stream._add_input(b"", True)
            count += 1
            if count > 10:
                # yield

@ -129,6 +144,7 @@ class TrioQuicConnection(AsyncQuicConnection):

    async def write(self, stream, data, is_end=False):
        self._connection.send_stream_data(stream, data, is_end)
        self._send_pending = True
        if self._worker_scope is not None:
            self._worker_scope.cancel()

@ -159,6 +175,7 @@ class TrioQuicConnection(AsyncQuicConnection):
            self._manager.closed(self._peer[0], self._peer[1])
            self._closed = True
            self._connection.close()
            self._send_pending = True
            if self._worker_scope is not None:
                self._worker_scope.cancel()
            await self._run_done.wait()

@ -171,8 +188,12 @@ class TrioQuicManager(AsyncQuicManager):
        super().__init__(conf, verify_mode, TrioQuicConnection, server_name)
        self._nursery = nursery

    def connect(self, address, port=853, source=None, source_port=0):
        (connection, start) = self._connect(address, port, source, source_port)
    def connect(
        self, address, port=853, source=None, source_port=0, want_session_ticket=True
    ):
        (connection, start) = self._connect(
            address, port, source, source_port, want_session_ticket
        )
        if start:
            self._nursery.start_soon(connection.run)
        return connection
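
The recurring trick in the trio worker above is the pair (_send_pending, _worker_scope): a writer cancels the worker's receive scope if the worker is already blocked in recv(), and the flag covers writes that land while no scope exists, avoiding a lost wakeup. A minimal standalone illustration of the pattern (class and method names, and the 2048-byte read, are illustrative):

    import trio

    class Pump:
        def __init__(self, sock):
            self._sock = sock
            self._send_pending = False
            self._worker_scope = None

        async def worker(self, interval):
            while True:
                # If a send sneaked in while no scope existed, don't block.
                wait = 0.0 if self._send_pending else interval
                with trio.CancelScope(
                    deadline=trio.current_time() + wait
                ) as self._worker_scope:
                    await self._sock.recv(2048)
                self._worker_scope = None
                # Clear before flushing, so sends triggered during the flush
                # are noticed on the next loop iteration.
                self._send_pending = False
                # ... flush outbound datagrams here ...

        def request_send(self):
            self._send_pending = True          # covers the "no scope yet" window
            if self._worker_scope is not None:
                self._worker_scope.cancel()    # wakes a blocked recv()
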

@ -199,7 +199,7 @@ class Rdata:
        self,
        origin: Optional[dns.name.Name] = None,
        relativize: bool = True,
        **kw: Dict[str, Any]
        **kw: Dict[str, Any],
    ) -> str:
        """Convert an rdata to text format.

@ -547,9 +547,7 @@ class Rdata:
    @classmethod
    def _as_ipv4_address(cls, value):
        if isinstance(value, str):
            # call to check validity
            dns.ipv4.inet_aton(value)
            return value
            return dns.ipv4.canonicalize(value)
        elif isinstance(value, bytes):
            return dns.ipv4.inet_ntoa(value)
        else:

@ -558,9 +556,7 @@ class Rdata:
    @classmethod
    def _as_ipv6_address(cls, value):
        if isinstance(value, str):
            # call to check validity
            dns.ipv6.inet_aton(value)
            return value
            return dns.ipv6.canonicalize(value)
        elif isinstance(value, bytes):
            return dns.ipv6.inet_ntoa(value)
        else:

@ -604,7 +600,6 @@ class Rdata:

@dns.immutable.immutable
class GenericRdata(Rdata):

    """Generic Rdata Class

    This class is used for rdata types for which we have no better

@ -621,7 +616,7 @@ class GenericRdata(Rdata):
        self,
        origin: Optional[dns.name.Name] = None,
        relativize: bool = True,
        **kw: Dict[str, Any]
        **kw: Dict[str, Any],
    ) -> str:
        return r"\# %d " % len(self.data) + _hexify(self.data, **kw)

@ -647,9 +642,9 @@ class GenericRdata(Rdata):
        return cls(rdclass, rdtype, parser.get_remaining())


_rdata_classes: Dict[
    Tuple[dns.rdataclass.RdataClass, dns.rdatatype.RdataType], Any
] = {}
_rdata_classes: Dict[Tuple[dns.rdataclass.RdataClass, dns.rdatatype.RdataType], Any] = (
    {}
)
_module_prefix = "dns.rdtypes"
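
The _as_ipv4_address/_as_ipv6_address change above is behavioral, not just stylistic: string input used to be validated and returned verbatim, whereas it is now canonicalized, so equivalent textual spellings of one address yield a single representation. A rough stdlib-only sketch of the idea (dnspython's dns.ipv4.canonicalize uses its own stricter parser; this merely mirrors the round-trip):

    import socket

    def canonicalize_ipv4(value):
        if isinstance(value, str):
            # Parse then re-format, so variant spellings normalize to one
            # canonical text form instead of passing through unchanged.
            return socket.inet_ntoa(socket.inet_aton(value))
        elif isinstance(value, bytes):
            return socket.inet_ntoa(value)
        raise ValueError("not an IPv4 address")
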

@ -28,6 +28,7 @@ import dns.name
import dns.rdata
import dns.rdataclass
import dns.rdatatype
import dns.renderer
import dns.set
import dns.ttl

@ -45,7 +46,6 @@ class IncompatibleTypes(dns.exception.DNSException):


class Rdataset(dns.set.Set):

    """A DNS rdataset."""

    __slots__ = ["rdclass", "rdtype", "covers", "ttl"]

@ -316,11 +316,9 @@ class Rdataset(dns.set.Set):
            want_shuffle = False
        else:
            rdclass = self.rdclass
        file.seek(0, io.SEEK_END)
        if len(self) == 0:
            name.to_wire(file, compress, origin)
            stuff = struct.pack("!HHIH", self.rdtype, rdclass, 0, 0)
            file.write(stuff)
            file.write(struct.pack("!HHIH", self.rdtype, rdclass, 0, 0))
            return 1
        else:
            l: Union[Rdataset, List[dns.rdata.Rdata]]

@ -331,16 +329,9 @@ class Rdataset(dns.set.Set):
                l = self
            for rd in l:
                name.to_wire(file, compress, origin)
                stuff = struct.pack("!HHIH", self.rdtype, rdclass, self.ttl, 0)
                file.write(stuff)
                start = file.tell()
                rd.to_wire(file, compress, origin)
                end = file.tell()
                assert end - start < 65536
                file.seek(start - 2)
                stuff = struct.pack("!H", end - start)
                file.write(stuff)
                file.seek(0, io.SEEK_END)
                file.write(struct.pack("!HHI", self.rdtype, rdclass, self.ttl))
                with dns.renderer.prefixed_length(file, 2):
                    rd.to_wire(file, compress, origin)
            return len(self)

    def match(

@ -373,7 +364,6 @@ class Rdataset(dns.set.Set):

@dns.immutable.immutable
class ImmutableRdataset(Rdataset):  # lgtm[py/missing-equals]

    """An immutable DNS rdataset."""

    _clone_class = Rdataset
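
The rewritten Rdataset.to_wire() above delegates its seek-back bookkeeping to dns.renderer.prefixed_length: emit a placeholder, write the payload, then backpatch the length. A sketch of that pattern as a standalone context manager (illustrative only, not dnspython's actual implementation):

    import contextlib
    import io

    @contextlib.contextmanager
    def prefixed_length(file, size):
        # Reserve `size` bytes for the length, let the caller write the
        # payload, then go back and overwrite the placeholder.
        start = file.tell()
        file.write(b"\x00" * size)
        yield
        end = file.tell()
        payload_len = end - start - size
        assert payload_len < (1 << (8 * size))
        file.seek(start)
        file.write(payload_len.to_bytes(size, "big"))
        file.seek(end)

    buf = io.BytesIO()
    with prefixed_length(buf, 2):
        buf.write(b"rdata goes here")
    # buf now holds a 2-octet big-endian length followed by the payload.
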

@ -21,7 +21,6 @@ import dns.rdtypes.mxbase

@dns.immutable.immutable
class AFSDB(dns.rdtypes.mxbase.UncompressedDowncasingMX):

    """AFSDB record"""

    # Use the property mechanism to make "subtype" an alias for the

@ -32,7 +32,6 @@ class Relay(dns.rdtypes.util.Gateway):

@dns.immutable.immutable
class AMTRELAY(dns.rdata.Rdata):

    """AMTRELAY record"""

    # see: RFC 8777

@ -21,7 +21,6 @@ import dns.rdtypes.txtbase

@dns.immutable.immutable
class AVC(dns.rdtypes.txtbase.TXTBase):

    """AVC record"""

    # See: IANA dns parameters for AVC

Some files were not shown because too many files have changed in this diff.