Mirror of https://github.com/Tautulli/Tautulli.git, synced 2025-08-22 06:13:25 -07:00

commit 25e8bf2ae4
Merge branch 'nightly' into dependabot/pip/nightly/importlib-metadata-7.1.0

974 changed files with 168346 additions and 16026 deletions
.github/workflows/publish-docker.yml (vendored, 2 lines changed)
@@ -47,7 +47,7 @@ jobs:
           version: latest

       - name: Cache Docker Layers
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         with:
           path: /tmp/.buildx-cache
           key: ${{ runner.os }}-buildx-${{ github.sha }}
.github/workflows/publish-installers.yml (vendored, 2 lines changed)
@@ -129,7 +129,7 @@ jobs:
           echo "$EOF" >> $GITHUB_OUTPUT

       - name: Create Release
-        uses: softprops/action-gh-release@v1
+        uses: softprops/action-gh-release@v2
         id: create_release
         env:
           GITHUB_TOKEN: ${{ secrets.GHACTIONS_TOKEN }}
BIN lib/PyWin32.chm (new file; binary file not shown)
lib/adodbapi/__init__.py (new file, 74 lines)
@@ -0,0 +1,74 @@
"""adodbapi - A python DB API 2.0 (PEP 249) interface to Microsoft ADO

Copyright (C) 2002 Henrik Ekelund, version 2.1 by Vernon Cole
* http://sourceforge.net/projects/adodbapi
"""
import sys
import time

from .adodbapi import Connection, Cursor, __version__, connect, dateconverter
from .apibase import (
    BINARY,
    DATETIME,
    NUMBER,
    ROWID,
    STRING,
    DatabaseError,
    DataError,
    Error,
    FetchFailedError,
    IntegrityError,
    InterfaceError,
    InternalError,
    NotSupportedError,
    OperationalError,
    ProgrammingError,
    Warning,
    apilevel,
    paramstyle,
    threadsafety,
)


def Binary(aString):
    """This function constructs an object capable of holding a binary (long) string value."""
    return bytes(aString)


def Date(year, month, day):
    "This function constructs an object holding a date value."
    return dateconverter.Date(year, month, day)


def Time(hour, minute, second):
    "This function constructs an object holding a time value."
    return dateconverter.Time(hour, minute, second)


def Timestamp(year, month, day, hour, minute, second):
    "This function constructs an object holding a time stamp value."
    return dateconverter.Timestamp(year, month, day, hour, minute, second)


def DateFromTicks(ticks):
    """This function constructs an object holding a date value from the given ticks value
    (number of seconds since the epoch; see the documentation of the standard Python time module for details).
    """
    return Date(*time.gmtime(ticks)[:3])


def TimeFromTicks(ticks):
    """This function constructs an object holding a time value from the given ticks value
    (number of seconds since the epoch; see the documentation of the standard Python time module for details).
    """
    return Time(*time.gmtime(ticks)[3:6])


def TimestampFromTicks(ticks):
    """This function constructs an object holding a time stamp value from the given
    ticks value (number of seconds since the epoch;
    see the documentation of the standard Python time module for details)."""
    return Timestamp(*time.gmtime(ticks)[:6])


version = "adodbapi v" + __version__
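Note: the PEP 249 type constructors above all delegate to whichever dateconverter the driver selects at import time; on CPython that is the datetime-based converter defined in apibase.py later in this commit. A minimal standalone sketch of the same epoch math (illustrative only; importing the real package needs pywin32 on Windows):

    import datetime
    import time

    def DateFromTicks(ticks):
        # same struct_time slice the packaged constructor uses
        return datetime.date(*time.gmtime(ticks)[:3])

    print(DateFromTicks(0))  # -> 1970-01-01, the Unix epoch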
lib/adodbapi/ado_consts.py (new file, 281 lines)
@@ -0,0 +1,281 @@
# ADO enumerated constants documented on MSDN:
# http://msdn.microsoft.com/en-us/library/ms678353(VS.85).aspx

# IsolationLevelEnum
adXactUnspecified = -1
adXactBrowse = 0x100
adXactChaos = 0x10
adXactCursorStability = 0x1000
adXactIsolated = 0x100000
adXactReadCommitted = 0x1000
adXactReadUncommitted = 0x100
adXactRepeatableRead = 0x10000
adXactSerializable = 0x100000

# CursorLocationEnum
adUseClient = 3
adUseServer = 2

# CursorTypeEnum
adOpenDynamic = 2
adOpenForwardOnly = 0
adOpenKeyset = 1
adOpenStatic = 3
adOpenUnspecified = -1

# CommandTypeEnum
adCmdText = 1
adCmdStoredProc = 4
adSchemaTables = 20

# ParameterDirectionEnum
adParamInput = 1
adParamInputOutput = 3
adParamOutput = 2
adParamReturnValue = 4
adParamUnknown = 0
directions = {
    0: "Unknown",
    1: "Input",
    2: "Output",
    3: "InputOutput",
    4: "Return",
}


def ado_direction_name(ado_dir):
    try:
        return "adParam" + directions[ado_dir]
    except:
        return "unknown direction (" + str(ado_dir) + ")"


# ObjectStateEnum
adStateClosed = 0
adStateOpen = 1
adStateConnecting = 2
adStateExecuting = 4
adStateFetching = 8

# FieldAttributeEnum
adFldMayBeNull = 0x40

# ConnectModeEnum
adModeUnknown = 0
adModeRead = 1
adModeWrite = 2
adModeReadWrite = 3
adModeShareDenyRead = 4
adModeShareDenyWrite = 8
adModeShareExclusive = 12
adModeShareDenyNone = 16
adModeRecursive = 0x400000

# XactAttributeEnum
adXactCommitRetaining = 131072
adXactAbortRetaining = 262144

ado_error_TIMEOUT = -2147217871

# DataTypeEnum - ADO Data types documented at:
# http://msdn2.microsoft.com/en-us/library/ms675318.aspx
adArray = 0x2000
adEmpty = 0x0
adBSTR = 0x8
adBigInt = 0x14
adBinary = 0x80
adBoolean = 0xB
adChapter = 0x88
adChar = 0x81
adCurrency = 0x6
adDBDate = 0x85
adDBTime = 0x86
adDBTimeStamp = 0x87
adDate = 0x7
adDecimal = 0xE
adDouble = 0x5
adError = 0xA
adFileTime = 0x40
adGUID = 0x48
adIDispatch = 0x9
adIUnknown = 0xD
adInteger = 0x3
adLongVarBinary = 0xCD
adLongVarChar = 0xC9
adLongVarWChar = 0xCB
adNumeric = 0x83
adPropVariant = 0x8A
adSingle = 0x4
adSmallInt = 0x2
adTinyInt = 0x10
adUnsignedBigInt = 0x15
adUnsignedInt = 0x13
adUnsignedSmallInt = 0x12
adUnsignedTinyInt = 0x11
adUserDefined = 0x84
adVarBinary = 0xCC
adVarChar = 0xC8
adVarNumeric = 0x8B
adVarWChar = 0xCA
adVariant = 0xC
adWChar = 0x82
# Additional constants used by introspection but not ADO itself
AUTO_FIELD_MARKER = -1000

adTypeNames = {
    adBSTR: "adBSTR",
    adBigInt: "adBigInt",
    adBinary: "adBinary",
    adBoolean: "adBoolean",
    adChapter: "adChapter",
    adChar: "adChar",
    adCurrency: "adCurrency",
    adDBDate: "adDBDate",
    adDBTime: "adDBTime",
    adDBTimeStamp: "adDBTimeStamp",
    adDate: "adDate",
    adDecimal: "adDecimal",
    adDouble: "adDouble",
    adEmpty: "adEmpty",
    adError: "adError",
    adFileTime: "adFileTime",
    adGUID: "adGUID",
    adIDispatch: "adIDispatch",
    adIUnknown: "adIUnknown",
    adInteger: "adInteger",
    adLongVarBinary: "adLongVarBinary",
    adLongVarChar: "adLongVarChar",
    adLongVarWChar: "adLongVarWChar",
    adNumeric: "adNumeric",
    adPropVariant: "adPropVariant",
    adSingle: "adSingle",
    adSmallInt: "adSmallInt",
    adTinyInt: "adTinyInt",
    adUnsignedBigInt: "adUnsignedBigInt",
    adUnsignedInt: "adUnsignedInt",
    adUnsignedSmallInt: "adUnsignedSmallInt",
    adUnsignedTinyInt: "adUnsignedTinyInt",
    adUserDefined: "adUserDefined",
    adVarBinary: "adVarBinary",
    adVarChar: "adVarChar",
    adVarNumeric: "adVarNumeric",
    adVarWChar: "adVarWChar",
    adVariant: "adVariant",
    adWChar: "adWChar",
}


def ado_type_name(ado_type):
    return adTypeNames.get(ado_type, "unknown type (" + str(ado_type) + ")")


# here in decimal, sorted by value
# adEmpty 0 Specifies no value (DBTYPE_EMPTY).
# adSmallInt 2 Indicates a two-byte signed integer (DBTYPE_I2).
# adInteger 3 Indicates a four-byte signed integer (DBTYPE_I4).
# adSingle 4 Indicates a single-precision floating-point value (DBTYPE_R4).
# adDouble 5 Indicates a double-precision floating-point value (DBTYPE_R8).
# adCurrency 6 Indicates a currency value (DBTYPE_CY). Currency is a fixed-point number
#   with four digits to the right of the decimal point. It is stored in an eight-byte signed integer scaled by 10,000.
# adDate 7 Indicates a date value (DBTYPE_DATE). A date is stored as a double, the whole part of which is
#   the number of days since December 30, 1899, and the fractional part of which is the fraction of a day.
# adBSTR 8 Indicates a null-terminated character string (Unicode) (DBTYPE_BSTR).
# adIDispatch 9 Indicates a pointer to an IDispatch interface on a COM object (DBTYPE_IDISPATCH).
# adError 10 Indicates a 32-bit error code (DBTYPE_ERROR).
# adBoolean 11 Indicates a boolean value (DBTYPE_BOOL).
# adVariant 12 Indicates an Automation Variant (DBTYPE_VARIANT).
# adIUnknown 13 Indicates a pointer to an IUnknown interface on a COM object (DBTYPE_IUNKNOWN).
# adDecimal 14 Indicates an exact numeric value with a fixed precision and scale (DBTYPE_DECIMAL).
# adTinyInt 16 Indicates a one-byte signed integer (DBTYPE_I1).
# adUnsignedTinyInt 17 Indicates a one-byte unsigned integer (DBTYPE_UI1).
# adUnsignedSmallInt 18 Indicates a two-byte unsigned integer (DBTYPE_UI2).
# adUnsignedInt 19 Indicates a four-byte unsigned integer (DBTYPE_UI4).
# adBigInt 20 Indicates an eight-byte signed integer (DBTYPE_I8).
# adUnsignedBigInt 21 Indicates an eight-byte unsigned integer (DBTYPE_UI8).
# adFileTime 64 Indicates a 64-bit value representing the number of 100-nanosecond intervals since
#   January 1, 1601 (DBTYPE_FILETIME).
# adGUID 72 Indicates a globally unique identifier (GUID) (DBTYPE_GUID).
# adBinary 128 Indicates a binary value (DBTYPE_BYTES).
# adChar 129 Indicates a string value (DBTYPE_STR).
# adWChar 130 Indicates a null-terminated Unicode character string (DBTYPE_WSTR).
# adNumeric 131 Indicates an exact numeric value with a fixed precision and scale (DBTYPE_NUMERIC).
# adUserDefined 132 Indicates a user-defined variable (DBTYPE_UDT).
# adDBDate 133 Indicates a date value (yyyymmdd) (DBTYPE_DBDATE).
# adDBTime 134 Indicates a time value (hhmmss) (DBTYPE_DBTIME).
# adDBTimeStamp 135 Indicates a date/time stamp (yyyymmddhhmmss plus a fraction in billionths) (DBTYPE_DBTIMESTAMP).
# adChapter 136 Indicates a four-byte chapter value that identifies rows in a child rowset (DBTYPE_HCHAPTER).
# adPropVariant 138 Indicates an Automation PROPVARIANT (DBTYPE_PROP_VARIANT).
# adVarNumeric 139 Indicates a numeric value (Parameter object only).
# adVarChar 200 Indicates a string value (Parameter object only).
# adLongVarChar 201 Indicates a long string value (Parameter object only).
# adVarWChar 202 Indicates a null-terminated Unicode character string (Parameter object only).
# adLongVarWChar 203 Indicates a long null-terminated Unicode string value (Parameter object only).
# adVarBinary 204 Indicates a binary value (Parameter object only).
# adLongVarBinary 205 Indicates a long binary value (Parameter object only).
# adArray (Does not apply to ADOX.) 0x2000 A flag value, always combined with another data type constant,
#   that indicates an array of that other data type.

# Error codes to names
adoErrors = {
    0xE7B: "adErrBoundToCommand",
    0xE94: "adErrCannotComplete",
    0xEA4: "adErrCantChangeConnection",
    0xC94: "adErrCantChangeProvider",
    0xE8C: "adErrCantConvertvalue",
    0xE8D: "adErrCantCreate",
    0xEA3: "adErrCatalogNotSet",
    0xE8E: "adErrColumnNotOnThisRow",
    0xD5D: "adErrDataConversion",
    0xE89: "adErrDataOverflow",
    0xE9A: "adErrDelResOutOfScope",
    0xEA6: "adErrDenyNotSupported",
    0xEA7: "adErrDenyTypeNotSupported",
    0xCB3: "adErrFeatureNotAvailable",
    0xEA5: "adErrFieldsUpdateFailed",
    0xC93: "adErrIllegalOperation",
    0xCAE: "adErrInTransaction",
    0xE87: "adErrIntegrityViolation",
    0xBB9: "adErrInvalidArgument",
    0xE7D: "adErrInvalidConnection",
    0xE7C: "adErrInvalidParamInfo",
    0xE82: "adErrInvalidTransaction",
    0xE91: "adErrInvalidURL",
    0xCC1: "adErrItemNotFound",
    0xBCD: "adErrNoCurrentRecord",
    0xE83: "adErrNotExecuting",
    0xE7E: "adErrNotReentrant",
    0xE78: "adErrObjectClosed",
    0xD27: "adErrObjectInCollection",
    0xD5C: "adErrObjectNotSet",
    0xE79: "adErrObjectOpen",
    0xBBA: "adErrOpeningFile",
    0xE80: "adErrOperationCancelled",
    0xE96: "adErrOutOfSpace",
    0xE88: "adErrPermissionDenied",
    0xE9E: "adErrPropConflicting",
    0xE9B: "adErrPropInvalidColumn",
    0xE9C: "adErrPropInvalidOption",
    0xE9D: "adErrPropInvalidValue",
    0xE9F: "adErrPropNotAllSettable",
    0xEA0: "adErrPropNotSet",
    0xEA1: "adErrPropNotSettable",
    0xEA2: "adErrPropNotSupported",
    0xBB8: "adErrProviderFailed",
    0xE7A: "adErrProviderNotFound",
    0xBBB: "adErrReadFile",
    0xE93: "adErrResourceExists",
    0xE92: "adErrResourceLocked",
    0xE97: "adErrResourceOutOfScope",
    0xE8A: "adErrSchemaViolation",
    0xE8B: "adErrSignMismatch",
    0xE81: "adErrStillConnecting",
    0xE7F: "adErrStillExecuting",
    0xE90: "adErrTreePermissionDenied",
    0xE8F: "adErrURLDoesNotExist",
    0xE99: "adErrURLNamedRowDoesNotExist",
    0xE98: "adErrUnavailable",
    0xE84: "adErrUnsafeOperation",
    0xE95: "adErrVolumeNotFound",
    0xBBC: "adErrWriteFile",
}
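The two helper functions in this file exist so error messages can show readable names instead of raw ADO enum values. A standalone sketch of the lookup pattern (constants inlined here so the snippet runs without the package):

    adTypeNames = {0x3: "adInteger", 0x8: "adBSTR"}  # subset of the table above

    def ado_type_name(ado_type):
        return adTypeNames.get(ado_type, "unknown type (" + str(ado_type) + ")")

    print(ado_type_name(0x3))    # -> adInteger
    print(ado_type_name(0x999))  # -> unknown type (2457)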
lib/adodbapi/adodbapi.py (new file, 1223 lines; diff suppressed because it is too large)
lib/adodbapi/apibase.py (new file, 794 lines)
@@ -0,0 +1,794 @@
"""adodbapi.apibase - A python DB API 2.0 (PEP 249) interface to Microsoft ADO

Copyright (C) 2002 Henrik Ekelund, version 2.1 by Vernon Cole
* http://sourceforge.net/projects/pywin32
* http://sourceforge.net/projects/adodbapi
"""

import datetime
import decimal
import numbers
import sys
import time

# noinspection PyUnresolvedReferences
from . import ado_consts as adc

verbose = False  # debugging flag

onIronPython = sys.platform == "cli"
if onIronPython:  # we need type definitions for odd data we may need to convert
    # noinspection PyUnresolvedReferences
    from System import DateTime, DBNull

    NullTypes = (type(None), DBNull)
else:
    DateTime = type(NotImplemented)  # should never be seen on win32
    NullTypes = type(None)

# --- define objects to smooth out Python3 <-> Python 2.x differences
unicodeType = str
longType = int
StringTypes = str
makeByteBuffer = bytes
memoryViewType = memoryview
_BaseException = Exception

try:  # jdhardy -- handle bytes under IronPython & Py3
    bytes
except NameError:
    bytes = str  # define it for old Pythons


# ------- Error handlers ------
def standardErrorHandler(connection, cursor, errorclass, errorvalue):
    err = (errorclass, errorvalue)
    try:
        connection.messages.append(err)
    except:
        pass
    if cursor is not None:
        try:
            cursor.messages.append(err)
        except:
            pass
    raise errorclass(errorvalue)


# Note: _BaseException is defined differently between Python 2.x and 3.x
class Error(_BaseException):
    pass  # Exception that is the base class of all other error
    # exceptions. You can use this to catch all errors with one
    # single 'except' statement. Warnings are not considered
    # errors and thus should not use this class as base. It must
    # be a subclass of the Python StandardError (defined in the
    # module exceptions).


class Warning(_BaseException):
    pass


class InterfaceError(Error):
    pass


class DatabaseError(Error):
    pass


class InternalError(DatabaseError):
    pass


class OperationalError(DatabaseError):
    pass


class ProgrammingError(DatabaseError):
    pass


class IntegrityError(DatabaseError):
    pass


class DataError(DatabaseError):
    pass


class NotSupportedError(DatabaseError):
    pass


class FetchFailedError(OperationalError):
    """
    Error is used by RawStoredProcedureQuerySet to determine when a fetch
    failed due to a connection being closed or there is no record set
    returned. (Non-standard, added especially for django)
    """

    pass


# # # # # ----- Type Objects and Constructors ----- # # # # #
# Many databases need to have the input in a particular format for binding to an operation's input parameters.
# For example, if an input is destined for a DATE column, then it must be bound to the database in a particular
# string format. Similar problems exist for "Row ID" columns or large binary items (e.g. blobs or RAW columns).
# This presents problems for Python since the parameters to the executeXXX() method are untyped.
# When the database module sees a Python string object, it doesn't know if it should be bound as a simple CHAR
# column, as a raw BINARY item, or as a DATE.
#
# To overcome this problem, a module must provide the constructors defined below to create objects that can
# hold special values. When passed to the cursor methods, the module can then detect the proper type of
# the input parameter and bind it accordingly.

# A Cursor Object's description attribute returns information about each of the result columns of a query.
# The type_code must compare equal to one of Type Objects defined below. Type Objects may be equal to more than
# one type code (e.g. DATETIME could be equal to the type codes for date, time and timestamp columns;
# see the Implementation Hints below for details).

# SQL NULL values are represented by the Python None singleton on input and output.

# Note: Usage of Unix ticks for database interfacing can cause troubles because of the limited date range they cover.


# def Date(year,month,day):
#     "This function constructs an object holding a date value. "
#     return dateconverter.date(year,month,day)  #dateconverter.Date(year,month,day)
#
# def Time(hour,minute,second):
#     "This function constructs an object holding a time value. "
#     return dateconverter.time(hour, minute, second) # dateconverter.Time(hour,minute,second)
#
# def Timestamp(year,month,day,hour,minute,second):
#     "This function constructs an object holding a time stamp value. "
#     return dateconverter.datetime(year,month,day,hour,minute,second)
#
# def DateFromTicks(ticks):
#     """This function constructs an object holding a date value from the given ticks value
#     (number of seconds since the epoch; see the documentation of the standard Python time module for details). """
#     return Date(*time.gmtime(ticks)[:3])
#
# def TimeFromTicks(ticks):
#     """This function constructs an object holding a time value from the given ticks value
#     (number of seconds since the epoch; see the documentation of the standard Python time module for details). """
#     return Time(*time.gmtime(ticks)[3:6])
#
# def TimestampFromTicks(ticks):
#     """This function constructs an object holding a time stamp value from the given
#     ticks value (number of seconds since the epoch;
#     see the documentation of the standard Python time module for details). """
#     return Timestamp(*time.gmtime(ticks)[:6])
#
# def Binary(aString):
#     """This function constructs an object capable of holding a binary (long) string value. """
#     b = makeByteBuffer(aString)
#     return b
# ----- Time converters ----------------------------------------------
class TimeConverter(object):  # this is a generic time converter skeleton
    def __init__(self):  # the details will be filled in by instances
        self._ordinal_1899_12_31 = datetime.date(1899, 12, 31).toordinal() - 1
        # Use cls.types to compare if an input parameter is a datetime
        self.types = {
            type(self.Date(2000, 1, 1)),
            type(self.Time(12, 1, 1)),
            type(self.Timestamp(2000, 1, 1, 12, 1, 1)),
            datetime.datetime,
            datetime.time,
            datetime.date,
        }

    def COMDate(self, obj):
        """Returns a ComDate from a date-time"""
        try:  # most likely a datetime
            tt = obj.timetuple()

            try:
                ms = obj.microsecond
            except:
                ms = 0
            return self.ComDateFromTuple(tt, ms)
        except:  # might be a tuple
            try:
                return self.ComDateFromTuple(obj)
            except:  # try an mxdate
                try:
                    return obj.COMDate()
                except:
                    raise ValueError('Cannot convert "%s" to COMdate.' % repr(obj))

    def ComDateFromTuple(self, t, microseconds=0):
        d = datetime.date(t[0], t[1], t[2])
        integerPart = d.toordinal() - self._ordinal_1899_12_31
        ms = (t[3] * 3600 + t[4] * 60 + t[5]) * 1000000 + microseconds
        fractPart = float(ms) / 86400000000.0
        return integerPart + fractPart

    def DateObjectFromCOMDate(self, comDate):
        "Returns an object of the wanted type from a ComDate"
        raise NotImplementedError  # "Abstract class"

    def Date(self, year, month, day):
        "This function constructs an object holding a date value."
        raise NotImplementedError  # "Abstract class"

    def Time(self, hour, minute, second):
        "This function constructs an object holding a time value."
        raise NotImplementedError  # "Abstract class"

    def Timestamp(self, year, month, day, hour, minute, second):
        "This function constructs an object holding a time stamp value."
        raise NotImplementedError  # "Abstract class"
        # all purpose date to ISO format converter

    def DateObjectToIsoFormatString(self, obj):
        "This function should return a string in the format 'YYYY-MM-dd HH:MM:SS:ms' (ms optional)"
        try:  # most likely, a datetime.datetime
            s = obj.isoformat(" ")
        except (TypeError, AttributeError):
            if isinstance(obj, datetime.date):
                s = obj.isoformat() + " 00:00:00"  # return exact midnight
            else:
                try:  # maybe it has a strftime method, like mx
                    s = obj.strftime("%Y-%m-%d %H:%M:%S")
                except AttributeError:
                    try:  # but may be time.struct_time
                        s = time.strftime("%Y-%m-%d %H:%M:%S", obj)
                    except:
                        raise ValueError('Cannot convert "%s" to isoformat' % repr(obj))
        return s


# -- Optional: if mx extensions are installed you may use mxDateTime ----
try:
    import mx.DateTime

    mxDateTime = True
except:
    mxDateTime = False
if mxDateTime:

    class mxDateTimeConverter(TimeConverter):  # used optionally if installed
        def __init__(self):
            TimeConverter.__init__(self)
            self.types.add(type(mx.DateTime))

        def DateObjectFromCOMDate(self, comDate):
            return mx.DateTime.DateTimeFromCOMDate(comDate)

        def Date(self, year, month, day):
            return mx.DateTime.Date(year, month, day)

        def Time(self, hour, minute, second):
            return mx.DateTime.Time(hour, minute, second)

        def Timestamp(self, year, month, day, hour, minute, second):
            return mx.DateTime.Timestamp(year, month, day, hour, minute, second)

else:

    class mxDateTimeConverter(TimeConverter):
        pass  # if no mx is installed


class pythonDateTimeConverter(TimeConverter):  # standard since Python 2.3
    def __init__(self):
        TimeConverter.__init__(self)

    def DateObjectFromCOMDate(self, comDate):
        if isinstance(comDate, datetime.datetime):
            odn = comDate.toordinal()
            tim = comDate.time()
            new = datetime.datetime.combine(datetime.datetime.fromordinal(odn), tim)
            return new
            # return comDate.replace(tzinfo=None) # make non aware
        elif isinstance(comDate, DateTime):
            fComDate = comDate.ToOADate()  # ironPython clr Date/Time
        else:
            fComDate = float(comDate)  # ComDate is number of days since 1899-12-31
        integerPart = int(fComDate)
        floatpart = fComDate - integerPart
        ##if floatpart == 0.0:
        ##    return datetime.date.fromordinal(integerPart + self._ordinal_1899_12_31)
        dte = datetime.datetime.fromordinal(
            integerPart + self._ordinal_1899_12_31
        ) + datetime.timedelta(milliseconds=floatpart * 86400000)
        # millisecondsperday=86400000 # 24*60*60*1000
        return dte

    def Date(self, year, month, day):
        return datetime.date(year, month, day)

    def Time(self, hour, minute, second):
        return datetime.time(hour, minute, second)

    def Timestamp(self, year, month, day, hour, minute, second):
        return datetime.datetime(year, month, day, hour, minute, second)


class pythonTimeConverter(TimeConverter):  # the old, ?nix type date and time
    def __init__(self):  # caution: this Class gets confised by timezones and DST
        TimeConverter.__init__(self)
        self.types.add(time.struct_time)

    def DateObjectFromCOMDate(self, comDate):
        "Returns ticks since 1970"
        if isinstance(comDate, datetime.datetime):
            return comDate.timetuple()
        elif isinstance(comDate, DateTime):  # ironPython clr date/time
            fcomDate = comDate.ToOADate()
        else:
            fcomDate = float(comDate)
        secondsperday = 86400  # 24*60*60
        # ComDate is number of days since 1899-12-31, gmtime epoch is 1970-1-1 = 25569 days
        t = time.gmtime(secondsperday * (fcomDate - 25569.0))
        return t  # year,month,day,hour,minute,second,weekday,julianday,daylightsaving=t

    def Date(self, year, month, day):
        return self.Timestamp(year, month, day, 0, 0, 0)

    def Time(self, hour, minute, second):
        return time.gmtime((hour * 60 + minute) * 60 + second)

    def Timestamp(self, year, month, day, hour, minute, second):
        return time.localtime(
            time.mktime((year, month, day, hour, minute, second, 0, 0, -1))
        )


base_dateconverter = pythonDateTimeConverter()

# ------ DB API required module attributes ---------------------
threadsafety = 1  # TODO -- find out whether this module is actually BETTER than 1.

apilevel = "2.0"  # String constant stating the supported DB API level.

paramstyle = "qmark"  # the default parameter style

# ------ control for an extension which may become part of DB API 3.0 ---
accepted_paramstyles = ("qmark", "named", "format", "pyformat", "dynamic")

# ------------------------------------------------------------------------------------------
# define similar types for generic conversion routines
adoIntegerTypes = (
    adc.adInteger,
    adc.adSmallInt,
    adc.adTinyInt,
    adc.adUnsignedInt,
    adc.adUnsignedSmallInt,
    adc.adUnsignedTinyInt,
    adc.adBoolean,
    adc.adError,
)  # max 32 bits
adoRowIdTypes = (adc.adChapter,)  # v2.1 Rose
adoLongTypes = (adc.adBigInt, adc.adFileTime, adc.adUnsignedBigInt)
adoExactNumericTypes = (
    adc.adDecimal,
    adc.adNumeric,
    adc.adVarNumeric,
    adc.adCurrency,
)  # v2.3 Cole
adoApproximateNumericTypes = (adc.adDouble, adc.adSingle)  # v2.1 Cole
adoStringTypes = (
    adc.adBSTR,
    adc.adChar,
    adc.adLongVarChar,
    adc.adLongVarWChar,
    adc.adVarChar,
    adc.adVarWChar,
    adc.adWChar,
)
adoBinaryTypes = (adc.adBinary, adc.adLongVarBinary, adc.adVarBinary)
adoDateTimeTypes = (adc.adDBTime, adc.adDBTimeStamp, adc.adDate, adc.adDBDate)
adoRemainingTypes = (
    adc.adEmpty,
    adc.adIDispatch,
    adc.adIUnknown,
    adc.adPropVariant,
    adc.adArray,
    adc.adUserDefined,
    adc.adVariant,
    adc.adGUID,
)


# this class is a trick to determine whether a type is a member of a related group of types. see PEP notes
class DBAPITypeObject(object):
    def __init__(self, valuesTuple):
        self.values = frozenset(valuesTuple)

    def __eq__(self, other):
        return other in self.values

    def __ne__(self, other):
        return other not in self.values


"""This type object is used to describe columns in a database that are string-based (e.g. CHAR). """
STRING = DBAPITypeObject(adoStringTypes)

"""This type object is used to describe (long) binary columns in a database (e.g. LONG, RAW, BLOBs). """
BINARY = DBAPITypeObject(adoBinaryTypes)

"""This type object is used to describe numeric columns in a database. """
NUMBER = DBAPITypeObject(
    adoIntegerTypes + adoLongTypes + adoExactNumericTypes + adoApproximateNumericTypes
)

"""This type object is used to describe date/time columns in a database. """

DATETIME = DBAPITypeObject(adoDateTimeTypes)
"""This type object is used to describe the "Row ID" column in a database. """
ROWID = DBAPITypeObject(adoRowIdTypes)

OTHER = DBAPITypeObject(adoRemainingTypes)

# ------- utilities for translating python data types to ADO data types ---------------------------------
typeMap = {
    memoryViewType: adc.adVarBinary,
    float: adc.adDouble,
    type(None): adc.adEmpty,
    str: adc.adBSTR,
    bool: adc.adBoolean,  # v2.1 Cole
    decimal.Decimal: adc.adDecimal,
    int: adc.adBigInt,
    bytes: adc.adVarBinary,
}


def pyTypeToADOType(d):
    tp = type(d)
    try:
        return typeMap[tp]
    except KeyError:  # The type was not defined in the pre-computed Type table
        from . import dateconverter

        if (
            tp in dateconverter.types
        ):  # maybe it is one of our supported Date/Time types
            return adc.adDate
        # otherwise, attempt to discern the type by probing the data object itself -- to handle duck typing
        if isinstance(d, StringTypes):
            return adc.adBSTR
        if isinstance(d, numbers.Integral):
            return adc.adBigInt
        if isinstance(d, numbers.Real):
            return adc.adDouble
        raise DataError('cannot convert "%s" (type=%s) to ADO' % (repr(d), tp))


# # # # # # # # # # # # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# functions to convert database values to Python objects
# ------------------------------------------------------------------------
# variant type : function converting variant to Python value
def variantConvertDate(v):
    from . import dateconverter  # this function only called when adodbapi is running

    return dateconverter.DateObjectFromCOMDate(v)


def cvtString(variant):  # use to get old action of adodbapi v1 if desired
    if onIronPython:
        try:
            return variant.ToString()
        except:
            pass
    return str(variant)


def cvtDecimal(variant):  # better name
    return _convertNumberWithCulture(variant, decimal.Decimal)


def cvtNumeric(variant):  # older name - don't break old code
    return cvtDecimal(variant)


def cvtFloat(variant):
    return _convertNumberWithCulture(variant, float)


def _convertNumberWithCulture(variant, f):
    try:
        return f(variant)
    except (ValueError, TypeError, decimal.InvalidOperation):
        try:
            europeVsUS = str(variant).replace(",", ".")
            return f(europeVsUS)
        except (ValueError, TypeError, decimal.InvalidOperation):
            pass


def cvtInt(variant):
    return int(variant)


def cvtLong(variant):  # only important in old versions where long and int differ
    return int(variant)


def cvtBuffer(variant):
    return bytes(variant)


def cvtUnicode(variant):
    return str(variant)


def identity(x):
    return x


def cvtUnusual(variant):
    if verbose > 1:
        sys.stderr.write("Conversion called for Unusual data=%s\n" % repr(variant))
    if isinstance(variant, DateTime):  # COMdate or System.Date
        from .adodbapi import (  # this will only be called when adodbapi is in use, and very rarely
            dateconverter,
        )

        return dateconverter.DateObjectFromCOMDate(variant)
    return variant  # cannot find conversion function -- just give the data to the user


def convert_to_python(variant, func):  # convert DB value into Python value
    if isinstance(variant, NullTypes):  # IronPython Null or None
        return None
    return func(variant)  # call the appropriate conversion function


class MultiMap(dict):  # builds a dictionary from {(sequence,of,keys) : function}
    """A dictionary of ado.type : function -- but you can set multiple items by passing a sequence of keys"""

    # useful for defining conversion functions for groups of similar data types.
    def __init__(self, aDict):
        for k, v in list(aDict.items()):
            self[k] = v  # we must call __setitem__

    def __setitem__(self, adoType, cvtFn):
        "set a single item, or a whole sequence of items"
        try:  # user passed us a sequence, set them individually
            for type in adoType:
                dict.__setitem__(self, type, cvtFn)
        except TypeError:  # a single value fails attempt to iterate
            dict.__setitem__(self, adoType, cvtFn)


# initialize variantConversions dictionary used to convert SQL to Python
# this is the dictionary of default conversion functions, built by the class above.
# this becomes a class attribute for the Connection, and that attribute is used
# to build the list of column conversion functions for the Cursor
variantConversions = MultiMap(
    {
        adoDateTimeTypes: variantConvertDate,
        adoApproximateNumericTypes: cvtFloat,
        adoExactNumericTypes: cvtDecimal,  # use to force decimal rather than unicode
        adoLongTypes: cvtLong,
        adoIntegerTypes: cvtInt,
        adoRowIdTypes: cvtInt,
        adoStringTypes: identity,
        adoBinaryTypes: cvtBuffer,
        adoRemainingTypes: cvtUnusual,
    }
)

# # # # # classes to emulate the result of cursor.fetchxxx() as a sequence of sequences # # # # #
# "an ENUM of how my low level records are laid out"
RS_WIN_32, RS_ARRAY, RS_REMOTE = list(range(1, 4))


class SQLrow(object):  # a single database row
    # class to emulate a sequence, so that a column may be retrieved by either number or name
    def __init__(self, rows, index):  # "rows" is an _SQLrows object, index is which row
        self.rows = rows  # parent 'fetch' container object
        self.index = index  # my row number within parent

    def __getattr__(self, name):  # used for row.columnName type of value access
        try:
            return self._getValue(self.rows.columnNames[name.lower()])
        except KeyError:
            raise AttributeError('Unknown column name "{}"'.format(name))

    def _getValue(self, key):  # key must be an integer
        if (
            self.rows.recordset_format == RS_ARRAY
        ):  # retrieve from two-dimensional array
            v = self.rows.ado_results[key, self.index]
        elif self.rows.recordset_format == RS_REMOTE:
            v = self.rows.ado_results[self.index][key]
        else:  # pywin32 - retrieve from tuple of tuples
            v = self.rows.ado_results[key][self.index]
        if self.rows.converters is NotImplemented:
            return v
        return convert_to_python(v, self.rows.converters[key])

    def __len__(self):
        return self.rows.numberOfColumns

    def __getitem__(self, key):  # used for row[key] type of value access
        if isinstance(key, int):  # normal row[1] designation
            try:
                return self._getValue(key)
            except IndexError:
                raise
        if isinstance(key, slice):
            indices = key.indices(self.rows.numberOfColumns)
            vl = [self._getValue(i) for i in range(*indices)]
            return tuple(vl)
        try:
            return self._getValue(
                self.rows.columnNames[key.lower()]
            )  # extension row[columnName] designation
        except (KeyError, TypeError):
            er, st, tr = sys.exc_info()
            raise er(
                'No such key as "%s" in %s' % (repr(key), self.__repr__())
            ).with_traceback(tr)

    def __iter__(self):
        return iter(self.__next__())

    def __next__(self):
        for n in range(self.rows.numberOfColumns):
            yield self._getValue(n)

    def __repr__(self):  # create a human readable representation
        taglist = sorted(list(self.rows.columnNames.items()), key=lambda x: x[1])
        s = "<SQLrow={"
        for name, i in taglist:
            s += name + ":" + repr(self._getValue(i)) + ", "
        return s[:-2] + "}>"

    def __str__(self):  # create a pretty human readable representation
        return str(
            tuple(str(self._getValue(i)) for i in range(self.rows.numberOfColumns))
        )

    # TO-DO implement pickling an SQLrow directly
    # def __getstate__(self): return self.__dict__
    # def __setstate__(self, d): self.__dict__.update(d)
    # which basically tell pickle to treat your class just like a normal one,
    # taking self.__dict__ as representing the whole of the instance state,
    # despite the existence of the __getattr__.
    # # # #


class SQLrows(object):
    # class to emulate a sequence for multiple rows using a container object
    def __init__(self, ado_results, numberOfRows, cursor):
        self.ado_results = ado_results  # raw result of SQL get
        try:
            self.recordset_format = cursor.recordset_format
            self.numberOfColumns = cursor.numberOfColumns
            self.converters = cursor.converters
            self.columnNames = cursor.columnNames
        except AttributeError:
            self.recordset_format = RS_ARRAY
            self.numberOfColumns = 0
            self.converters = []
            self.columnNames = {}
        self.numberOfRows = numberOfRows

    def __len__(self):
        return self.numberOfRows

    def __getitem__(self, item):  # used for row or row,column access
        if not self.ado_results:
            return []
        if isinstance(item, slice):  # will return a list of row objects
            indices = item.indices(self.numberOfRows)
            return [SQLrow(self, k) for k in range(*indices)]
        elif isinstance(item, tuple) and len(item) == 2:
            # d = some_rowsObject[i,j] will return a datum from a two-dimension address
            i, j = item
            if not isinstance(j, int):
                try:
                    j = self.columnNames[j.lower()]  # convert named column to numeric
                except KeyError:
                    raise KeyError('adodbapi: no such column name as "%s"' % repr(j))
            if self.recordset_format == RS_ARRAY:  # retrieve from two-dimensional array
                v = self.ado_results[j, i]
            elif self.recordset_format == RS_REMOTE:
                v = self.ado_results[i][j]
            else:  # pywin32 - retrieve from tuple of tuples
                v = self.ado_results[j][i]
            if self.converters is NotImplemented:
                return v
            return convert_to_python(v, self.converters[j])
        else:
            row = SQLrow(self, item)  # new row descriptor
            return row

    def __iter__(self):
        return iter(self.__next__())

    def __next__(self):
        for n in range(self.numberOfRows):
            row = SQLrow(self, n)
            yield row
            # # # # #


# # # # # functions to re-format SQL requests to other paramstyle requirements # # # # # # # # # #


def changeNamedToQmark(
    op,
):  # convert from 'named' paramstyle to ADO required '?'mark parameters
    outOp = ""
    outparms = []
    chunks = op.split(
        "'"
    )  # quote all literals -- odd numbered list results are literals.
    inQuotes = False
    for chunk in chunks:
        if inQuotes:  # this is inside a quote
            if chunk == "":  # double apostrophe to quote one apostrophe
                outOp = outOp[:-1]  # so take one away
            else:
                outOp += "'" + chunk + "'"  # else pass the quoted string as is.
        else:  # is SQL code -- look for a :namedParameter
            while chunk:  # some SQL string remains
                sp = chunk.split(":", 1)
                outOp += sp[0]  # concat the part up to the :
                s = ""
                try:
                    chunk = sp[1]
                except IndexError:
                    chunk = None
                if chunk:  # there was a parameter - parse it out
                    i = 0
                    c = chunk[0]
                    while c.isalnum() or c == "_":
                        i += 1
                        try:
                            c = chunk[i]
                        except IndexError:
                            break
                    s = chunk[:i]
                    chunk = chunk[i:]
                if s:
                    outparms.append(s)  # list the parameters in order
                    outOp += "?"  # put in the Qmark
        inQuotes = not inQuotes
    return outOp, outparms


def changeFormatToQmark(
    op,
):  # convert from 'format' paramstyle to ADO required '?'mark parameters
    outOp = ""
    outparams = []
    chunks = op.split(
        "'"
    )  # quote all literals -- odd numbered list results are literals.
    inQuotes = False
    for chunk in chunks:
        if inQuotes:
            if (
                outOp != "" and chunk == ""
            ):  # he used a double apostrophe to quote one apostrophe
                outOp = outOp[:-1]  # so take one away
            else:
                outOp += "'" + chunk + "'"  # else pass the quoted string as is.
        else:  # is SQL code -- look for a %s parameter
            if "%(" in chunk:  # ugh! pyformat!
                while chunk:  # some SQL string remains
                    sp = chunk.split("%(", 1)
                    outOp += sp[0]  # concat the part up to the %
                    if len(sp) > 1:
                        try:
                            s, chunk = sp[1].split(")s", 1)  # find the ')s'
                        except ValueError:
                            raise ProgrammingError(
                                'Pyformat SQL has incorrect format near "%s"' % chunk
                            )
                        outparams.append(s)
                        outOp += "?"  # put in the Qmark
                    else:
                        chunk = None
            else:  # proper '%s' format
                sp = chunk.split("%s")  # make each %s
                outOp += "?".join(sp)  # into ?
        inQuotes = not inQuotes  # every other chunk is a quoted string
    return outOp, outparams
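changeNamedToQmark and changeFormatToQmark let callers write 'named' or 'format' paramstyle SQL while ADO itself only accepts ?-mark placeholders; both walk the statement outside of quoted literals, so apostrophes inside string constants are left alone. Tracing changeNamedToQmark on a typical statement (expected output derived from the code above):

    sql = "select * from people where name = :name and age > :age"
    print(changeNamedToQmark(sql))
    # -> ('select * from people where name = ? and age > ?', ['name', 'age'])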
lib/adodbapi/examples/db_print.py (new file, 72 lines)
@@ -0,0 +1,72 @@
""" db_print.py -- a simple demo for ADO database reads."""

import sys

import adodbapi.ado_consts as adc

cmd_args = ("filename", "table_name")
if "help" in sys.argv:
    print("possible settings keywords are:", cmd_args)
    sys.exit()

kw_args = {}  # pick up filename and proxy address from command line (optionally)
for arg in sys.argv:
    s = arg.split("=")
    if len(s) > 1:
        if s[0] in cmd_args:
            kw_args[s[0]] = s[1]

kw_args.setdefault(
    "filename", "test.mdb"
)  # assumes server is running from examples folder
kw_args.setdefault("table_name", "Products")  # the name of the demo table

# the server needs to select the provider based on his Python installation
provider_switch = ["provider", "Microsoft.ACE.OLEDB.12.0", "Microsoft.Jet.OLEDB.4.0"]

# ------------------------ START HERE -------------------------------------
# create the connection
constr = "Provider=%(provider)s;Data Source=%(filename)s"
import adodbapi as db

con = db.connect(constr, kw_args, macro_is64bit=provider_switch)

if kw_args["table_name"] == "?":
    print("The tables in your database are:")
    for name in con.get_table_names():
        print(name)
else:
    # make a cursor on the connection
    with con.cursor() as c:
        # run an SQL statement on the cursor
        sql = "select * from %s" % kw_args["table_name"]
        print('performing query="%s"' % sql)
        c.execute(sql)

        # check the results
        print(
            'result rowcount shows as= %d. (Note: -1 means "not known")' % (c.rowcount,)
        )
        print("")
        print("result data description is:")
        print("            NAME Type         DispSize IntrnlSz Prec Scale Null?")
        for d in c.description:
            print(
                ("%16s %-12s %8s %8d %4d %5d %s")
                % (d[0], adc.adTypeNames[d[1]], d[2], d[3], d[4], d[5], bool(d[6]))
            )
        print("")
        print("str() of first five records are...")

        # get the results
        db = c.fetchmany(5)

        # print them
        for rec in db:
            print(rec)

        print("")
        print("repr() of next row is...")
        print(repr(c.fetchone()))
        print("")
con.close()
lib/adodbapi/examples/db_table_names.py (new file, 20 lines)
@@ -0,0 +1,20 @@
""" db_table_names.py -- a simple demo for ADO database table listing."""
import sys

import adodbapi

try:
    databasename = sys.argv[1]
except IndexError:
    databasename = "test.mdb"

provider = ["prv", "Microsoft.ACE.OLEDB.12.0", "Microsoft.Jet.OLEDB.4.0"]
constr = "Provider=%(prv)s;Data Source=%(db)s"

# create the connection
con = adodbapi.connect(constr, db=databasename, macro_is64bit=provider)

print("Table names in= %s" % databasename)

for table in con.get_table_names():
    print(table)
lib/adodbapi/examples/xls_read.py (new file, 41 lines)
@@ -0,0 +1,41 @@
import sys

import adodbapi

try:
    import adodbapi.is64bit as is64bit

    is64 = is64bit.Python()
except ImportError:
    is64 = False

if is64:
    driver = "Microsoft.ACE.OLEDB.12.0"
else:
    driver = "Microsoft.Jet.OLEDB.4.0"
extended = 'Extended Properties="Excel 8.0;HDR=Yes;IMEX=1;"'

try:  # first command line argument will be xls file name -- default to the one written by xls_write.py
    filename = sys.argv[1]
except IndexError:
    filename = "xx.xls"

constr = "Provider=%s;Data Source=%s;%s" % (driver, filename, extended)

conn = adodbapi.connect(constr)

try:  # second command line argument will be worksheet name -- default to first worksheet
    sheet = sys.argv[2]
except IndexError:
    # use ADO feature to get the name of the first worksheet
    sheet = conn.get_table_names()[0]

print("Shreadsheet=%s Worksheet=%s" % (filename, sheet))
print("------------------------------------------------------------")
crsr = conn.cursor()
sql = "SELECT * from [%s]" % sheet
crsr.execute(sql)
for row in crsr.fetchmany(10):
    print(repr(row))
crsr.close()
conn.close()
lib/adodbapi/examples/xls_write.py (new file, 41 lines)
@@ -0,0 +1,41 @@
import datetime

import adodbapi

try:
    import adodbapi.is64bit as is64bit

    is64 = is64bit.Python()
except ImportError:
    is64 = False  # in case the user has an old version of adodbapi
if is64:
    driver = "Microsoft.ACE.OLEDB.12.0"
else:
    driver = "Microsoft.Jet.OLEDB.4.0"
filename = "xx.xls"  # file will be created if it does not exist
extended = 'Extended Properties="Excel 8.0;Readonly=False;"'

constr = "Provider=%s;Data Source=%s;%s" % (driver, filename, extended)

conn = adodbapi.connect(constr)
with conn:  # will auto commit if no errors
    with conn.cursor() as crsr:
        try:
            crsr.execute("drop table SheetOne")
        except:
            pass  # just is case there is one already there

        # create the sheet and the header row and set the types for the columns
        crsr.execute(
            "create table SheetOne (Name varchar, Rank varchar, SrvcNum integer, Weight float, Birth date)"
        )

        sql = "INSERT INTO SheetOne (name, rank , srvcnum, weight, birth) values (?,?,?,?,?)"

        data = ("Mike Murphy", "SSG", 123456789, 167.8, datetime.date(1922, 12, 27))
        crsr.execute(sql, data)  # write the first row of data
        crsr.execute(
            sql, ["John Jones", "Pvt", 987654321, 140.0, datetime.date(1921, 7, 4)]
        )  # another row of data
conn.close()
print("Created spreadsheet=%s worksheet=%s" % (filename, "SheetOne"))
41
lib/adodbapi/is64bit.py
Normal file
41
lib/adodbapi/is64bit.py
Normal file
|
@ -0,0 +1,41 @@
"""is64bit.Python() --> boolean value of detected Python word size. is64bit.os() --> os build version"""
import sys


def Python():
    if sys.platform == "cli":  # IronPython
        import System

        return System.IntPtr.Size == 8
    else:
        try:
            return sys.maxsize > 2147483647
        except AttributeError:
            return sys.maxint > 2147483647


def os():
    import platform

    pm = platform.machine()
    if pm != ".." and pm.endswith("64"):  # recent Python (not Iron)
        return True
    else:
        import os

        if "PROCESSOR_ARCHITEW6432" in os.environ:
            return True  # 32 bit program running on 64 bit Windows
        try:
            return os.environ["PROCESSOR_ARCHITECTURE"].endswith(
                "64"
            )  # 64 bit Windows 64 bit program
        except (IndexError, KeyError):
            pass  # not Windows
        try:
            return "64" in platform.architecture()[0]  # this often works in Linux
        except:
            return False  # is an older version of Python, assume also an older os (best we can guess)


if __name__ == "__main__":
    print("is64bit.Python() =", Python(), "is64bit.os() =", os())
506
lib/adodbapi/license.txt
Normal file
@@ -0,0 +1,506 @@
                  GNU LESSER GENERAL PUBLIC LICENSE
                       Version 2.1, February 1999

 Copyright (C) 1991, 1999 Free Software Foundation, Inc.
 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 Everyone is permitted to copy and distribute verbatim copies
 of this license document, but changing it is not allowed.

[This is the first released version of the Lesser GPL. It also counts
 as the successor of the GNU Library Public License, version 2, hence
 the version number 2.1.]

                            Preamble

  The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
Licenses are intended to guarantee your freedom to share and change
free software--to make sure the software is free for all its users.

  This license, the Lesser General Public License, applies to some
specially designated software packages--typically libraries--of the
Free Software Foundation and other authors who decide to use it. You
can use it too, but we suggest you first think carefully about whether
this license or the ordinary General Public License is the better
strategy to use in any particular case, based on the explanations below.

  When we speak of free software, we are referring to freedom of use,
not price. Our General Public Licenses are designed to make sure that
you have the freedom to distribute copies of free software (and charge
for this service if you wish); that you receive source code or can get
it if you want it; that you can change the software and use pieces of
it in new free programs; and that you are informed that you can do
these things.

  To protect your rights, we need to make restrictions that forbid
distributors to deny you these rights or to ask you to surrender these
rights. These restrictions translate to certain responsibilities for
you if you distribute copies of the library or if you modify it.

  For example, if you distribute copies of the library, whether gratis
or for a fee, you must give the recipients all the rights that we gave
you. You must make sure that they, too, receive or can get the source
code. If you link other code with the library, you must provide
complete object files to the recipients, so that they can relink them
with the library after making changes to the library and recompiling
it. And you must show them these terms so they know their rights.

  We protect your rights with a two-step method: (1) we copyright the
library, and (2) we offer you this license, which gives you legal
permission to copy, distribute and/or modify the library.

  To protect each distributor, we want to make it very clear that
there is no warranty for the free library. Also, if the library is
modified by someone else and passed on, the recipients should know
that what they have is not the original version, so that the original
author's reputation will not be affected by problems that might be
introduced by others.

  Finally, software patents pose a constant threat to the existence of
any free program. We wish to make sure that a company cannot
effectively restrict the users of a free program by obtaining a
restrictive license from a patent holder. Therefore, we insist that
any patent license obtained for a version of the library must be
consistent with the full freedom of use specified in this license.

  Most GNU software, including some libraries, is covered by the
ordinary GNU General Public License. This license, the GNU Lesser
General Public License, applies to certain designated libraries, and
is quite different from the ordinary General Public License. We use
this license for certain libraries in order to permit linking those
libraries into non-free programs.

  When a program is linked with a library, whether statically or using
a shared library, the combination of the two is legally speaking a
combined work, a derivative of the original library. The ordinary
General Public License therefore permits such linking only if the
entire combination fits its criteria of freedom. The Lesser General
Public License permits more lax criteria for linking other code with
the library.

  We call this license the "Lesser" General Public License because it
does Less to protect the user's freedom than the ordinary General
Public License. It also provides other free software developers Less
of an advantage over competing non-free programs. These disadvantages
are the reason we use the ordinary General Public License for many
libraries. However, the Lesser license provides advantages in certain
special circumstances.

  For example, on rare occasions, there may be a special need to
encourage the widest possible use of a certain library, so that it becomes
a de-facto standard. To achieve this, non-free programs must be
allowed to use the library. A more frequent case is that a free
library does the same job as widely used non-free libraries. In this
case, there is little to gain by limiting the free library to free
software only, so we use the Lesser General Public License.

  In other cases, permission to use a particular library in non-free
programs enables a greater number of people to use a large body of
free software. For example, permission to use the GNU C Library in
non-free programs enables many more people to use the whole GNU
operating system, as well as its variant, the GNU/Linux operating
system.

  Although the Lesser General Public License is Less protective of the
users' freedom, it does ensure that the user of a program that is
linked with the Library has the freedom and the wherewithal to run
that program using a modified version of the Library.

  The precise terms and conditions for copying, distribution and
modification follow. Pay close attention to the difference between a
"work based on the library" and a "work that uses the library". The
former contains code derived from the library, whereas the latter must
be combined with the library in order to run.

                  GNU LESSER GENERAL PUBLIC LICENSE
   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION

  0. This License Agreement applies to any software library or other
program which contains a notice placed by the copyright holder or
other authorized party saying it may be distributed under the terms of
this Lesser General Public License (also called "this License").
Each licensee is addressed as "you".

  A "library" means a collection of software functions and/or data
prepared so as to be conveniently linked with application programs
(which use some of those functions and data) to form executables.

  The "Library", below, refers to any such software library or work
which has been distributed under these terms. A "work based on the
Library" means either the Library or any derivative work under
copyright law: that is to say, a work containing the Library or a
portion of it, either verbatim or with modifications and/or translated
straightforwardly into another language. (Hereinafter, translation is
included without limitation in the term "modification".)

  "Source code" for a work means the preferred form of the work for
making modifications to it. For a library, complete source code means
all the source code for all modules it contains, plus any associated
interface definition files, plus the scripts used to control compilation
and installation of the library.

  Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running a program using the Library is not restricted, and output from
such a program is covered only if its contents constitute a work based
on the Library (independent of the use of the Library in a tool for
writing it). Whether that is true depends on what the Library does
and what the program that uses the Library does.

  1. You may copy and distribute verbatim copies of the Library's
complete source code as you receive it, in any medium, provided that
you conspicuously and appropriately publish on each copy an
appropriate copyright notice and disclaimer of warranty; keep intact
all the notices that refer to this License and to the absence of any
warranty; and distribute a copy of this License along with the
Library.

  You may charge a fee for the physical act of transferring a copy,
and you may at your option offer warranty protection in exchange for a
fee.

  2. You may modify your copy or copies of the Library or any portion
of it, thus forming a work based on the Library, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:

    a) The modified work must itself be a software library.

    b) You must cause the files modified to carry prominent notices
    stating that you changed the files and the date of any change.

    c) You must cause the whole of the work to be licensed at no
    charge to all third parties under the terms of this License.

    d) If a facility in the modified Library refers to a function or a
    table of data to be supplied by an application program that uses
    the facility, other than as an argument passed when the facility
    is invoked, then you must make a good faith effort to ensure that,
    in the event an application does not supply such function or
    table, the facility still operates, and performs whatever part of
    its purpose remains meaningful.

    (For example, a function in a library to compute square roots has
    a purpose that is entirely well-defined independent of the
    application. Therefore, Subsection 2d requires that any
    application-supplied function or table used by this function must
    be optional: if the application does not supply it, the square
    root function must still compute square roots.)

  These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Library,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Library, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote
it.

  Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Library.

  In addition, mere aggregation of another work not based on the Library
with the Library (or with a work based on the Library) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.

  3. You may opt to apply the terms of the ordinary GNU General Public
License instead of this License to a given copy of the Library. To do
this, you must alter all the notices that refer to this License, so
that they refer to the ordinary GNU General Public License, version 2,
instead of to this License. (If a newer version than version 2 of the
ordinary GNU General Public License has appeared, then you can specify
that version instead if you wish.) Do not make any other change in
these notices.

  Once this change is made in a given copy, it is irreversible for
that copy, so the ordinary GNU General Public License applies to all
subsequent copies and derivative works made from that copy.

  This option is useful when you wish to copy part of the code of
the Library into a program that is not a library.

  4. You may copy and distribute the Library (or a portion or
derivative of it, under Section 2) in object code or executable form
under the terms of Sections 1 and 2 above provided that you accompany
it with the complete corresponding machine-readable source code, which
must be distributed under the terms of Sections 1 and 2 above on a
medium customarily used for software interchange.

  If distribution of object code is made by offering access to copy
from a designated place, then offering equivalent access to copy the
source code from the same place satisfies the requirement to
distribute the source code, even though third parties are not
compelled to copy the source along with the object code.

  5. A program that contains no derivative of any portion of the
Library, but is designed to work with the Library by being compiled or
linked with it, is called a "work that uses the Library". Such a
work, in isolation, is not a derivative work of the Library, and
therefore falls outside the scope of this License.

  However, linking a "work that uses the Library" with the Library
creates an executable that is a derivative of the Library (because it
contains portions of the Library), rather than a "work that uses the
library". The executable is therefore covered by this License.
Section 6 states terms for distribution of such executables.

  When a "work that uses the Library" uses material from a header file
that is part of the Library, the object code for the work may be a
derivative work of the Library even though the source code is not.
Whether this is true is especially significant if the work can be
linked without the Library, or if the work is itself a library. The
threshold for this to be true is not precisely defined by law.

  If such an object file uses only numerical parameters, data
structure layouts and accessors, and small macros and small inline
functions (ten lines or less in length), then the use of the object
file is unrestricted, regardless of whether it is legally a derivative
work. (Executables containing this object code plus portions of the
Library will still fall under Section 6.)

  Otherwise, if the work is a derivative of the Library, you may
distribute the object code for the work under the terms of Section 6.
Any executables containing that work also fall under Section 6,
whether or not they are linked directly with the Library itself.

  6. As an exception to the Sections above, you may also combine or
link a "work that uses the Library" with the Library to produce a
work containing portions of the Library, and distribute that work
under terms of your choice, provided that the terms permit
modification of the work for the customer's own use and reverse
engineering for debugging such modifications.

  You must give prominent notice with each copy of the work that the
Library is used in it and that the Library and its use are covered by
this License. You must supply a copy of this License. If the work
during execution displays copyright notices, you must include the
copyright notice for the Library among them, as well as a reference
directing the user to the copy of this License. Also, you must do one
of these things:

    a) Accompany the work with the complete corresponding
    machine-readable source code for the Library including whatever
    changes were used in the work (which must be distributed under
    Sections 1 and 2 above); and, if the work is an executable linked
    with the Library, with the complete machine-readable "work that
    uses the Library", as object code and/or source code, so that the
    user can modify the Library and then relink to produce a modified
    executable containing the modified Library. (It is understood
    that the user who changes the contents of definitions files in the
    Library will not necessarily be able to recompile the application
    to use the modified definitions.)

    b) Use a suitable shared library mechanism for linking with the
    Library. A suitable mechanism is one that (1) uses at run time a
    copy of the library already present on the user's computer system,
    rather than copying library functions into the executable, and (2)
    will operate properly with a modified version of the library, if
    the user installs one, as long as the modified version is
    interface-compatible with the version that the work was made with.

    c) Accompany the work with a written offer, valid for at
    least three years, to give the same user the materials
    specified in Subsection 6a, above, for a charge no more
    than the cost of performing this distribution.

    d) If distribution of the work is made by offering access to copy
    from a designated place, offer equivalent access to copy the above
    specified materials from the same place.

    e) Verify that the user has already received a copy of these
    materials or that you have already sent this user a copy.

  For an executable, the required form of the "work that uses the
Library" must include any data and utility programs needed for
reproducing the executable from it. However, as a special exception,
the materials to be distributed need not include anything that is
normally distributed (in either source or binary form) with the major
components (compiler, kernel, and so on) of the operating system on
which the executable runs, unless that component itself accompanies
the executable.

  It may happen that this requirement contradicts the license
restrictions of other proprietary libraries that do not normally
accompany the operating system. Such a contradiction means you cannot
use both them and the Library together in an executable that you
distribute.

  7. You may place library facilities that are a work based on the
Library side-by-side in a single library together with other library
facilities not covered by this License, and distribute such a combined
library, provided that the separate distribution of the work based on
the Library and of the other library facilities is otherwise
permitted, and provided that you do these two things:

    a) Accompany the combined library with a copy of the same work
    based on the Library, uncombined with any other library
    facilities. This must be distributed under the terms of the
    Sections above.

    b) Give prominent notice with the combined library of the fact
    that part of it is a work based on the Library, and explaining
    where to find the accompanying uncombined form of the same work.

  8. You may not copy, modify, sublicense, link with, or distribute
the Library except as expressly provided under this License. Any
attempt otherwise to copy, modify, sublicense, link with, or
distribute the Library is void, and will automatically terminate your
rights under this License. However, parties who have received copies,
or rights, from you under this License will not have their licenses
terminated so long as such parties remain in full compliance.

  9. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Library or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Library (or any work based on the
Library), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Library or works based on it.

  10. Each time you redistribute the Library (or any work based on the
Library), the recipient automatically receives a license from the
original licensor to copy, distribute, link with or modify the Library
subject to these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties with
this License.

  11. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Library at all. For example, if a patent
license would not permit royalty-free redistribution of the Library by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Library.

  If any portion of this section is held invalid or unenforceable under any
particular circumstance, the balance of the section is intended to apply,
and the section as a whole is intended to apply in other circumstances.

  It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.

  This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.

  12. If the distribution and/or use of the Library is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Library under this License may add
an explicit geographical distribution limitation excluding those countries,
so that distribution is permitted only in or among countries not thus
excluded. In such case, this License incorporates the limitation as if
written in the body of this License.

  13. The Free Software Foundation may publish revised and/or new
versions of the Lesser General Public License from time to time.
Such new versions will be similar in spirit to the present version,
but may differ in detail to address new problems or concerns.

  Each version is given a distinguishing version number. If the Library
specifies a version number of this License which applies to it and
"any later version", you have the option of following the terms and
conditions either of that version or of any later version published by
the Free Software Foundation. If the Library does not specify a
license version number, you may choose any version ever published by
the Free Software Foundation.

  14. If you wish to incorporate parts of the Library into other free
programs whose distribution conditions are incompatible with these,
write to the author to ask for permission. For software which is
copyrighted by the Free Software Foundation, write to the Free
Software Foundation; we sometimes make exceptions for this. Our
decision will be guided by the two goals of preserving the free status
of all derivatives of our free software and of promoting the sharing
and reuse of software generally.

                            NO WARRANTY

  15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.

  16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
DAMAGES.

                     END OF TERMS AND CONDITIONS

           How to Apply These Terms to Your New Libraries

  If you develop a new library, and you want it to be of the greatest
possible use to the public, we recommend making it free software that
everyone can redistribute and change. You can do so by permitting
redistribution under these terms (or, alternatively, under the terms of the
ordinary General Public License).

  To apply these terms, attach the following notices to the library. It is
safest to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least the
"copyright" line and a pointer to where the full notice is found.

    <one line to give the library's name and a brief idea of what it does.>
    Copyright (C) <year> <name of author>

    This library is free software; you can redistribute it and/or
    modify it under the terms of the GNU Lesser General Public
    License as published by the Free Software Foundation; either
    version 2.1 of the License, or (at your option) any later version.

    This library is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
    Lesser General Public License for more details.

    You should have received a copy of the GNU Lesser General Public
    License along with this library; if not, write to the Free Software
    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

Also add information on how to contact you by electronic and paper mail.

You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the library, if
necessary. Here is a sample; alter the names:

  Yoyodyne, Inc., hereby disclaims all copyright interest in the
  library `Frob' (a library for tweaking knobs) written by James Random Hacker.

  <signature of Ty Coon>, 1 April 1990
  Ty Coon, President of Vice

That's all there is to it!
144
lib/adodbapi/process_connect_string.py
Normal file
@@ -0,0 +1,144 @@
""" a clumsy attempt at a macro language to let the programmer execute code on the server (ex: determine 64bit)"""
import os  # used by the "getenv" macro below

from . import is64bit as is64bit


def macro_call(macro_name, args, kwargs):
    """allow the programmer to perform limited processing on the server by passing macro names and args

    :new_key - the key name the macro will create
    :args[0] - macro name
    :args[1:] - any arguments
    :code - the value of the keyword item
    :kwargs - the connection keyword dictionary. ??key has been removed
    --> the value to put in for kwargs['name'] = value
    """
    if isinstance(args, str):
        args = [
            args
        ]  # the user forgot to pass a sequence, so make a string into args[0]
    new_key = args[0]
    try:
        if macro_name == "is64bit":
            if is64bit.Python():  # if on 64 bit Python
                return new_key, args[1]  # return first argument
            else:
                try:
                    return new_key, args[2]  # else return second argument (if defined)
                except IndexError:
                    return new_key, ""  # else return blank

        elif (
            macro_name == "getuser"
        ):  # get the name of the user the server is logged in under
            if new_key not in kwargs:
                import getpass

                return new_key, getpass.getuser()

        elif macro_name == "getnode":  # get the name of the computer running the server
            import platform

            try:
                return new_key, args[1] % platform.node()
            except IndexError:
                return new_key, platform.node()

        elif macro_name == "getenv":  # expand the server's environment variable args[1]
            try:
                dflt = args[2]  # if not found, default from args[2]
            except IndexError:  # or blank
                dflt = ""
            return new_key, os.environ.get(args[1], dflt)

        elif macro_name == "auto_security":
            if (
                "user" not in kwargs or not kwargs["user"]
            ):  # missing, blank, or Null username
                return new_key, "Integrated Security=SSPI"
            return new_key, "User ID=%(user)s; Password=%(password)s" % kwargs

        elif (
            macro_name == "find_temp_test_path"
        ):  # helper function for testing ado operation -- undocumented
            import tempfile

            return new_key, os.path.join(
                tempfile.gettempdir(), "adodbapi_test", args[1]
            )

        raise ValueError("Unknown connect string macro=%s" % macro_name)
    except:
        raise ValueError("Error in macro processing %s %s" % (macro_name, repr(args)))


def process(
    args, kwargs, expand_macros=False
):  # --> connection string with keyword arguments processed.
    """attempts to inject arguments into a connection string using Python "%" operator for strings

    co: adodbapi connection object
    args: positional parameters from the .connect() call
    kwargs: keyword arguments from the .connect() call
    """
    try:
        dsn = args[0]
    except IndexError:
        dsn = None
    if isinstance(
        dsn, dict
    ):  # as a convenience the first argument may be django settings
        kwargs.update(dsn)
    elif (
        dsn
    ):  # the connection string is passed to the connection as part of the keyword dictionary
        kwargs["connection_string"] = dsn
    try:
        a1 = args[1]
    except IndexError:
        a1 = None
    # historically, the second positional argument might be a timeout value
    if isinstance(a1, int):
        kwargs["timeout"] = a1
    # if the second positional argument is a string, then it is user
    elif isinstance(a1, str):
        kwargs["user"] = a1
    # if the second positional argument is a dictionary, use it as keyword arguments, too
    elif isinstance(a1, dict):
        kwargs.update(a1)
    try:
        kwargs["password"] = args[2]  # the third positional argument is password
        kwargs["host"] = args[3]  # the fourth positional argument is host name
        kwargs["database"] = args[4]  # the fifth positional argument is database name
    except IndexError:
        pass

    # make sure connection string is defined somehow
    if "connection_string" not in kwargs:
        try:  # perhaps 'dsn' was defined
            kwargs["connection_string"] = kwargs["dsn"]
        except KeyError:
            try:  # as a last effort, use the "host" keyword
                kwargs["connection_string"] = kwargs["host"]
            except KeyError:
                raise TypeError("Must define 'connection_string' for ado connections")
    if expand_macros:
        for kwarg in list(kwargs.keys()):
            if kwarg.startswith("macro_"):  # If a key defines a macro
                macro_name = kwarg[6:]  # name without the "macro_"
                macro_code = kwargs.pop(
                    kwarg
                )  # we remove the macro_key and get the code to execute
                new_key, rslt = macro_call(
                    macro_name, macro_code, kwargs
                )  # run the code in the local context
                kwargs[new_key] = rslt  # put the result back in the keywords dict
    # special processing for PyRO IPv6 host address
    try:
        s = kwargs["proxy_host"]
        if ":" in s:  # it is an IPv6 address
            if s[0] != "[":  # is not surrounded by brackets
                kwargs["proxy_host"] = s.join(("[", "]"))  # put it in brackets
    except KeyError:
        pass
    return kwargs
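For illustration, a sketch of how the macro keywords are consumed: any keyword beginning with "macro_" is popped from the dictionary and replaced by the key/value pair the macro returns. The connection string and provider names below are example values only:

    import adodbapi.process_connect_string as pcs

    kwargs = pcs.process(
        ("Provider=%(prov)s;Data Source=test.mdb",),  # hypothetical connection string
        {"macro_is64bit": ["prov", "Microsoft.ACE.OLEDB.12.0", "Microsoft.Jet.OLEDB.4.0"]},
        expand_macros=True,
    )
    # kwargs["connection_string"] % kwargs now yields the provider matching the Python word size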
92
lib/adodbapi/readme.txt
Normal file
@@ -0,0 +1,92 @@
Project
-------
adodbapi

A Python DB-API 2.0 (PEP-249) module that makes it easy to use Microsoft ADO
for connecting with databases and other data sources
using either CPython or IronPython.

Home page: <http://sourceforge.net/projects/adodbapi>

Features:
* 100% DB-API 2.0 (PEP-249) compliant (including most extensions and recommendations).
* Includes pyunit testcases that describe how to use the module.
* Fully implemented in Python -- runs in Python 2.5+, Python 3.0+, and IronPython 2.6+.
* Licensed under the LGPL license, which means that it can be used freely even in commercial programs subject to certain restrictions.
* The user can choose between paramstyles: 'qmark' 'named' 'format' 'pyformat' 'dynamic'
* Supports data retrieval by column name e.g.:
      for row in myCursor.execute("select name,age from students"):
          print("Student", row.name, "is", row.age, "years old.")
* Supports user-definable system-to-Python data conversion functions (selected by ADO data type, or by column)

Prerequisites:
* C Python 2.7 or 3.5 or higher
  and pywin32 (Mark Hammond's python for windows extensions.)
or
  IronPython 2.7 or higher. (works in IPy2.0 for all data types except BUFFER)

Installation:
* (C-Python on Windows): Install pywin32 ("pip install pywin32") which includes adodbapi.
* (IronPython on Windows): Download adodbapi from http://sf.net/projects/adodbapi. Unpack the zip.
  Open a command window as an administrator. CD to the folder containing the unzipped files.
  Run "setup.py install" using the IronPython of your choice.

NOTE: ...........
If you do not like the new default operation of returning Numeric columns as decimal.Decimal,
you can select other options by the user defined conversion feature.
Try:
    adodbapi.apibase.variantConversions[adodbapi.ado_consts.adNumeric] = adodbapi.apibase.cvtString
or:
    adodbapi.apibase.variantConversions[adodbapi.ado_consts.adNumeric] = adodbapi.apibase.cvtFloat
or:
    adodbapi.apibase.variantConversions[adodbapi.ado_consts.adNumeric] = write_your_own_conversion_function
............
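A user-defined conversion function is sketched below; it assumes (as cvtString and cvtFloat above suggest) that a converter receives the raw ADO column value and returns the desired Python object. The function name is hypothetical:

    def my_numeric_converter(variant_value):  # hypothetical converter
        return int(variant_value)  # e.g. force Numeric columns to plain int

    adodbapi.apibase.variantConversions[adodbapi.ado_consts.adNumeric] = my_numeric_converter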
notes for 2.6.2:
The definitive source has been moved to https://github.com/mhammond/pywin32/tree/master/adodbapi.
Remote has proven too hard to configure and test with Pyro4. I am moving it to unsupported status
until I can change to a different connection method.

what's new in version 2.6
A cursor.prepare() method and support for prepared SQL statements.
Lots of refactoring, especially of the Remote and Server modules (still to be treated as Beta code).
The quick start document 'quick_reference.odt' will export as a nice-looking pdf.
Added paramstyles 'pyformat' and 'dynamic'. If your 'paramstyle' is 'named' you _must_ pass a dictionary of
parameters to your .execute() method. If your 'paramstyle' is 'format' 'pyformat' or 'dynamic', you _may_
pass a dictionary of parameters -- provided your SQL operation string is formatted correctly.
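For example, a minimal sketch assuming the PEP 249 ':name' placeholder form for the 'named' paramstyle, with a hypothetical students table:

    crsr = conn.cursor()
    crsr.execute(
        "select name, age from students where age = :target_age",
        {"target_age": 21},
    )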
what's new in version 2.5
Remote module: (works on Linux!) allows a Windows computer to serve ADO databases via PyRO
Server module: PyRO server for ADO. Run using a command like: C:>python -m adodbapi.server
(server has simple connection string macros: is64bit, getuser, sql_provider, auto_security)
Brief documentation included. See adodbapi/examples folder adodbapi.rtf
New connection method conn.get_table_names() --> list of names of tables in database

Vastly refactored. Data conversion things have been moved to the new adodbapi.apibase module.
Many former module-level attributes are now class attributes. (Should be more thread-safe.)
Connection objects are now context managers for transactions and will commit or rollback.
Cursor objects are context managers and will automatically close themselves.
Autocommit can be switched on and off.
Keyword and positional arguments on the connect() method work as documented in PEP 249.
Keyword arguments from the connect call can be formatted into the connection string.
New keyword arguments defined, such as: autocommit, paramstyle, remote_proxy, remote_port.
*** Breaking change: variantConversion lookups are simplified: the following will raise KeyError:
    oldconverter = adodbapi.variantConversions[adodbapi.adoStringTypes]
Refactor as: oldconverter = adodbapi.variantConversions[adodbapi.adoStringTypes[0]]

License
-------
LGPL, see http://www.opensource.org/licenses/lgpl-license.php

Documentation
-------------

Look at adodbapi/quick_reference.md and
http://www.python.org/topics/database/DatabaseAPI-2.0.html,
read the examples in adodbapi/examples,
and look at the test cases in the adodbapi/test directory.

Mailing lists
-------------
The adodbapi mailing lists have been deactivated. Submit comments to the
pywin32 or IronPython mailing lists.
-- the bug tracker on sourceforge.net/projects/adodbapi may be checked (infrequently).
-- please use: https://github.com/mhammond/pywin32/issues
634
lib/adodbapi/remote.py
Normal file
@@ -0,0 +1,634 @@
|
||||||
|
"""adodbapi.remote - A python DB API 2.0 (PEP 249) interface to Microsoft ADO
|
||||||
|
|
||||||
|
Copyright (C) 2002 Henrik Ekelund, version 2.1 by Vernon Cole
|
||||||
|
* http://sourceforge.net/projects/pywin32
|
||||||
|
* http://sourceforge.net/projects/adodbapi
|
||||||
|
|
||||||
|
This library is free software; you can redistribute it and/or
|
||||||
|
modify it under the terms of the GNU Lesser General Public
|
||||||
|
License as published by the Free Software Foundation; either
|
||||||
|
version 2.1 of the License, or (at your option) any later version.
|
||||||
|
|
||||||
|
This library is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||||
|
Lesser General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Lesser General Public
|
||||||
|
License along with this library; if not, write to the Free Software
|
||||||
|
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||||
|
|
||||||
|
django adaptations and refactoring thanks to Adam Vandenberg
|
||||||
|
|
||||||
|
DB-API 2.0 specification: http://www.python.org/dev/peps/pep-0249/
|
||||||
|
|
||||||
|
This module source should run correctly in CPython versions 2.5 and later,
|
||||||
|
or IronPython version 2.7 and later,
|
||||||
|
or, after running through 2to3.py, CPython 3.0 or later.
|
||||||
|
"""
|
||||||
|
|
||||||
|
__version__ = "2.6.0.4"
|
||||||
|
version = "adodbapi.remote v" + __version__
|
||||||
|
|
||||||
|
import array
|
||||||
|
import datetime
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
|
||||||
|
# Pyro4 is required for server and remote operation --> https://pypi.python.org/pypi/Pyro4/
|
||||||
|
try:
|
||||||
|
import Pyro4
|
||||||
|
except ImportError:
|
||||||
|
print('* * * Sorry, server operation requires Pyro4. Please "pip import" it.')
|
||||||
|
exit(11)
|
||||||
|
|
||||||
|
import adodbapi
|
||||||
|
import adodbapi.apibase as api
|
||||||
|
import adodbapi.process_connect_string
|
||||||
|
from adodbapi.apibase import ProgrammingError
|
||||||
|
|
||||||
|
_BaseException = api._BaseException
|
||||||
|
|
||||||
|
sys.excepthook = Pyro4.util.excepthook
|
||||||
|
Pyro4.config.PREFER_IP_VERSION = 0 # allow system to prefer IPv6
|
||||||
|
Pyro4.config.COMMTIMEOUT = 40.0 # a bit longer than the default SQL server Gtimeout
|
||||||
|
Pyro4.config.SERIALIZER = "pickle"
|
||||||
|
|
||||||
|
try:
|
||||||
|
verbose = int(os.environ["ADODBAPI_VERBOSE"])
|
||||||
|
except:
|
||||||
|
verbose = False
|
||||||
|
if verbose:
|
||||||
|
print(version)
|
||||||
|
|
||||||
|
# --- define objects to smooth out Python3 <-> Python 2.x differences
|
||||||
|
unicodeType = str # this line will be altered by 2to3.py to '= str'
|
||||||
|
longType = int # this line will be altered by 2to3.py to '= int'
|
||||||
|
StringTypes = str
|
||||||
|
makeByteBuffer = bytes
|
||||||
|
memoryViewType = memoryview
|
||||||
|
|
||||||
|
# -----------------------------------------------------------
|
||||||
|
# conversion functions mandated by PEP 249
|
||||||
|
Binary = makeByteBuffer # override the function from apibase.py
|
||||||
|
|
||||||
|
|
||||||
|
def Date(year, month, day):
|
||||||
|
return datetime.date(year, month, day) # dateconverter.Date(year,month,day)
|
||||||
|
|
||||||
|
|
||||||
|
def Time(hour, minute, second):
|
||||||
|
return datetime.time(hour, minute, second) # dateconverter.Time(hour,minute,second)
|
||||||
|
|
||||||
|
|
||||||
|
def Timestamp(year, month, day, hour, minute, second):
|
||||||
|
return datetime.datetime(year, month, day, hour, minute, second)
|
||||||
|
|
||||||
|
|
||||||
|
def DateFromTicks(ticks):
|
||||||
|
return Date(*time.gmtime(ticks)[:3])
|
||||||
|
|
||||||
|
|
||||||
|
def TimeFromTicks(ticks):
|
||||||
|
return Time(*time.gmtime(ticks)[3:6])
|
||||||
|
|
||||||
|
|
||||||
|
def TimestampFromTicks(ticks):
|
||||||
|
return Timestamp(*time.gmtime(ticks)[:6])
|
||||||
|
|
||||||
|
|
||||||
|
def connect(*args, **kwargs): # --> a remote db-api connection object
|
||||||
|
"""Create and open a remote db-api database connection object"""
|
||||||
|
# process the argument list the programmer gave us
|
||||||
|
kwargs = adodbapi.process_connect_string.process(args, kwargs)
|
||||||
|
# the "proxy_xxx" keys tell us where to find the PyRO proxy server
|
||||||
|
kwargs.setdefault(
|
||||||
|
"pyro_connection", "PYRO:ado.connection@%(proxy_host)s:%(proxy_port)s"
|
||||||
|
)
|
||||||
|
if not "proxy_port" in kwargs:
|
||||||
|
try:
|
||||||
|
pport = os.environ["PROXY_PORT"]
|
||||||
|
except KeyError:
|
||||||
|
pport = 9099
|
||||||
|
kwargs["proxy_port"] = pport
|
||||||
|
if not "proxy_host" in kwargs or not kwargs["proxy_host"]:
|
||||||
|
try:
|
||||||
|
phost = os.environ["PROXY_HOST"]
|
||||||
|
except KeyError:
|
||||||
|
phost = "[::1]" # '127.0.0.1'
|
||||||
|
kwargs["proxy_host"] = phost
|
||||||
|
ado_uri = kwargs["pyro_connection"] % kwargs
|
||||||
|
# ask PyRO make us a remote connection object
|
||||||
|
auto_retry = 3
|
||||||
|
while auto_retry:
|
||||||
|
try:
|
||||||
|
dispatcher = Pyro4.Proxy(ado_uri)
|
||||||
|
if "comm_timeout" in kwargs:
|
||||||
|
dispatcher._pyroTimeout = float(kwargs["comm_timeout"])
|
||||||
|
uri = dispatcher.make_connection()
|
||||||
|
break
|
||||||
|
except Pyro4.core.errors.PyroError:
|
||||||
|
auto_retry -= 1
|
||||||
|
if auto_retry:
|
||||||
|
time.sleep(1)
|
||||||
|
else:
|
||||||
|
raise api.DatabaseError("Cannot create connection to=%s" % ado_uri)
|
||||||
|
|
||||||
|
conn_uri = fix_uri(uri, kwargs) # get a host connection from the proxy server
|
||||||
|
while auto_retry:
|
||||||
|
try:
|
||||||
|
host_conn = Pyro4.Proxy(
|
||||||
|
conn_uri
|
||||||
|
) # bring up an exclusive Pyro connection for my ADO connection
|
||||||
|
break
|
||||||
|
except Pyro4.core.errors.PyroError:
|
||||||
|
auto_retry -= 1
|
||||||
|
if auto_retry:
|
||||||
|
time.sleep(1)
|
||||||
|
else:
|
||||||
|
raise api.DatabaseError(
|
||||||
|
"Cannot create ADO connection object using=%s" % conn_uri
|
||||||
|
)
|
||||||
|
if "comm_timeout" in kwargs:
|
||||||
|
host_conn._pyroTimeout = float(kwargs["comm_timeout"])
|
||||||
|
# make a local clone
|
||||||
|
myConn = Connection()
|
||||||
|
while auto_retry:
|
||||||
|
try:
|
||||||
|
myConn.connect(
|
||||||
|
kwargs, host_conn
|
||||||
|
) # call my connect method -- hand him the host connection
|
||||||
|
break
|
||||||
|
except Pyro4.core.errors.PyroError:
|
||||||
|
auto_retry -= 1
|
||||||
|
if auto_retry:
|
||||||
|
time.sleep(1)
|
||||||
|
else:
|
||||||
|
raise api.DatabaseError(
|
||||||
|
"Pyro error creating connection to/thru=%s" % repr(kwargs)
|
||||||
|
)
|
||||||
|
except _BaseException as e:
|
||||||
|
raise api.DatabaseError(
|
||||||
|
"Error creating remote connection to=%s, e=%s, %s"
|
||||||
|
% (repr(kwargs), repr(e), sys.exc_info()[2])
|
||||||
|
)
|
||||||
|
return myConn
|
||||||
|
|
||||||
|
|
||||||
|
def fix_uri(uri, kwargs):
|
||||||
|
"""convert a generic pyro uri with '0.0.0.0' into the address we actually called"""
|
||||||
|
u = uri.asString()
|
||||||
|
s = u.split("[::0]") # IPv6 generic address
|
||||||
|
if len(s) == 1: # did not find one
|
||||||
|
s = u.split("0.0.0.0") # IPv4 generic address
|
||||||
|
if len(s) > 1: # found a generic
|
||||||
|
return kwargs["proxy_host"].join(s) # fill in our address for the host
|
||||||
|
return uri
|
||||||
|
|
||||||
|
|
||||||
|
# # # # # ----- the Class that defines a connection ----- # # # # #
|
||||||
|
class Connection(object):
|
||||||
|
# include connection attributes required by api definition.
|
||||||
|
Warning = api.Warning
|
||||||
|
Error = api.Error
|
||||||
|
InterfaceError = api.InterfaceError
|
||||||
|
DataError = api.DataError
|
||||||
|
DatabaseError = api.DatabaseError
|
||||||
|
OperationalError = api.OperationalError
|
||||||
|
IntegrityError = api.IntegrityError
|
||||||
|
InternalError = api.InternalError
|
||||||
|
NotSupportedError = api.NotSupportedError
|
||||||
|
ProgrammingError = api.ProgrammingError
|
||||||
|
# set up some class attributes
|
||||||
|
paramstyle = api.paramstyle
|
||||||
|
|
||||||
|
@property
|
||||||
|
def dbapi(self): # a proposed db-api version 3 extension.
|
||||||
|
"Return a reference to the DBAPI module for this Connection."
|
||||||
|
return api
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.proxy = None
|
||||||
|
self.kwargs = {}
|
||||||
|
self.errorhandler = None
|
||||||
|
self.supportsTransactions = False
|
||||||
|
self.paramstyle = api.paramstyle
|
||||||
|
self.timeout = 30
|
||||||
|
self.cursors = {}
|
||||||
|
|
||||||
|
def connect(self, kwargs, connection_maker):
|
||||||
|
self.kwargs = kwargs
|
||||||
|
if verbose:
|
||||||
|
print('%s attempting: "%s"' % (version, repr(kwargs)))
|
||||||
|
self.proxy = connection_maker
|
||||||
|
##try:
|
||||||
|
ret = self.proxy.connect(kwargs) # ask the server to hook us up
|
||||||
|
##except ImportError, e: # Pyro is trying to import pywinTypes.comerrer
|
||||||
|
## self._raiseConnectionError(api.DatabaseError, 'Proxy cannot connect using=%s' % repr(kwargs))
|
||||||
|
if ret is not True:
|
||||||
|
self._raiseConnectionError(
|
||||||
|
api.OperationalError, "Proxy returns error message=%s" % repr(ret)
|
||||||
|
)
|
||||||
|
|
||||||
|
self.supportsTransactions = self.getIndexedValue("supportsTransactions")
|
||||||
|
self.paramstyle = self.getIndexedValue("paramstyle")
|
||||||
|
self.timeout = self.getIndexedValue("timeout")
|
||||||
|
if verbose:
|
||||||
|
print("adodbapi.remote New connection at %X" % id(self))
|
||||||
|
|
||||||
|
def _raiseConnectionError(self, errorclass, errorvalue):
|
||||||
|
eh = self.errorhandler
|
||||||
|
if eh is None:
|
||||||
|
eh = api.standardErrorHandler
|
||||||
|
eh(self, None, errorclass, errorvalue)
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
"""Close the connection now (rather than whenever __del__ is called).
|
||||||
|
|
||||||
|
The connection will be unusable from this point forward;
|
||||||
|
an Error (or subclass) exception will be raised if any operation is attempted with the connection.
|
||||||
|
The same applies to all cursor objects trying to use the connection.
|
||||||
|
"""
|
||||||
|
for crsr in list(self.cursors.values())[
|
||||||
|
:
|
||||||
|
]: # copy the list, then close each one
|
||||||
|
crsr.close()
|
||||||
|
try:
|
||||||
|
"""close the underlying remote Connection object"""
|
||||||
|
self.proxy.close()
|
||||||
|
if verbose:
|
||||||
|
print("adodbapi.remote Closed connection at %X" % id(self))
|
||||||
|
object.__delattr__(
|
||||||
|
self, "proxy"
|
||||||
|
) # future attempts to use closed cursor will be caught by __getattr__
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def __del__(self):
|
||||||
|
try:
|
||||||
|
self.proxy.close()
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
    def commit(self):
        """Commit any pending transaction to the database.

        Note that if the database supports an auto-commit feature,
        this must be initially off. An interface method may be provided to turn it back on.
        Database modules that do not support transactions should implement this method with void functionality.
        """
        if not self.supportsTransactions:
            return
        result = self.proxy.commit()
        if result:
            self._raiseConnectionError(
                api.OperationalError, "Error during commit: %s" % result
            )

    def _rollback(self):
        """In case a database does provide transactions, this method causes the database to roll back to
        the start of any pending transaction. Closing a connection without committing the changes first will
        cause an implicit rollback to be performed.
        """
        result = self.proxy.rollback()
        if result:
            self._raiseConnectionError(
                api.OperationalError, "Error during rollback: %s" % result
            )

    def __setattr__(self, name, value):
        if name in ("paramstyle", "timeout", "autocommit"):
            if self.proxy:
                self.proxy.send_attribute_to_host(name, value)
        object.__setattr__(self, name, value)  # store attribute locally (too)

    def __getattr__(self, item):
        if item == "rollback":  # the rollback method only appears if the database supports transactions
            if self.supportsTransactions:
                return self._rollback  # return the rollback method so the caller can execute it.
            else:
                raise self.ProgrammingError(
                    "this data provider does not support Rollback"
                )
        elif item in (
            "dbms_name",
            "dbms_version",
            "connection_string",
            "autocommit",
        ):  # 'messages' ):
            return self.getIndexedValue(item)
        elif item == "proxy":
            raise self.ProgrammingError("Attempting to use closed connection")
        else:
            raise self.ProgrammingError('No remote access for attribute="%s"' % item)

    def getIndexedValue(self, index):
        r = self.proxy.get_attribute_for_remote(index)
        return r
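
    # Illustrative note (not part of the original source): "rollback" is
    # synthesized in __getattr__ above, so it only exists when the remote
    # server reports transaction support.  A defensive caller can do, e.g.:
    #
    #     try:
    #         con.rollback()
    #     except api.ProgrammingError:
    #         pass  # backend has no transaction support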

    def cursor(self):
        "Return a new Cursor Object using the connection."
        myCursor = Cursor(self)
        return myCursor

    def _i_am_here(self, crsr):
        "message from a new cursor proclaiming its existence"
        self.cursors[crsr.id] = crsr

    def _i_am_closing(self, crsr):
        "message from a cursor giving connection a chance to clean up"
        try:
            del self.cursors[crsr.id]
        except:
            pass

    def __enter__(self):  # Connections are context managers
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type:
            self._rollback()  # automatic rollback on errors
        else:
            self.commit()

    def get_table_names(self):
        return self.proxy.get_table_names()

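
# Illustrative usage (not part of the original source): the context-manager
# methods above commit on a clean exit and roll back when the block raises,
# e.g., assuming "con" is an open remote Connection:
#
#     with con:
#         con.cursor().execute("insert into t values (1)")  # committed on success
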
def fixpickle(x):
    """pickle barfs on buffer(x) so we pass as array.array(x) then restore to original form for .execute()"""
    if x is None:
        return None
    if isinstance(x, dict):
        # for 'named' paramstyle user will pass a mapping
        newargs = {}
        for arg, val in list(x.items()):
            if isinstance(val, memoryViewType):
                newval = array.array("B")
                newval.fromstring(val)
                newargs[arg] = newval
            else:
                newargs[arg] = val
        return newargs
    # if not a mapping, then a sequence
    newargs = []
    for arg in x:
        if isinstance(arg, memoryViewType):
            newarg = array.array("B")
            newarg.fromstring(arg)
            newargs.append(newarg)
        else:
            newargs.append(arg)
    return newargs

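
# Illustrative sketch (not part of the original source): fixpickle() exists
# because memoryview parameters cannot be pickled for the wire.  The
# conversion is roughly equivalent to:
#
#     import array
#     payload = memoryview(b"\x00\x01")
#     wire_safe = array.array("B", payload.tobytes())  # picklable byte copy

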
class Cursor(object):
    def __init__(self, connection):
        self.command = None
        self.errorhandler = None  ## was: connection.errorhandler
        self.connection = connection
        self.proxy = self.connection.proxy
        self.rs = None  # the fetchable data for this cursor
        self.converters = NotImplemented
        self.id = connection.proxy.build_cursor()
        connection._i_am_here(self)
        self.recordset_format = api.RS_REMOTE
        if verbose:
            print(
                "%s New cursor at %X on conn %X"
                % (version, id(self), id(self.connection))
            )

    def prepare(self, operation):
        self.command = operation
        try:
            del self.description
        except AttributeError:
            pass
        self.proxy.crsr_prepare(self.id, operation)

    def __iter__(self):  # [2.1 Zamarev]
        return iter(self.fetchone, None)  # [2.1 Zamarev]

    def __next__(self):
        r = self.fetchone()
        if r:
            return r
        raise StopIteration

    def __enter__(self):
        "Allow database cursors to be used with context managers."
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        "Allow database cursors to be used with context managers."
        self.close()

    def __getattr__(self, key):
        if key == "numberOfColumns":
            try:
                return len(self.rs[0])
            except:
                return 0
        if key == "description":
            try:
                self.description = self.proxy.crsr_get_description(self.id)[:]
                return self.description
            except TypeError:
                return None
        if key == "columnNames":
            try:
                r = dict(
                    self.proxy.crsr_get_columnNames(self.id)
                )  # copy the remote columns
            except TypeError:
                r = {}
            self.columnNames = r
            return r

        if key == "remote_cursor":
            raise api.OperationalError
        try:
            return self.proxy.crsr_get_attribute_for_remote(self.id, key)
        except AttributeError:
            raise api.InternalError(
                'Failure getting attribute "%s" from proxy cursor.' % key
            )

    def __setattr__(self, key, value):
        if key == "arraysize":
            self.proxy.crsr_set_arraysize(self.id, value)
        if key == "paramstyle":
            if value in api.accepted_paramstyles:
                self.proxy.crsr_set_paramstyle(self.id, value)
            else:
                self._raiseCursorError(
                    api.ProgrammingError, 'invalid paramstyle ="%s"' % value
                )
        object.__setattr__(self, key, value)

    def _raiseCursorError(self, errorclass, errorvalue):
        eh = self.errorhandler
        if eh is None:
            eh = api.standardErrorHandler
        eh(self.connection, self, errorclass, errorvalue)

    def execute(self, operation, parameters=None):
        if self.connection is None:
            self._raiseCursorError(
                # qualified as api.ProgrammingError; the bare name is not defined in this module
                api.ProgrammingError, "Attempted operation on closed cursor"
            )
        self.command = operation
        try:
            del self.description
        except AttributeError:
            pass
        try:
            del self.columnNames
        except AttributeError:
            pass
        fp = fixpickle(parameters)
        if verbose > 2:
            print(
                '%s executing "%s" with params=%s'
                % (version, operation, repr(parameters))
            )
        result = self.proxy.crsr_execute(self.id, operation, fp)
        if result:  # an exception was triggered
            self._raiseCursorError(result[0], result[1])
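
    # Illustrative usage (not part of the original source), assuming "crsr"
    # is an open remote cursor; parameters pass through fixpickle() before
    # being shipped to the proxy:
    #
    #     crsr.execute("select name from booze where name = ?", ("Victoria Bitter",))
    #     row = crsr.fetchone()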

    def executemany(self, operation, seq_of_parameters):
        if self.connection is None:
            self._raiseCursorError(
                api.ProgrammingError, "Attempted operation on closed cursor"
            )
        self.command = operation
        try:
            del self.description
        except AttributeError:
            pass
        try:
            del self.columnNames
        except AttributeError:
            pass
        sq = [fixpickle(x) for x in seq_of_parameters]
        if verbose > 2:
            print(
                '%s executemany "%s" with params=%s'
                % (version, operation, repr(seq_of_parameters))
            )
        self.proxy.crsr_executemany(self.id, operation, sq)

    def nextset(self):
        try:
            del self.description
        except AttributeError:
            pass
        try:
            del self.columnNames
        except AttributeError:
            pass
        if verbose > 2:
            print("%s nextset" % version)
        return self.proxy.crsr_nextset(self.id)

    def callproc(self, procname, parameters=None):
        if self.connection is None:
            self._raiseCursorError(
                api.ProgrammingError, "Attempted operation on closed cursor"
            )
        self.command = procname
        try:
            del self.description
        except AttributeError:
            pass
        try:
            del self.columnNames
        except AttributeError:
            pass
        fp = fixpickle(parameters)
        if verbose > 2:
            print(
                '%s callproc "%s" with params=%s'
                % (version, procname, repr(parameters))
            )
        return self.proxy.crsr_callproc(self.id, procname, fp)

    def fetchone(self):
        try:
            f1 = self.proxy.crsr_fetchone(self.id)
        except _BaseException as e:
            self._raiseCursorError(api.DatabaseError, e)
        else:
            if f1 is None:
                return None
            self.rs = [f1]
            return api.SQLrows(self.rs, 1, self)[0]  # new object to hold the results of the fetch

    def fetchmany(self, size=None):
        try:
            self.rs = self.proxy.crsr_fetchmany(self.id, size)
            if not self.rs:
                return []
            r = api.SQLrows(self.rs, len(self.rs), self)
            return r
        except Exception as e:
            self._raiseCursorError(api.DatabaseError, e)

    def fetchall(self):
        try:
            self.rs = self.proxy.crsr_fetchall(self.id)
            if not self.rs:
                return []
            return api.SQLrows(self.rs, len(self.rs), self)
        except Exception as e:
            self._raiseCursorError(api.DatabaseError, e)

    def close(self):
        if self.connection is None:
            return
        self.connection._i_am_closing(self)  # take me off the connection's cursors list
        try:
            self.proxy.crsr_close(self.id)
        except:
            pass
        try:
            del self.description
        except:
            pass
        try:
            del self.rs  # let go of the recordset
        except:
            pass
        self.connection = None  # this will make all future method calls on me throw an exception
        self.proxy = None
        if verbose:
            print("adodbapi.remote Closed cursor at %X" % id(self))

    def __del__(self):
        try:
            self.close()
        except:
            pass

    def setinputsizes(self, sizes):
        pass

    def setoutputsize(self, size, column=None):
        pass
15
lib/adodbapi/schema_table.py
Normal file

@@ -0,0 +1,15 @@
"""call using an open ADO connection --> list of table names"""
|
||||||
|
from . import adodbapi
|
||||||
|
|
||||||
|
|
||||||
|
def names(connection_object):
|
||||||
|
ado = connection_object.adoConn
|
||||||
|
schema = ado.OpenSchema(20) # constant = adSchemaTables
|
||||||
|
|
||||||
|
tables = []
|
||||||
|
while not schema.EOF:
|
||||||
|
name = adodbapi.getIndexedValue(schema.Fields, "TABLE_NAME").Value
|
||||||
|
tables.append(name)
|
||||||
|
schema.MoveNext()
|
||||||
|
del schema
|
||||||
|
return tables
|
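
# Illustrative usage (not part of the original source), assuming an open
# local (not remote) adodbapi connection, since names() touches .adoConn:
#
#     import adodbapi
#     from adodbapi import schema_table
#     con = adodbapi.connect("Provider=...;Data Source=...")  # placeholder string
#     print(schema_table.names(con))  # -> ['Table1', 'Table2', ...]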
70
lib/adodbapi/setup.py
Normal file

@@ -0,0 +1,70 @@
"""adodbapi -- a pure Python PEP 249 DB-API package using Microsoft ADO
|
||||||
|
|
||||||
|
Adodbapi can be run on CPython 3.5 and later.
|
||||||
|
or IronPython version 2.6 and later (in theory, possibly no longer in practice!)
|
||||||
|
"""
|
||||||
|
CLASSIFIERS = """\
|
||||||
|
Development Status :: 5 - Production/Stable
|
||||||
|
Intended Audience :: Developers
|
||||||
|
License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)
|
||||||
|
Operating System :: Microsoft :: Windows
|
||||||
|
Operating System :: POSIX :: Linux
|
||||||
|
Programming Language :: Python
|
||||||
|
Programming Language :: Python :: 3
|
||||||
|
Programming Language :: SQL
|
||||||
|
Topic :: Software Development
|
||||||
|
Topic :: Software Development :: Libraries :: Python Modules
|
||||||
|
Topic :: Database
|
||||||
|
"""
|
||||||
|
|
||||||
|
NAME = "adodbapi"
|
||||||
|
MAINTAINER = "Vernon Cole"
|
||||||
|
MAINTAINER_EMAIL = "vernondcole@gmail.com"
|
||||||
|
DESCRIPTION = (
|
||||||
|
"""A pure Python package implementing PEP 249 DB-API using Microsoft ADO."""
|
||||||
|
)
|
||||||
|
URL = "http://sourceforge.net/projects/adodbapi"
|
||||||
|
LICENSE = "LGPL"
|
||||||
|
CLASSIFIERS = filter(None, CLASSIFIERS.split("\n"))
|
||||||
|
AUTHOR = "Henrik Ekelund, Vernon Cole, et.al."
|
||||||
|
AUTHOR_EMAIL = "vernondcole@gmail.com"
|
||||||
|
PLATFORMS = ["Windows", "Linux"]
|
||||||
|
|
||||||
|
VERSION = None # in case searching for version fails
|
||||||
|
a = open("adodbapi.py") # find the version string in the source code
|
||||||
|
for line in a:
|
||||||
|
if "__version__" in line:
|
||||||
|
VERSION = line.split("'")[1]
|
||||||
|
print('adodbapi version="%s"' % VERSION)
|
||||||
|
break
|
||||||
|
a.close()
|
||||||
|
|
||||||
|
|
||||||
|
def setup_package():
|
||||||
|
from distutils.command.build_py import build_py
|
||||||
|
from distutils.core import setup
|
||||||
|
|
||||||
|
setup(
|
||||||
|
cmdclass={"build_py": build_py},
|
||||||
|
name=NAME,
|
||||||
|
maintainer=MAINTAINER,
|
||||||
|
maintainer_email=MAINTAINER_EMAIL,
|
||||||
|
description=DESCRIPTION,
|
||||||
|
url=URL,
|
||||||
|
keywords="database ado odbc dbapi db-api Microsoft SQL",
|
||||||
|
## download_url=DOWNLOAD_URL,
|
||||||
|
long_description=open("README.txt").read(),
|
||||||
|
license=LICENSE,
|
||||||
|
classifiers=CLASSIFIERS,
|
||||||
|
author=AUTHOR,
|
||||||
|
author_email=AUTHOR_EMAIL,
|
||||||
|
platforms=PLATFORMS,
|
||||||
|
version=VERSION,
|
||||||
|
package_dir={"adodbapi": ""},
|
||||||
|
packages=["adodbapi"],
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
setup_package()
|
1692
lib/adodbapi/test/adodbapitest.py
Normal file
File diff suppressed because it is too large

221
lib/adodbapi/test/adodbapitestconfig.py
Normal file

@@ -0,0 +1,221 @@
# Configure this to _YOUR_ environment in order to run the testcases.
"testADOdbapiConfig.py v 2.6.2.B00"

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# #
# # TESTERS:
# #
# # You will need to make numerous modifications to this file
# # to adapt it to your own testing environment.
# #
# # Skip down to the next "# #" line --
# # -- the things you need to change are below it.
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
import platform
import random
import sys

import is64bit
import setuptestframework
import tryconnection

print("\nPython", sys.version)
node = platform.node()
try:
    print(
        "node=%s, is64bit.os()= %s, is64bit.Python()= %s"
        % (node, is64bit.os(), is64bit.Python())
    )
except:
    pass

if "--help" in sys.argv:
    print(
        """Valid command-line switches are:
        --package - create a temporary test package, run 2to3 if needed.
        --all - run all possible tests
        --time - loop over time format tests (including mxdatetime if present)
        --nojet - do not test against an ACCESS database file
        --mssql - test against Microsoft SQL server
        --pg - test against PostgreSQL
        --mysql - test against MariaDB
        --remote= - test using remote server at= (experimental)
        """
    )
    exit()
try:
    onWindows = bool(sys.getwindowsversion())  # seems to work on all versions of Python
except:
    onWindows = False

# create a random name for temporary table names
_alphabet = (
    "PYFGCRLAOEUIDHTNSQJKXBMWVZ"  # why, yes, I do happen to use a dvorak keyboard
)
tmp = "".join([random.choice(_alphabet) for x in range(9)])
mdb_name = "xx_" + tmp + ".mdb"  # generate a non-colliding name for the temporary .mdb
testfolder = setuptestframework.maketemp()

if "--package" in sys.argv:
    # create a new adodbapi module -- running 2to3 if needed.
    pth = setuptestframework.makeadopackage(testfolder)
else:
    # use the adodbapi module in which this file appears
    pth = setuptestframework.find_ado_path()
if pth not in sys.path:
    # look here _first_ to find modules
    sys.path.insert(1, pth)

proxy_host = None
for arg in sys.argv:
    if arg.startswith("--remote="):
        proxy_host = arg.split("=")[1]
        import adodbapi.remote as remote

        break

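# Illustrative invocation (not part of the original source):
#
#     python adodbapitestconfig.py --mssql --remote=my.proxy.host
#
# would set proxy_host to "my.proxy.host" and import adodbapi.remote.
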
# function to clean up the temporary folder -- calling program must run this function before exit.
cleanup = setuptestframework.getcleanupfunction()
try:
    import adodbapi  # will (hopefully) be imported using the "pth" discovered above
except SyntaxError:
    print(
        '\n* * * Are you trying to run Python2 code using Python3? Re-run this test using the "--package" switch.'
    )
    sys.exit(11)
try:
    print(adodbapi.version)  # show version
except:
    print('"adodbapi.version" not present or not working.')
print(__doc__)

verbose = False
for a in sys.argv:
    if a.startswith("--verbose"):
        arg = True
        try:
            arg = int(a.split("=")[1])
        except IndexError:
            pass
        adodbapi.adodbapi.verbose = arg
        verbose = arg

doAllTests = "--all" in sys.argv
doAccessTest = not ("--nojet" in sys.argv)
doSqlServerTest = "--mssql" in sys.argv or doAllTests
doMySqlTest = "--mysql" in sys.argv or doAllTests
doPostgresTest = "--pg" in sys.argv or doAllTests
iterateOverTimeTests = ("--time" in sys.argv or doAllTests) and onWindows

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # start your environment setup here v v v
SQL_HOST_NODE = "testsql.2txt.us,1430"

try:  # If mx extensions are installed, use mxDateTime
    import mx.DateTime

    doMxDateTimeTest = True
except:
    doMxDateTimeTest = False  # Requires eGenixMXExtensions

doTimeTest = True  # obsolete python time format

if doAccessTest:
    if proxy_host:  # determine the (probably remote) database file folder
        c = {"macro_find_temp_test_path": ["mdb", mdb_name], "proxy_host": proxy_host}
    else:
        c = {"mdb": setuptestframework.makemdb(testfolder, mdb_name)}

    # macro definition for keyword "provider" using macro "is64bit" -- see documentation
    # is64bit will return true for 64 bit versions of Python, so the macro will select the ACE provider
    # (If running a remote ADO service, this will test the 64-bitedness of the ADO server.)
    c["macro_is64bit"] = [
        "provider",
        "Microsoft.ACE.OLEDB.12.0",  # 64 bit provider
        "Microsoft.Jet.OLEDB.4.0",
    ]  # 32 bit provider
    connStrAccess = "Provider=%(provider)s;Data Source=%(mdb)s"  # ;Mode=ReadWrite;Persist Security Info=False;Jet OLEDB:Bypass UserInfo Validation=True"
    print(
        " ...Testing ACCESS connection to {} file...".format(
            c.get("mdb", "remote .mdb")
        )
    )
    doAccessTest, connStrAccess, dbAccessconnect = tryconnection.try_connection(
        verbose, connStrAccess, 10, **c
    )

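# Illustrative note (not part of the original source): the "macro_is64bit"
# entry above asks tryconnection to substitute one of the two provider names
# into the %(provider)s slot, so on 64-bit Python the Access connection
# string would expand to something like:
#
#     "Provider=Microsoft.ACE.OLEDB.12.0;Data Source=C:\\temp\\xx_ABCDEFGHJ.mdb"
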
if doSqlServerTest:
    c = {
        "host": SQL_HOST_NODE,  # name of computer with SQL Server
        "database": "adotest",
        "user": "adotestuser",  # None implies Windows security
        "password": "Sq1234567",
        # macro definition for keyword "security" using macro "auto_security"
        "macro_auto_security": "security",
        "provider": "MSOLEDBSQL; MARS Connection=True",
    }
    if proxy_host:
        c["proxy_host"] = proxy_host
    connStr = "Provider=%(provider)s; Initial Catalog=%(database)s; Data Source=%(host)s; %(security)s;"
    print(" ...Testing MS-SQL login to {}...".format(c["host"]))
    (
        doSqlServerTest,
        connStrSQLServer,
        dbSqlServerconnect,
    ) = tryconnection.try_connection(verbose, connStr, 30, **c)

if doMySqlTest:
    c = {
        "host": "testmysql.2txt.us",
        "database": "adodbapitest",
        "user": "adotest",
        "password": "12345678",
        "port": "3330",  # note the nonstandard port for obfuscation
        "driver": "MySQL ODBC 5.1 Driver",
    }  # or _driver="MySQL ODBC 3.51 Driver"
    if proxy_host:
        c["proxy_host"] = proxy_host
    c["macro_is64bit"] = [
        "provider",
        "Provider=MSDASQL;",
    ]  # turn on the 64 bit ODBC adapter only if needed
    cs = (
        "%(provider)sDriver={%(driver)s};Server=%(host)s;Port=3330;"
        + "Database=%(database)s;user=%(user)s;password=%(password)s;Option=3;"
    )
    print(" ...Testing MySql login to {}...".format(c["host"]))
    doMySqlTest, connStrMySql, dbMySqlconnect = tryconnection.try_connection(
        verbose, cs, 5, **c
    )


if doPostgresTest:
    _computername = "testpg.2txt.us"
    _databasename = "adotest"
    _username = "adotestuser"
    _password = "12345678"
    kws = {"timeout": 4}
    kws["macro_is64bit"] = [
        "prov_drv",
        "Provider=MSDASQL;Driver={PostgreSQL Unicode(x64)}",
        "Driver=PostgreSQL Unicode",
    ]
    # get driver from http://www.postgresql.org/ftp/odbc/versions/
    # test using positional and keyword arguments (bad example for real code)
    if proxy_host:
        kws["proxy_host"] = proxy_host
    print(" ...Testing PostgreSQL login to {}...".format(_computername))
    doPostgresTest, connStrPostgres, dbPostgresConnect = tryconnection.try_connection(
        verbose,
        "%(prov_drv)s;Server=%(host)s;Database=%(database)s;uid=%(user)s;pwd=%(password)s;port=5430;",  # note nonstandard port
        _username,
        _password,
        _computername,
        _databasename,
        **kws
    )

assert (
    doAccessTest or doSqlServerTest or doMySqlTest or doPostgresTest
), "No database engine found for testing"
939
lib/adodbapi/test/dbapi20.py
Normal file

@@ -0,0 +1,939 @@
#!/usr/bin/env python
""" Python DB API 2.0 driver compliance unit test suite.

This software is Public Domain and may be used without restrictions.

"Now we have booze and barflies entering the discussion, plus rumours of
DBAs on drugs... and I won't tell you what flashes through my mind each
time I read the subject line with 'Anal Compliance' in it. All around
this is turning out to be a thoroughly unwholesome unit test."

    -- Ian Bicking
"""

__version__ = "$Revision: 1.15.0 $"[11:-2]
__author__ = "Stuart Bishop <stuart@stuartbishop.net>"

import sys
import time
import unittest

if sys.version[0] >= "3":  # python 3.x
    _BaseException = Exception

    def _failUnless(self, expr, msg=None):
        self.assertTrue(expr, msg)

else:  # python 2.x
    from exceptions import Exception as _BaseException

    def _failUnless(self, expr, msg=None):
        self.failUnless(expr, msg)  ## deprecated since Python 2.6


# set this to "True" to follow API 2.0 to the letter
TEST_FOR_NON_IDEMPOTENT_CLOSE = False

# Revision 1.15  2019/11/22 00:50:00  kf7xm
# Make Turn off IDEMPOTENT_CLOSE a proper skipTest

# Revision 1.14  2013/05/20 11:02:05  kf7xm
# Add a literal string to the format insertion test to catch trivial re-format algorithms

# Revision 1.13  2013/05/08 14:31:50  kf7xm
# Quick switch to Turn off IDEMPOTENT_CLOSE test. Also: Silence teardown failure


# Revision 1.12  2009/02/06 03:35:11  kf7xm
# Tested okay with Python 3.0, includes last minute patches from Mark H.
#
# Revision 1.1.1.1.2.1  2008/09/20 19:54:59  rupole
# Include latest changes from main branch
# Updates for py3k
#
# Revision 1.11  2005/01/02 02:41:01  zenzen
# Update author email address
#
# Revision 1.10  2003/10/09 03:14:14  zenzen
# Add test for DB API 2.0 optional extension, where database exceptions
# are exposed as attributes on the Connection object.
#
# Revision 1.9  2003/08/13 01:16:36  zenzen
# Minor tweak from Stefan Fleiter
#
# Revision 1.8  2003/04/10 00:13:25  zenzen
# Changes, as per suggestions by M.-A. Lemburg
# - Add a table prefix, to ensure namespace collisions can always be avoided
#
# Revision 1.7  2003/02/26 23:33:37  zenzen
# Break out DDL into helper functions, as per request by David Rushby
#
# Revision 1.6  2003/02/21 03:04:33  zenzen
# Stuff from Henrik Ekelund:
#     added test_None
#     added test_nextset & hooks
#
# Revision 1.5  2003/02/17 22:08:43  zenzen
# Implement suggestions and code from Henrik Eklund - test that cursor.arraysize
# defaults to 1 & generic cursor.callproc test added
#
# Revision 1.4  2003/02/15 00:16:33  zenzen
# Changes, as per suggestions and bug reports by M.-A. Lemburg,
# Matthew T. Kromer, Federico Di Gregorio and Daniel Dittmar
# - Class renamed
# - Now a subclass of TestCase, to avoid requiring the driver stub
#   to use multiple inheritance
# - Reversed the polarity of buggy test in test_description
# - Test exception heirarchy correctly
# - self.populate is now self._populate(), so if a driver stub
#   overrides self.ddl1 this change propogates
# - VARCHAR columns now have a width, which will hopefully make the
#   DDL even more portible (this will be reversed if it causes more problems)
# - cursor.rowcount being checked after various execute and fetchXXX methods
# - Check for fetchall and fetchmany returning empty lists after results
#   are exhausted (already checking for empty lists if select retrieved
#   nothing
# - Fix bugs in test_setoutputsize_basic and test_setinputsizes
#
def str2bytes(sval):
    if sys.version_info < (3, 0) and isinstance(sval, str):
        sval = sval.decode("latin1")
    return sval.encode("latin1")  # python 3 make unicode into bytes

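# Illustrative behavior (not part of the original source): on Python 3,
# str2bytes("abc") == b"abc"; the latin-1 round trip maps code points
# 0-255 one-to-one onto byte values, so no data is lost for such strings.

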
class DatabaseAPI20Test(unittest.TestCase):
    """Test a database self.driver for DB API 2.0 compatibility.
    This implementation tests Gadfly, but the TestCase
    is structured so that other self.drivers can subclass this
    test case to ensure compliance with the DB-API. It is
    expected that this TestCase may be expanded in the future
    if ambiguities or edge conditions are discovered.

    The 'Optional Extensions' are not yet being tested.

    self.drivers should subclass this test, overriding setUp, tearDown,
    self.driver, connect_args and connect_kw_args. Class specification
    should be as follows:

    import dbapi20
    class mytest(dbapi20.DatabaseAPI20Test):
        [...]

    Don't 'import DatabaseAPI20Test from dbapi20', or you will
    confuse the unit tester - just 'import dbapi20'.
    """

    # The self.driver module. This should be the module where the 'connect'
    # method is to be found
    driver = None
    connect_args = ()  # List of arguments to pass to connect
    connect_kw_args = {}  # Keyword arguments for connect
    table_prefix = "dbapi20test_"  # If you need to specify a prefix for tables

    ddl1 = "create table %sbooze (name varchar(20))" % table_prefix
    ddl2 = "create table %sbarflys (name varchar(20), drink varchar(30))" % table_prefix
    xddl1 = "drop table %sbooze" % table_prefix
    xddl2 = "drop table %sbarflys" % table_prefix

    lowerfunc = "lower"  # Name of stored procedure to convert string->lowercase

    # Some drivers may need to override these helpers, for example adding
    # a 'commit' after the execute.
    def executeDDL1(self, cursor):
        cursor.execute(self.ddl1)

    def executeDDL2(self, cursor):
        cursor.execute(self.ddl2)
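
    # Illustrative override (not part of the original source): a driver whose
    # DDL statements need an explicit commit might subclass the helper as:
    #
    #     def executeDDL1(self, cursor):
    #         cursor.execute(self.ddl1)
    #         cursor.connection.commit()  # .connection is an optional DB-API extension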

    def setUp(self):
        """self.drivers should override this method to perform required setup
        if any is necessary, such as creating the database.
        """
        pass

    def tearDown(self):
        """self.drivers should override this method to perform required cleanup
        if any is necessary, such as deleting the test database.
        The default drops the tables that may be created.
        """
        try:
            con = self._connect()
            try:
                cur = con.cursor()
                for ddl in (self.xddl1, self.xddl2):
                    try:
                        cur.execute(ddl)
                        con.commit()
                    except self.driver.Error:
                        # Assume table didn't exist. Other tests will check if
                        # execute is busted.
                        pass
            finally:
                con.close()
        except _BaseException:
            pass

    def _connect(self):
        try:
            r = self.driver.connect(*self.connect_args, **self.connect_kw_args)
        except AttributeError:
            self.fail("No connect method found in self.driver module")
        return r

    def test_connect(self):
        con = self._connect()
        con.close()

    def test_apilevel(self):
        try:
            # Must exist
            apilevel = self.driver.apilevel
            # Must equal 2.0
            self.assertEqual(apilevel, "2.0")
        except AttributeError:
            self.fail("Driver doesn't define apilevel")

    def test_threadsafety(self):
        try:
            # Must exist
            threadsafety = self.driver.threadsafety
            # Must be a valid value
            _failUnless(self, threadsafety in (0, 1, 2, 3))
        except AttributeError:
            self.fail("Driver doesn't define threadsafety")

    def test_paramstyle(self):
        try:
            # Must exist
            paramstyle = self.driver.paramstyle
            # Must be a valid value
            _failUnless(
                self, paramstyle in ("qmark", "numeric", "named", "format", "pyformat")
            )
        except AttributeError:
            self.fail("Driver doesn't define paramstyle")

    def test_Exceptions(self):
        # Make sure required exceptions exist, and are in the
        # defined hierarchy.
        if sys.version[0] == "3":  # under Python 3 StandardError no longer exists
            self.assertTrue(issubclass(self.driver.Warning, Exception))
            self.assertTrue(issubclass(self.driver.Error, Exception))
        else:
            self.failUnless(issubclass(self.driver.Warning, Exception))
            self.failUnless(issubclass(self.driver.Error, Exception))

        _failUnless(self, issubclass(self.driver.InterfaceError, self.driver.Error))
        _failUnless(self, issubclass(self.driver.DatabaseError, self.driver.Error))
        _failUnless(self, issubclass(self.driver.OperationalError, self.driver.Error))
        _failUnless(self, issubclass(self.driver.IntegrityError, self.driver.Error))
        _failUnless(self, issubclass(self.driver.InternalError, self.driver.Error))
        _failUnless(self, issubclass(self.driver.ProgrammingError, self.driver.Error))
        _failUnless(self, issubclass(self.driver.NotSupportedError, self.driver.Error))

    def test_ExceptionsAsConnectionAttributes(self):
        # OPTIONAL EXTENSION
        # Test for the optional DB API 2.0 extension, where the exceptions
        # are exposed as attributes on the Connection object
        # I figure this optional extension will be implemented by any
        # driver author who is using this test suite, so it is enabled
        # by default.
        con = self._connect()
        drv = self.driver
        _failUnless(self, con.Warning is drv.Warning)
        _failUnless(self, con.Error is drv.Error)
        _failUnless(self, con.InterfaceError is drv.InterfaceError)
        _failUnless(self, con.DatabaseError is drv.DatabaseError)
        _failUnless(self, con.OperationalError is drv.OperationalError)
        _failUnless(self, con.IntegrityError is drv.IntegrityError)
        _failUnless(self, con.InternalError is drv.InternalError)
        _failUnless(self, con.ProgrammingError is drv.ProgrammingError)
        _failUnless(self, con.NotSupportedError is drv.NotSupportedError)

    def test_commit(self):
        con = self._connect()
        try:
            # Commit must work, even if it doesn't do anything
            con.commit()
        finally:
            con.close()

    def test_rollback(self):
        con = self._connect()
        # If rollback is defined, it should either work or throw
        # the documented exception
        if hasattr(con, "rollback"):
            try:
                con.rollback()
            except self.driver.NotSupportedError:
                pass

    def test_cursor(self):
        con = self._connect()
        try:
            cur = con.cursor()
        finally:
            con.close()

    def test_cursor_isolation(self):
        con = self._connect()
        try:
            # Make sure cursors created from the same connection have
            # the documented transaction isolation level
            cur1 = con.cursor()
            cur2 = con.cursor()
            self.executeDDL1(cur1)
            cur1.execute(
                "insert into %sbooze values ('Victoria Bitter')" % (self.table_prefix)
            )
            cur2.execute("select name from %sbooze" % self.table_prefix)
            booze = cur2.fetchall()
            self.assertEqual(len(booze), 1)
            self.assertEqual(len(booze[0]), 1)
            self.assertEqual(booze[0][0], "Victoria Bitter")
        finally:
            con.close()

    def test_description(self):
        con = self._connect()
        try:
            cur = con.cursor()
            self.executeDDL1(cur)
            self.assertEqual(
                cur.description,
                None,
                "cursor.description should be none after executing a "
                "statement that can return no rows (such as DDL)",
            )
            cur.execute("select name from %sbooze" % self.table_prefix)
            self.assertEqual(
                len(cur.description), 1, "cursor.description describes too many columns"
            )
            self.assertEqual(
                len(cur.description[0]),
                7,
                "cursor.description[x] tuples must have 7 elements",
            )
            self.assertEqual(
                cur.description[0][0].lower(),
                "name",
                "cursor.description[x][0] must return column name",
            )
            self.assertEqual(
                cur.description[0][1],
                self.driver.STRING,
                "cursor.description[x][1] must return column type. Got %r"
                % cur.description[0][1],
            )

            # Make sure self.description gets reset
            self.executeDDL2(cur)
            self.assertEqual(
                cur.description,
                None,
                "cursor.description not being set to None when executing "
                "no-result statements (eg. DDL)",
            )
        finally:
            con.close()

    def test_rowcount(self):
        con = self._connect()
        try:
            cur = con.cursor()
            self.executeDDL1(cur)
            _failUnless(
                self,
                cur.rowcount in (-1, 0),  # Bug #543885
                "cursor.rowcount should be -1 or 0 after executing no-result "
                "statements",
            )
            cur.execute(
                "insert into %sbooze values ('Victoria Bitter')" % (self.table_prefix)
            )
            _failUnless(
                self,
                cur.rowcount in (-1, 1),
                "cursor.rowcount should == number of rows inserted, or "
                "set to -1 after executing an insert statement",
            )
            cur.execute("select name from %sbooze" % self.table_prefix)
            _failUnless(
                self,
                cur.rowcount in (-1, 1),
                "cursor.rowcount should == number of rows returned, or "
                "set to -1 after executing a select statement",
            )
            self.executeDDL2(cur)
            self.assertEqual(
                cur.rowcount,
                -1,
                "cursor.rowcount not being reset to -1 after executing "
                "no-result statements",
            )
        finally:
            con.close()

    lower_func = "lower"

    def test_callproc(self):
        con = self._connect()
        try:
            cur = con.cursor()
            if self.lower_func and hasattr(cur, "callproc"):
                r = cur.callproc(self.lower_func, ("FOO",))
                self.assertEqual(len(r), 1)
                self.assertEqual(r[0], "FOO")
                r = cur.fetchall()
                self.assertEqual(len(r), 1, "callproc produced no result set")
                self.assertEqual(len(r[0]), 1, "callproc produced invalid result set")
                self.assertEqual(r[0][0], "foo", "callproc produced invalid results")
        finally:
            con.close()

    def test_close(self):
        con = self._connect()
        try:
            cur = con.cursor()
        finally:
            con.close()

        # cursor.execute should raise an Error if called after connection
        # closed
        self.assertRaises(self.driver.Error, self.executeDDL1, cur)

        # connection.commit should raise an Error if called after connection
        # closed.
        self.assertRaises(self.driver.Error, con.commit)

        # connection.close should raise an Error if called more than once
        #!!! reasonable persons differ about the usefulness of this test and this feature !!!
        if TEST_FOR_NON_IDEMPOTENT_CLOSE:
            self.assertRaises(self.driver.Error, con.close)
        else:
            self.skipTest(
                "Non-idempotent close is considered a bad thing by some people."
            )

    def test_execute(self):
        con = self._connect()
        try:
            cur = con.cursor()
            self._paraminsert(cur)
        finally:
            con.close()

    def _paraminsert(self, cur):
        self.executeDDL2(cur)
        cur.execute(
            "insert into %sbarflys values ('Victoria Bitter', 'thi%%s :may ca%%(u)se? troub:1e')"
            % (self.table_prefix)
        )
        _failUnless(self, cur.rowcount in (-1, 1))

        if self.driver.paramstyle == "qmark":
            cur.execute(
                "insert into %sbarflys values (?, 'thi%%s :may ca%%(u)se? troub:1e')"
                % self.table_prefix,
                ("Cooper's",),
            )
        elif self.driver.paramstyle == "numeric":
            cur.execute(
                "insert into %sbarflys values (:1, 'thi%%s :may ca%%(u)se? troub:1e')"
                % self.table_prefix,
                ("Cooper's",),
            )
        elif self.driver.paramstyle == "named":
            cur.execute(
                "insert into %sbarflys values (:beer, 'thi%%s :may ca%%(u)se? troub:1e')"
                % self.table_prefix,
                {"beer": "Cooper's"},
            )
        elif self.driver.paramstyle == "format":
            cur.execute(
                "insert into %sbarflys values (%%s, 'thi%%s :may ca%%(u)se? troub:1e')"
                % self.table_prefix,
                ("Cooper's",),
            )
        elif self.driver.paramstyle == "pyformat":
            cur.execute(
                "insert into %sbarflys values (%%(beer)s, 'thi%%s :may ca%%(u)se? troub:1e')"
                % self.table_prefix,
                {"beer": "Cooper's"},
            )
        else:
            self.fail("Invalid paramstyle")
        _failUnless(self, cur.rowcount in (-1, 1))

        cur.execute("select name, drink from %sbarflys" % self.table_prefix)
        res = cur.fetchall()
        self.assertEqual(len(res), 2, "cursor.fetchall returned too few rows")
        beers = [res[0][0], res[1][0]]
        beers.sort()
        self.assertEqual(
            beers[0],
            "Cooper's",
            "cursor.fetchall retrieved incorrect data, or data inserted incorrectly",
        )
        self.assertEqual(
            beers[1],
            "Victoria Bitter",
            "cursor.fetchall retrieved incorrect data, or data inserted incorrectly",
        )
        trouble = "thi%s :may ca%(u)se? troub:1e"
        self.assertEqual(
            res[0][1],
            trouble,
            "cursor.fetchall retrieved incorrect data, or data inserted "
            "incorrectly. Got=%s, Expected=%s" % (repr(res[0][1]), repr(trouble)),
        )
        self.assertEqual(
            res[1][1],
            trouble,
            "cursor.fetchall retrieved incorrect data, or data inserted "
            "incorrectly. Got=%s, Expected=%s" % (repr(res[1][1]), repr(trouble)),
        )
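
    # Illustrative summary (not part of the original source) of the five
    # DB API paramstyles exercised in _paraminsert above, for one "beer" value:
    #
    #     qmark      insert into t values (?)           ("Cooper's",)
    #     numeric    insert into t values (:1)          ("Cooper's",)
    #     named      insert into t values (:beer)       {"beer": "Cooper's"}
    #     format     insert into t values (%s)          ("Cooper's",)
    #     pyformat   insert into t values (%(beer)s)    {"beer": "Cooper's"}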

    def test_executemany(self):
        con = self._connect()
        try:
            cur = con.cursor()
            self.executeDDL1(cur)
            largs = [("Cooper's",), ("Boag's",)]
            margs = [{"beer": "Cooper's"}, {"beer": "Boag's"}]
            if self.driver.paramstyle == "qmark":
                cur.executemany(
                    "insert into %sbooze values (?)" % self.table_prefix, largs
                )
            elif self.driver.paramstyle == "numeric":
                cur.executemany(
                    "insert into %sbooze values (:1)" % self.table_prefix, largs
                )
            elif self.driver.paramstyle == "named":
                cur.executemany(
                    "insert into %sbooze values (:beer)" % self.table_prefix, margs
                )
            elif self.driver.paramstyle == "format":
                cur.executemany(
                    "insert into %sbooze values (%%s)" % self.table_prefix, largs
                )
            elif self.driver.paramstyle == "pyformat":
                cur.executemany(
                    "insert into %sbooze values (%%(beer)s)" % (self.table_prefix),
                    margs,
                )
            else:
                self.fail("Unknown paramstyle")
            _failUnless(
                self,
                cur.rowcount in (-1, 2),
                "insert using cursor.executemany set cursor.rowcount to "
                "incorrect value %r" % cur.rowcount,
            )
            cur.execute("select name from %sbooze" % self.table_prefix)
            res = cur.fetchall()
            self.assertEqual(
                len(res), 2, "cursor.fetchall retrieved incorrect number of rows"
            )
            beers = [res[0][0], res[1][0]]
            beers.sort()
            self.assertEqual(
                beers[0], "Boag's", 'incorrect data "%s" retrieved' % beers[0]
            )
            self.assertEqual(beers[1], "Cooper's", "incorrect data retrieved")
        finally:
            con.close()

    def test_fetchone(self):
        con = self._connect()
        try:
            cur = con.cursor()

            # cursor.fetchone should raise an Error if called before
            # executing a select-type query
            self.assertRaises(self.driver.Error, cur.fetchone)

            # cursor.fetchone should raise an Error if called after
            # executing a query that cannot return rows
            self.executeDDL1(cur)
            self.assertRaises(self.driver.Error, cur.fetchone)

            cur.execute("select name from %sbooze" % self.table_prefix)
            self.assertEqual(
                cur.fetchone(),
                None,
                "cursor.fetchone should return None if a query retrieves no rows",
            )
            _failUnless(self, cur.rowcount in (-1, 0))

            # cursor.fetchone should raise an Error if called after
            # executing a query that cannot return rows
            cur.execute(
                "insert into %sbooze values ('Victoria Bitter')" % (self.table_prefix)
            )
            self.assertRaises(self.driver.Error, cur.fetchone)

            cur.execute("select name from %sbooze" % self.table_prefix)
            r = cur.fetchone()
            self.assertEqual(
                len(r), 1, "cursor.fetchone should have retrieved a single row"
            )
            self.assertEqual(
                r[0], "Victoria Bitter", "cursor.fetchone retrieved incorrect data"
            )
            self.assertEqual(
                cur.fetchone(),
                None,
                "cursor.fetchone should return None if no more rows available",
            )
            _failUnless(self, cur.rowcount in (-1, 1))
        finally:
            con.close()

    samples = [
        "Carlton Cold",
        "Carlton Draft",
        "Mountain Goat",
        "Redback",
        "Victoria Bitter",
        "XXXX",
    ]

    def _populate(self):
        """Return a list of sql commands to setup the DB for the fetch
        tests.
        """
        populate = [
            "insert into %sbooze values ('%s')" % (self.table_prefix, s)
            for s in self.samples
        ]
        return populate
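
    # Illustrative output (not part of the original source): with the default
    # table_prefix, _populate() yields statements such as
    #
    #     "insert into dbapi20test_booze values ('Carlton Cold')"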

    def test_fetchmany(self):
        con = self._connect()
        try:
            cur = con.cursor()

            # cursor.fetchmany should raise an Error if called without
            # issuing a query
            self.assertRaises(self.driver.Error, cur.fetchmany, 4)

            self.executeDDL1(cur)
            for sql in self._populate():
                cur.execute(sql)

            cur.execute("select name from %sbooze" % self.table_prefix)
            r = cur.fetchmany()
            self.assertEqual(
                len(r),
                1,
                "cursor.fetchmany retrieved incorrect number of rows, "
                "default of arraysize is one.",
            )
            cur.arraysize = 10
            r = cur.fetchmany(3)  # Should get 3 rows
            self.assertEqual(
                len(r), 3, "cursor.fetchmany retrieved incorrect number of rows"
            )
            r = cur.fetchmany(4)  # Should get 2 more
            self.assertEqual(
                len(r), 2, "cursor.fetchmany retrieved incorrect number of rows"
            )
            r = cur.fetchmany(4)  # Should be an empty sequence
            self.assertEqual(
                len(r),
                0,
                "cursor.fetchmany should return an empty sequence after "
                "results are exhausted",
            )
            _failUnless(self, cur.rowcount in (-1, 6))

            # Same as above, using cursor.arraysize
            cur.arraysize = 4
            cur.execute("select name from %sbooze" % self.table_prefix)
            r = cur.fetchmany()  # Should get 4 rows
            self.assertEqual(
                len(r), 4, "cursor.arraysize not being honoured by fetchmany"
            )
            r = cur.fetchmany()  # Should get 2 more
            self.assertEqual(len(r), 2)
            r = cur.fetchmany()  # Should be an empty sequence
            self.assertEqual(len(r), 0)
            _failUnless(self, cur.rowcount in (-1, 6))

            cur.arraysize = 6
            cur.execute("select name from %sbooze" % self.table_prefix)
            rows = cur.fetchmany()  # Should get all rows
            _failUnless(self, cur.rowcount in (-1, 6))
            self.assertEqual(len(rows), 6)
            self.assertEqual(len(rows), 6)
            rows = [r[0] for r in rows]
            rows.sort()

            # Make sure we get the right data back out
            for i in range(0, 6):
                self.assertEqual(
                    rows[i],
                    self.samples[i],
                    "incorrect data retrieved by cursor.fetchmany",
                )

            rows = cur.fetchmany()  # Should return an empty list
            self.assertEqual(
                len(rows),
                0,
                "cursor.fetchmany should return an empty sequence if "
                "called after the whole result set has been fetched",
            )
            _failUnless(self, cur.rowcount in (-1, 6))

            self.executeDDL2(cur)
            cur.execute("select name from %sbarflys" % self.table_prefix)
            r = cur.fetchmany()  # Should get empty sequence
            self.assertEqual(
                len(r),
                0,
                "cursor.fetchmany should return an empty sequence if "
                "query retrieved no rows",
            )
            _failUnless(self, cur.rowcount in (-1, 0))

        finally:
            con.close()

    def test_fetchall(self):
        con = self._connect()
        try:
            cur = con.cursor()
            # cursor.fetchall should raise an Error if called
            # without executing a query that may return rows (such
            # as a select)
            self.assertRaises(self.driver.Error, cur.fetchall)

            self.executeDDL1(cur)
            for sql in self._populate():
                cur.execute(sql)

            # cursor.fetchall should raise an Error if called
            # after executing a statement that cannot return rows
            self.assertRaises(self.driver.Error, cur.fetchall)

            cur.execute("select name from %sbooze" % self.table_prefix)
            rows = cur.fetchall()
            _failUnless(self, cur.rowcount in (-1, len(self.samples)))
            self.assertEqual(
                len(rows),
                len(self.samples),
                "cursor.fetchall did not retrieve all rows",
            )
            rows = [r[0] for r in rows]
            rows.sort()
            for i in range(0, len(self.samples)):
                self.assertEqual(
                    rows[i], self.samples[i], "cursor.fetchall retrieved incorrect rows"
                )
            rows = cur.fetchall()
            self.assertEqual(
                len(rows),
                0,
                "cursor.fetchall should return an empty list if called "
                "after the whole result set has been fetched",
            )
            _failUnless(self, cur.rowcount in (-1, len(self.samples)))

            self.executeDDL2(cur)
            cur.execute("select name from %sbarflys" % self.table_prefix)
            rows = cur.fetchall()
            _failUnless(self, cur.rowcount in (-1, 0))
            self.assertEqual(
                len(rows),
                0,
                "cursor.fetchall should return an empty list if "
                "a select query returns no rows",
            )

        finally:
            con.close()

    def test_mixedfetch(self):
        con = self._connect()
        try:
            cur = con.cursor()
            self.executeDDL1(cur)
            for sql in self._populate():
                cur.execute(sql)

            cur.execute("select name from %sbooze" % self.table_prefix)
            rows1 = cur.fetchone()
            rows23 = cur.fetchmany(2)
            rows4 = cur.fetchone()
            rows56 = cur.fetchall()
            _failUnless(self, cur.rowcount in (-1, 6))
            self.assertEqual(
                len(rows23), 2, "fetchmany returned incorrect number of rows"
            )
            self.assertEqual(
                len(rows56), 2, "fetchall returned incorrect number of rows"
            )

            rows = [rows1[0]]
            rows.extend([rows23[0][0], rows23[1][0]])
            rows.append(rows4[0])
            rows.extend([rows56[0][0], rows56[1][0]])
            rows.sort()
            for i in range(0, len(self.samples)):
                self.assertEqual(
                    rows[i], self.samples[i], "incorrect data retrieved or inserted"
                )
        finally:
            con.close()

    def help_nextset_setUp(self, cur):
        """Should create a procedure called deleteme
        that returns two result sets, first the
        number of rows in booze then "name from booze"
        """
        raise NotImplementedError("Helper not implemented")
        # sql="""
        #    create procedure deleteme as
        #    begin
        #        select count(*) from booze
        #        select name from booze
        #    end
        # """
        # cur.execute(sql)

    def help_nextset_tearDown(self, cur):
        "If cleaning up is needed after nextSetTest"
        raise NotImplementedError("Helper not implemented")
        # cur.execute("drop procedure deleteme")

    def test_nextset(self):
        con = self._connect()
        try:
            cur = con.cursor()
            if not hasattr(cur, "nextset"):
                return

            try:
                self.executeDDL1(cur)
                sql = self._populate()
                for sql in self._populate():
                    cur.execute(sql)

                self.help_nextset_setUp(cur)

                cur.callproc("deleteme")
                numberofrows = cur.fetchone()
                assert numberofrows[0] == len(self.samples)
                assert cur.nextset()
                names = cur.fetchall()
                assert len(names) == len(self.samples)
                s = cur.nextset()
                assert s == None, "No more return sets, should return None"
            finally:
                self.help_nextset_tearDown(cur)

        finally:
            con.close()

    # Note: this second definition intentionally shadows the generic
    # test_nextset above; drivers are expected to override it.
    def test_nextset(self):
        raise NotImplementedError("Drivers need to override this test")

    def test_arraysize(self):
        # Not much here - rest of the tests for this are in test_fetchmany
        con = self._connect()
        try:
            cur = con.cursor()
            _failUnless(
                self, hasattr(cur, "arraysize"), "cursor.arraysize must be defined"
            )
        finally:
            con.close()

    def test_setinputsizes(self):
        con = self._connect()
        try:
            cur = con.cursor()
            cur.setinputsizes((25,))
            self._paraminsert(cur)  # Make sure cursor still works
        finally:
            con.close()

    def test_setoutputsize_basic(self):
        # Basic test is to make sure setoutputsize doesn't blow up
        con = self._connect()
        try:
            cur = con.cursor()
            cur.setoutputsize(1000)
            cur.setoutputsize(2000, 0)
            self._paraminsert(cur)  # Make sure the cursor still works
        finally:
            con.close()

    def test_setoutputsize(self):
        # Real test for setoutputsize is driver dependent
        raise NotImplementedError("Driver needed to override this test")

def test_None(self):
|
||||||
|
con = self._connect()
|
||||||
|
try:
|
||||||
|
cur = con.cursor()
|
||||||
|
self.executeDDL1(cur)
|
||||||
|
cur.execute("insert into %sbooze values (NULL)" % self.table_prefix)
|
||||||
|
cur.execute("select name from %sbooze" % self.table_prefix)
|
||||||
|
r = cur.fetchall()
|
||||||
|
self.assertEqual(len(r), 1)
|
||||||
|
self.assertEqual(len(r[0]), 1)
|
||||||
|
self.assertEqual(r[0][0], None, "NULL value not returned as None")
|
||||||
|
finally:
|
||||||
|
con.close()
|
||||||
|
|
||||||
|
def test_Date(self):
|
||||||
|
d1 = self.driver.Date(2002, 12, 25)
|
||||||
|
d2 = self.driver.DateFromTicks(time.mktime((2002, 12, 25, 0, 0, 0, 0, 0, 0)))
|
||||||
|
# Can we assume this? API doesn't specify, but it seems implied
|
||||||
|
# self.assertEqual(str(d1),str(d2))
|
||||||
|
|
||||||
|
def test_Time(self):
|
||||||
|
t1 = self.driver.Time(13, 45, 30)
|
||||||
|
t2 = self.driver.TimeFromTicks(time.mktime((2001, 1, 1, 13, 45, 30, 0, 0, 0)))
|
||||||
|
# Can we assume this? API doesn't specify, but it seems implied
|
||||||
|
# self.assertEqual(str(t1),str(t2))
|
||||||
|
|
||||||
|
def test_Timestamp(self):
|
||||||
|
t1 = self.driver.Timestamp(2002, 12, 25, 13, 45, 30)
|
||||||
|
t2 = self.driver.TimestampFromTicks(
|
||||||
|
time.mktime((2002, 12, 25, 13, 45, 30, 0, 0, 0))
|
||||||
|
)
|
||||||
|
# Can we assume this? API doesn't specify, but it seems implied
|
||||||
|
# self.assertEqual(str(t1),str(t2))
|
||||||
|
|
||||||
|
def test_Binary(self):
|
||||||
|
b = self.driver.Binary(str2bytes("Something"))
|
||||||
|
b = self.driver.Binary(str2bytes(""))
|
||||||
|
|
||||||
|
def test_STRING(self):
|
||||||
|
_failUnless(
|
||||||
|
self, hasattr(self.driver, "STRING"), "module.STRING must be defined"
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_BINARY(self):
|
||||||
|
_failUnless(
|
||||||
|
self, hasattr(self.driver, "BINARY"), "module.BINARY must be defined."
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_NUMBER(self):
|
||||||
|
_failUnless(
|
||||||
|
self, hasattr(self.driver, "NUMBER"), "module.NUMBER must be defined."
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_DATETIME(self):
|
||||||
|
_failUnless(
|
||||||
|
self, hasattr(self.driver, "DATETIME"), "module.DATETIME must be defined."
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_ROWID(self):
|
||||||
|
_failUnless(
|
||||||
|
self, hasattr(self.driver, "ROWID"), "module.ROWID must be defined."
|
||||||
|
)
|
41
lib/adodbapi/test/is64bit.py
Normal file
@@ -0,0 +1,41 @@
"""is64bit.Python() --> boolean value of detected Python word size. is64bit.os() --> os build version"""
import sys


def Python():
    if sys.platform == "cli":  # IronPython
        import System

        return System.IntPtr.Size == 8
    else:
        try:
            return sys.maxsize > 2147483647
        except AttributeError:
            return sys.maxint > 2147483647


def os():
    import platform

    pm = platform.machine()
    if pm != ".." and pm.endswith("64"):  # recent Python (not Iron)
        return True
    else:
        import os

        if "PROCESSOR_ARCHITEW6432" in os.environ:
            return True  # 32 bit program running on 64 bit Windows
        try:
            return os.environ["PROCESSOR_ARCHITECTURE"].endswith(
                "64"
            )  # 64 bit Windows 64 bit program
        except IndexError:
            pass  # not Windows
        try:
            return "64" in platform.architecture()[0]  # this often works in Linux
        except:
            return False  # is an older version of Python, assume also an older os (best we can guess)


if __name__ == "__main__":
    print("is64bit.Python() =", Python(), "is64bit.os() =", os())
134
lib/adodbapi/test/setuptestframework.py
Normal file
@@ -0,0 +1,134 @@
#!/usr/bin/python2
# Configure this in order to run the testcases.
"setuptestframework.py v 2.6.0.8"
import os
import shutil
import sys
import tempfile

try:
    OSErrors = (WindowsError, OSError)
except NameError:  # not running on Windows
    OSErrors = OSError


def maketemp():
    temphome = tempfile.gettempdir()
    tempdir = os.path.join(temphome, "adodbapi_test")
    try:
        os.mkdir(tempdir)
    except:
        pass
    return tempdir


def _cleanup_function(testfolder, mdb_name):
    try:
        os.unlink(os.path.join(testfolder, mdb_name))
    except:
        pass  # mdb database not present
    try:
        shutil.rmtree(testfolder)
        print(" cleaned up folder", testfolder)
    except:
        pass  # test package not present


def getcleanupfunction():
    return _cleanup_function


def find_ado_path():
    adoName = os.path.normpath(os.getcwd() + "/../../adodbapi.py")
    adoPackage = os.path.dirname(adoName)
    return adoPackage


# make a new package directory for the test copy of ado
def makeadopackage(testfolder):
    adoName = os.path.normpath(os.getcwd() + "/../adodbapi.py")
    adoPath = os.path.dirname(adoName)
    if os.path.exists(adoName):
        newpackage = os.path.join(testfolder, "adodbapi")
        try:
            os.mkdir(newpackage)
        except OSErrors:
            print(
                "*Note: temporary adodbapi package already exists: may be two versions running?"
            )
        for f in os.listdir(adoPath):
            if f.endswith(".py"):
                shutil.copy(os.path.join(adoPath, f), newpackage)
        if sys.version_info >= (3, 0):  # only when running Py3.n
            save = sys.stdout
            sys.stdout = None
            from lib2to3.main import main  # use 2to3 to make test package

            main("lib2to3.fixes", args=["-n", "-w", newpackage])
            sys.stdout = save
        return testfolder
    else:
        raise EnvironmentError("Connot find source of adodbapi to test.")


def makemdb(testfolder, mdb_name):
    # following setup code borrowed from pywin32 odbc test suite
    # kindly contributed by Frank Millman.
    import os

    _accessdatasource = os.path.join(testfolder, mdb_name)
    if os.path.isfile(_accessdatasource):
        print("using JET database=", _accessdatasource)
    else:
        try:
            from win32com.client import constants
            from win32com.client.gencache import EnsureDispatch

            win32 = True
        except ImportError:  # perhaps we are running IronPython
            win32 = False  # iron Python
            try:
                from System import Activator, Type
            except:
                pass

        # Create a brand-new database - what is the story with these?
        dbe = None
        for suffix in (".36", ".35", ".30"):
            try:
                if win32:
                    dbe = EnsureDispatch("DAO.DBEngine" + suffix)
                else:
                    type = Type.GetTypeFromProgID("DAO.DBEngine" + suffix)
                    dbe = Activator.CreateInstance(type)
                break
            except:
                pass
        if dbe:
            print(" ...Creating ACCESS db at " + _accessdatasource)
            if win32:
                workspace = dbe.Workspaces(0)
                newdb = workspace.CreateDatabase(
                    _accessdatasource, constants.dbLangGeneral, constants.dbVersion40
                )
            else:
                newdb = dbe.CreateDatabase(
                    _accessdatasource, ";LANGID=0x0409;CP=1252;COUNTRY=0"
                )
            newdb.Close()
        else:
            print(" ...copying test ACCESS db to " + _accessdatasource)
            mdbName = os.path.abspath(
                os.path.join(os.path.dirname(__file__), "..", "examples", "test.mdb")
            )
            import shutil

            shutil.copy(mdbName, _accessdatasource)

    return _accessdatasource


if __name__ == "__main__":
    print("Setting up a Jet database for server to use for remote testing...")
    temp = maketemp()
    makemdb(temp, "server_test.mdb")
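Editor's aside, not part of the commit: the helpers above are consumed by the test drivers that follow. A minimal sketch of the intended flow, assuming a Windows host where the DAO/win32com machinery used by makemdb is available:

    import setuptestframework

    testfolder = setuptestframework.maketemp()                 # shared temp dir
    mdb = setuptestframework.makemdb(testfolder, "test.mdb")   # find or create a Jet db
    cleanup = setuptestframework.getcleanupfunction()
    try:
        pass  # ... run tests against the database here ...
    finally:
        cleanup(testfolder, "test.mdb")  # delete the db and remove the temp folder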
200
lib/adodbapi/test/test_adodbapi_dbapi20.py
Normal file
@@ -0,0 +1,200 @@
print("This module depends on the dbapi20 compliance tests created by Stuart Bishop")
print("(see db-sig mailing list history for info)")
import platform
import sys
import unittest

import dbapi20
import setuptestframework

testfolder = setuptestframework.maketemp()
if "--package" in sys.argv:
    pth = setuptestframework.makeadopackage(testfolder)
    sys.argv.remove("--package")
else:
    pth = setuptestframework.find_ado_path()
if pth not in sys.path:
    sys.path.insert(1, pth)
# function to clean up the temporary folder -- calling program must run this function before exit.
cleanup = setuptestframework.getcleanupfunction()

import adodbapi
import adodbapi.is64bit as is64bit

db = adodbapi

if "--verbose" in sys.argv:
    db.adodbapi.verbose = 3

print(adodbapi.version)
print("Tested with dbapi20 %s" % dbapi20.__version__)

try:
    onWindows = bool(sys.getwindowsversion())  # seems to work on all versions of Python
except:
    onWindows = False

node = platform.node()

conn_kws = {}
host = "testsql.2txt.us,1430"  # if None, will use macro to fill in node name
instance = r"%s\SQLEXPRESS"
conn_kws["name"] = "adotest"

conn_kws["user"] = "adotestuser"  # None implies Windows security
conn_kws["password"] = "Sq1234567"
# macro definition for keyword "security" using macro "auto_security"
conn_kws["macro_auto_security"] = "security"

if host is None:
    conn_kws["macro_getnode"] = ["host", instance]
else:
    conn_kws["host"] = host

conn_kws[
    "provider"
] = "Provider=MSOLEDBSQL;DataTypeCompatibility=80;MARS Connection=True;"
connStr = "%(provider)s; %(security)s; Initial Catalog=%(name)s;Data Source=%(host)s"

if onWindows and node != "z-PC":
    pass  # default should make a local SQL Server connection
elif node == "xxx":  # try Postgres database
    _computername = "25.223.161.222"
    _databasename = "adotest"
    _username = "adotestuser"
    _password = "12345678"
    _driver = "PostgreSQL Unicode"
    _provider = ""
    connStr = "%sDriver={%s};Server=%s;Database=%s;uid=%s;pwd=%s;" % (
        _provider,
        _driver,
        _computername,
        _databasename,
        _username,
        _password,
    )
elif node == "yyy":  # ACCESS data base is known to fail some tests.
    if is64bit.Python():
        driver = "Microsoft.ACE.OLEDB.12.0"
    else:
        driver = "Microsoft.Jet.OLEDB.4.0"
    testmdb = setuptestframework.makemdb(testfolder)
    connStr = r"Provider=%s;Data Source=%s" % (driver, testmdb)
else:  # try a remote connection to an SQL server
    conn_kws["proxy_host"] = "25.44.77.176"
    import adodbapi.remote

    db = adodbapi.remote

print("Using Connection String like=%s" % connStr)
print("Keywords=%s" % repr(conn_kws))


class test_adodbapi(dbapi20.DatabaseAPI20Test):
    driver = db
    connect_args = (connStr,)
    connect_kw_args = conn_kws

    def __init__(self, arg):
        dbapi20.DatabaseAPI20Test.__init__(self, arg)

    def getTestMethodName(self):
        return self.id().split(".")[-1]

    def setUp(self):
        # Call superclass setUp In case this does something in the
        # future
        dbapi20.DatabaseAPI20Test.setUp(self)
        if self.getTestMethodName() == "test_callproc":
            con = self._connect()
            engine = con.dbms_name
            ## print('Using database Engine=%s' % engine) ##
            if engine != "MS Jet":
                sql = """
                    create procedure templower
                        @theData varchar(50)
                    as
                        select lower(@theData)
                """
            else:  # Jet
                sql = """
                    create procedure templower
                        (theData varchar(50))
                    as
                        select lower(theData);
                """
            cur = con.cursor()
            try:
                cur.execute(sql)
                con.commit()
            except:
                pass
            cur.close()
            con.close()
            self.lower_func = "templower"

    def tearDown(self):
        if self.getTestMethodName() == "test_callproc":
            con = self._connect()
            cur = con.cursor()
            try:
                cur.execute("drop procedure templower")
            except:
                pass
            con.commit()
        dbapi20.DatabaseAPI20Test.tearDown(self)

    def help_nextset_setUp(self, cur):
        "Should create a procedure called deleteme"
        'that returns two result sets, first the number of rows in booze then "name from booze"'
        sql = """
            create procedure deleteme as
            begin
                select count(*) from %sbooze
                select name from %sbooze
            end
        """ % (
            self.table_prefix,
            self.table_prefix,
        )
        cur.execute(sql)

    def help_nextset_tearDown(self, cur):
        "If cleaning up is needed after nextSetTest"
        try:
            cur.execute("drop procedure deleteme")
        except:
            pass

    def test_nextset(self):
        con = self._connect()
        try:
            cur = con.cursor()

            stmts = [self.ddl1] + self._populate()
            for sql in stmts:
                cur.execute(sql)

            self.help_nextset_setUp(cur)

            cur.callproc("deleteme")
            numberofrows = cur.fetchone()
            assert numberofrows[0] == 6
            assert cur.nextset()
            names = cur.fetchall()
            assert len(names) == len(self.samples)
            s = cur.nextset()
            assert s == None, "No more return sets, should return None"
        finally:
            try:
                self.help_nextset_tearDown(cur)
            finally:
                con.close()

    def test_setoutputsize(self):
        pass


if __name__ == "__main__":
    unittest.main()
    cleanup(testfolder, None)
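Editor's aside, not part of the commit: judging from the sys.argv handling at the top of this file, the compliance suite is meant to be launched directly; the flags below are the ones the file itself checks.

    # python test_adodbapi_dbapi20.py             -- test adodbapi as found on sys.path
    # python test_adodbapi_dbapi20.py --package   -- copy and 2to3-convert a fresh package into a temp dir first
    # python test_adodbapi_dbapi20.py --verbose   -- sets adodbapi.adodbapi.verbose = 3
    # unittest.main() then runs every test_* method inherited from DatabaseAPI20Test,
    # and cleanup(testfolder, None) removes the temporary folder afterwards.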
33
lib/adodbapi/test/tryconnection.py
Normal file
@@ -0,0 +1,33 @@
remote = False  # automatic testing of remote access has been removed here


def try_connection(verbose, *args, **kwargs):
    import adodbapi

    dbconnect = adodbapi.connect
    try:
        s = dbconnect(*args, **kwargs)  # connect to server
        if verbose:
            print("Connected to:", s.connection_string)
            print("which has tables:", s.get_table_names())
        s.close()  # thanks, it worked, goodbye
    except adodbapi.DatabaseError as inst:
        print(inst.args[0])  # should be the error message
        print("***Failed getting connection using=", repr(args), repr(kwargs))
        return False, (args, kwargs), None

    print(" (successful)")

    return True, (args, kwargs, remote), dbconnect


def try_operation_with_expected_exception(
    expected_exception_list, some_function, *args, **kwargs
):
    try:
        some_function(*args, **kwargs)
    except expected_exception_list as e:
        return True, e
    except:
        raise  # an exception other than the expected occurred
    return False, "The expected exception did not occur"
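Editor's aside, not part of the commit: a minimal sketch of how try_connection is meant to be called. The connection string here is a placeholder, not one taken from the diff; the return shapes match the function above.

    import tryconnection

    connStr = "Provider=MSOLEDBSQL;Data Source=localhost;..."  # hypothetical
    ok, info, connect = tryconnection.try_connection(True, connStr)
    if ok:
        args, kwargs, remote = info      # on success info is (args, kwargs, remote)
        conn = connect(*args, **kwargs)  # `connect` is adodbapi.connect
        conn.close()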
396
lib/annotated_types/__init__.py
Normal file
@@ -0,0 +1,396 @@
import math
import sys
from dataclasses import dataclass
from datetime import timezone
from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, SupportsFloat, SupportsIndex, TypeVar, Union

if sys.version_info < (3, 8):
    from typing_extensions import Protocol, runtime_checkable
else:
    from typing import Protocol, runtime_checkable

if sys.version_info < (3, 9):
    from typing_extensions import Annotated, Literal
else:
    from typing import Annotated, Literal

if sys.version_info < (3, 10):
    EllipsisType = type(Ellipsis)
    KW_ONLY = {}
    SLOTS = {}
else:
    from types import EllipsisType

    KW_ONLY = {"kw_only": True}
    SLOTS = {"slots": True}


__all__ = (
    'BaseMetadata',
    'GroupedMetadata',
    'Gt',
    'Ge',
    'Lt',
    'Le',
    'Interval',
    'MultipleOf',
    'MinLen',
    'MaxLen',
    'Len',
    'Timezone',
    'Predicate',
    'LowerCase',
    'UpperCase',
    'IsDigits',
    'IsFinite',
    'IsNotFinite',
    'IsNan',
    'IsNotNan',
    'IsInfinite',
    'IsNotInfinite',
    'doc',
    'DocInfo',
    '__version__',
)

__version__ = '0.6.0'


T = TypeVar('T')


# arguments that start with __ are considered
# positional only
# see https://peps.python.org/pep-0484/#positional-only-arguments


class SupportsGt(Protocol):
    def __gt__(self: T, __other: T) -> bool:
        ...


class SupportsGe(Protocol):
    def __ge__(self: T, __other: T) -> bool:
        ...


class SupportsLt(Protocol):
    def __lt__(self: T, __other: T) -> bool:
        ...


class SupportsLe(Protocol):
    def __le__(self: T, __other: T) -> bool:
        ...


class SupportsMod(Protocol):
    def __mod__(self: T, __other: T) -> T:
        ...


class SupportsDiv(Protocol):
    def __div__(self: T, __other: T) -> T:
        ...


class BaseMetadata:
    """Base class for all metadata.

    This exists mainly so that implementers
    can do `isinstance(..., BaseMetadata)` while traversing field annotations.
    """

    __slots__ = ()


@dataclass(frozen=True, **SLOTS)
class Gt(BaseMetadata):
    """Gt(gt=x) implies that the value must be greater than x.

    It can be used with any type that supports the ``>`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    gt: SupportsGt


@dataclass(frozen=True, **SLOTS)
class Ge(BaseMetadata):
    """Ge(ge=x) implies that the value must be greater than or equal to x.

    It can be used with any type that supports the ``>=`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    ge: SupportsGe


@dataclass(frozen=True, **SLOTS)
class Lt(BaseMetadata):
    """Lt(lt=x) implies that the value must be less than x.

    It can be used with any type that supports the ``<`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    lt: SupportsLt


@dataclass(frozen=True, **SLOTS)
class Le(BaseMetadata):
    """Le(le=x) implies that the value must be less than or equal to x.

    It can be used with any type that supports the ``<=`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    le: SupportsLe


@runtime_checkable
class GroupedMetadata(Protocol):
    """A grouping of multiple BaseMetadata objects.

    `GroupedMetadata` on its own is not metadata and has no meaning.
    All of the constraints and metadata should be fully expressable
    in terms of the `BaseMetadata`'s returned by `GroupedMetadata.__iter__()`.

    Concrete implementations should override `GroupedMetadata.__iter__()`
    to add their own metadata.
    For example:

    >>> @dataclass
    >>> class Field(GroupedMetadata):
    >>>     gt: float | None = None
    >>>     description: str | None = None
    ...
    >>>     def __iter__(self) -> Iterable[BaseMetadata]:
    >>>         if self.gt is not None:
    >>>             yield Gt(self.gt)
    >>>         if self.description is not None:
    >>>             yield Description(self.gt)

    Also see the implementation of `Interval` below for an example.

    Parsers should recognize this and unpack it so that it can be used
    both with and without unpacking:

    - `Annotated[int, Field(...)]` (parser must unpack Field)
    - `Annotated[int, *Field(...)]` (PEP-646)
    """  # noqa: trailing-whitespace

    @property
    def __is_annotated_types_grouped_metadata__(self) -> Literal[True]:
        return True

    def __iter__(self) -> Iterator[BaseMetadata]:
        ...

    if not TYPE_CHECKING:
        __slots__ = ()  # allow subclasses to use slots

        def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None:
            # Basic ABC like functionality without the complexity of an ABC
            super().__init_subclass__(*args, **kwargs)
            if cls.__iter__ is GroupedMetadata.__iter__:
                raise TypeError("Can't subclass GroupedMetadata without implementing __iter__")

        def __iter__(self) -> Iterator[BaseMetadata]:  # noqa: F811
            raise NotImplementedError  # more helpful than "None has no attribute..." type errors


@dataclass(frozen=True, **KW_ONLY, **SLOTS)
class Interval(GroupedMetadata):
    """Interval can express inclusive or exclusive bounds with a single object.

    It accepts keyword arguments ``gt``, ``ge``, ``lt``, and/or ``le``, which
    are interpreted the same way as the single-bound constraints.
    """

    gt: Union[SupportsGt, None] = None
    ge: Union[SupportsGe, None] = None
    lt: Union[SupportsLt, None] = None
    le: Union[SupportsLe, None] = None

    def __iter__(self) -> Iterator[BaseMetadata]:
        """Unpack an Interval into zero or more single-bounds."""
        if self.gt is not None:
            yield Gt(self.gt)
        if self.ge is not None:
            yield Ge(self.ge)
        if self.lt is not None:
            yield Lt(self.lt)
        if self.le is not None:
            yield Le(self.le)


@dataclass(frozen=True, **SLOTS)
class MultipleOf(BaseMetadata):
    """MultipleOf(multiple_of=x) might be interpreted in two ways:

    1. Python semantics, implying ``value % multiple_of == 0``, or
    2. JSONschema semantics, where ``int(value / multiple_of) == value / multiple_of``

    We encourage users to be aware of these two common interpretations,
    and libraries to carefully document which they implement.
    """

    multiple_of: Union[SupportsDiv, SupportsMod]


@dataclass(frozen=True, **SLOTS)
class MinLen(BaseMetadata):
    """
    MinLen() implies minimum inclusive length,
    e.g. ``len(value) >= min_length``.
    """

    min_length: Annotated[int, Ge(0)]


@dataclass(frozen=True, **SLOTS)
class MaxLen(BaseMetadata):
    """
    MaxLen() implies maximum inclusive length,
    e.g. ``len(value) <= max_length``.
    """

    max_length: Annotated[int, Ge(0)]


@dataclass(frozen=True, **SLOTS)
class Len(GroupedMetadata):
    """
    Len() implies that ``min_length <= len(value) <= max_length``.

    Upper bound may be omitted or ``None`` to indicate no upper length bound.
    """

    min_length: Annotated[int, Ge(0)] = 0
    max_length: Optional[Annotated[int, Ge(0)]] = None

    def __iter__(self) -> Iterator[BaseMetadata]:
        """Unpack a Len into zone or more single-bounds."""
        if self.min_length > 0:
            yield MinLen(self.min_length)
        if self.max_length is not None:
            yield MaxLen(self.max_length)


@dataclass(frozen=True, **SLOTS)
class Timezone(BaseMetadata):
    """Timezone(tz=...) requires a datetime to be aware (or ``tz=None``, naive).

    ``Annotated[datetime, Timezone(None)]`` must be a naive datetime.
    ``Timezone[...]`` (the ellipsis literal) expresses that the datetime must be
    tz-aware but any timezone is allowed.

    You may also pass a specific timezone string or timezone object such as
    ``Timezone(timezone.utc)`` or ``Timezone("Africa/Abidjan")`` to express that
    you only allow a specific timezone, though we note that this is often
    a symptom of poor design.
    """

    tz: Union[str, timezone, EllipsisType, None]


@dataclass(frozen=True, **SLOTS)
class Predicate(BaseMetadata):
    """``Predicate(func: Callable)`` implies `func(value)` is truthy for valid values.

    Users should prefer statically inspectable metadata, but if you need the full
    power and flexibility of arbitrary runtime predicates... here it is.

    We provide a few predefined predicates for common string constraints:
    ``IsLower = Predicate(str.islower)``, ``IsUpper = Predicate(str.isupper)``, and
    ``IsDigit = Predicate(str.isdigit)``. Users are encouraged to use methods which
    can be given special handling, and avoid indirection like ``lambda s: s.lower()``.

    Some libraries might have special logic to handle certain predicates, e.g. by
    checking for `str.isdigit` and using its presence to both call custom logic to
    enforce digit-only strings, and customise some generated external schema.

    We do not specify what behaviour should be expected for predicates that raise
    an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
    skip invalid constraints, or statically raise an error; or it might try calling it
    and then propogate or discard the resulting exception.
    """

    func: Callable[[Any], bool]


@dataclass
class Not:
    func: Callable[[Any], bool]

    def __call__(self, __v: Any) -> bool:
        return not self.func(__v)


_StrType = TypeVar("_StrType", bound=str)

LowerCase = Annotated[_StrType, Predicate(str.islower)]
"""
Return True if the string is a lowercase string, False otherwise.

A string is lowercase if all cased characters in the string are lowercase and there is at least one cased character in the string.
"""  # noqa: E501
UpperCase = Annotated[_StrType, Predicate(str.isupper)]
"""
Return True if the string is an uppercase string, False otherwise.

A string is uppercase if all cased characters in the string are uppercase and there is at least one cased character in the string.
"""  # noqa: E501
IsDigits = Annotated[_StrType, Predicate(str.isdigit)]
"""
Return True if the string is a digit string, False otherwise.

A string is a digit string if all characters in the string are digits and there is at least one character in the string.
"""  # noqa: E501
IsAscii = Annotated[_StrType, Predicate(str.isascii)]
"""
Return True if all characters in the string are ASCII, False otherwise.

ASCII characters have code points in the range U+0000-U+007F. Empty string is ASCII too.
"""

_NumericType = TypeVar('_NumericType', bound=Union[SupportsFloat, SupportsIndex])
IsFinite = Annotated[_NumericType, Predicate(math.isfinite)]
"""Return True if x is neither an infinity nor a NaN, and False otherwise."""
IsNotFinite = Annotated[_NumericType, Predicate(Not(math.isfinite))]
"""Return True if x is one of infinity or NaN, and False otherwise"""
IsNan = Annotated[_NumericType, Predicate(math.isnan)]
"""Return True if x is a NaN (not a number), and False otherwise."""
IsNotNan = Annotated[_NumericType, Predicate(Not(math.isnan))]
"""Return True if x is anything but NaN (not a number), and False otherwise."""
IsInfinite = Annotated[_NumericType, Predicate(math.isinf)]
"""Return True if x is a positive or negative infinity, and False otherwise."""
IsNotInfinite = Annotated[_NumericType, Predicate(Not(math.isinf))]
"""Return True if x is neither a positive or negative infinity, and False otherwise."""

try:
    from typing_extensions import DocInfo, doc  # type: ignore [attr-defined]
except ImportError:

    @dataclass(frozen=True, **SLOTS)
    class DocInfo:  # type: ignore [no-redef]
        """ "
        The return value of doc(), mainly to be used by tools that want to extract the
        Annotated documentation at runtime.
        """

        documentation: str
        """The documentation string passed to doc()."""

    def doc(
        documentation: str,
    ) -> DocInfo:
        """
        Add documentation to a type annotation inside of Annotated.

        For example:

        >>> def hi(name: Annotated[int, doc("The name of the user")]) -> None: ...
        """
        return DocInfo(documentation)
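Editor's aside, not part of the commit: an illustration of the traversal pattern the BaseMetadata and GroupedMetadata docstrings describe, assuming Python 3.9+ so Annotated and get_args come from typing.

    from typing import Annotated, get_args

    import annotated_types as at

    Age = Annotated[int, at.Interval(ge=0, lt=130)]

    flattened = []
    for meta in get_args(Age)[1:]:
        if isinstance(meta, at.GroupedMetadata):
            flattened.extend(meta)  # Interval unpacks to Ge(ge=0), Lt(lt=130)
        elif isinstance(meta, at.BaseMetadata):
            flattened.append(meta)

    print(flattened)  # [Ge(ge=0), Lt(lt=130)]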
147
lib/annotated_types/test_cases.py
Normal file
@@ -0,0 +1,147 @@
import math
import sys
from datetime import date, datetime, timedelta, timezone
from decimal import Decimal
from typing import Any, Dict, Iterable, Iterator, List, NamedTuple, Set, Tuple

if sys.version_info < (3, 9):
    from typing_extensions import Annotated
else:
    from typing import Annotated

import annotated_types as at


class Case(NamedTuple):
    """
    A test case for `annotated_types`.
    """

    annotation: Any
    valid_cases: Iterable[Any]
    invalid_cases: Iterable[Any]


def cases() -> Iterable[Case]:
    # Gt, Ge, Lt, Le
    yield Case(Annotated[int, at.Gt(4)], (5, 6, 1000), (4, 0, -1))
    yield Case(Annotated[float, at.Gt(0.5)], (0.6, 0.7, 0.8, 0.9), (0.5, 0.0, -0.1))
    yield Case(
        Annotated[datetime, at.Gt(datetime(2000, 1, 1))],
        [datetime(2000, 1, 2), datetime(2000, 1, 3)],
        [datetime(2000, 1, 1), datetime(1999, 12, 31)],
    )
    yield Case(
        Annotated[datetime, at.Gt(date(2000, 1, 1))],
        [date(2000, 1, 2), date(2000, 1, 3)],
        [date(2000, 1, 1), date(1999, 12, 31)],
    )
    yield Case(
        Annotated[datetime, at.Gt(Decimal('1.123'))],
        [Decimal('1.1231'), Decimal('123')],
        [Decimal('1.123'), Decimal('0')],
    )

    yield Case(Annotated[int, at.Ge(4)], (4, 5, 6, 1000, 4), (0, -1))
    yield Case(Annotated[float, at.Ge(0.5)], (0.5, 0.6, 0.7, 0.8, 0.9), (0.4, 0.0, -0.1))
    yield Case(
        Annotated[datetime, at.Ge(datetime(2000, 1, 1))],
        [datetime(2000, 1, 2), datetime(2000, 1, 3)],
        [datetime(1998, 1, 1), datetime(1999, 12, 31)],
    )

    yield Case(Annotated[int, at.Lt(4)], (0, -1), (4, 5, 6, 1000, 4))
    yield Case(Annotated[float, at.Lt(0.5)], (0.4, 0.0, -0.1), (0.5, 0.6, 0.7, 0.8, 0.9))
    yield Case(
        Annotated[datetime, at.Lt(datetime(2000, 1, 1))],
        [datetime(1999, 12, 31), datetime(1999, 12, 31)],
        [datetime(2000, 1, 2), datetime(2000, 1, 3)],
    )

    yield Case(Annotated[int, at.Le(4)], (4, 0, -1), (5, 6, 1000))
    yield Case(Annotated[float, at.Le(0.5)], (0.5, 0.0, -0.1), (0.6, 0.7, 0.8, 0.9))
    yield Case(
        Annotated[datetime, at.Le(datetime(2000, 1, 1))],
        [datetime(2000, 1, 1), datetime(1999, 12, 31)],
        [datetime(2000, 1, 2), datetime(2000, 1, 3)],
    )

    # Interval
    yield Case(Annotated[int, at.Interval(gt=4)], (5, 6, 1000), (4, 0, -1))
    yield Case(Annotated[int, at.Interval(gt=4, lt=10)], (5, 6), (4, 10, 1000, 0, -1))
    yield Case(Annotated[float, at.Interval(ge=0.5, le=1)], (0.5, 0.9, 1), (0.49, 1.1))
    yield Case(
        Annotated[datetime, at.Interval(gt=datetime(2000, 1, 1), le=datetime(2000, 1, 3))],
        [datetime(2000, 1, 2), datetime(2000, 1, 3)],
        [datetime(2000, 1, 1), datetime(2000, 1, 4)],
    )

    yield Case(Annotated[int, at.MultipleOf(multiple_of=3)], (0, 3, 9), (1, 2, 4))
    yield Case(Annotated[float, at.MultipleOf(multiple_of=0.5)], (0, 0.5, 1, 1.5), (0.4, 1.1))

    # lengths

    yield Case(Annotated[str, at.MinLen(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
    yield Case(Annotated[str, at.Len(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
    yield Case(Annotated[List[int], at.MinLen(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))
    yield Case(Annotated[List[int], at.Len(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))

    yield Case(Annotated[str, at.MaxLen(4)], ('', '1234'), ('12345', 'x' * 10))
    yield Case(Annotated[str, at.Len(0, 4)], ('', '1234'), ('12345', 'x' * 10))
    yield Case(Annotated[List[str], at.MaxLen(4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))
    yield Case(Annotated[List[str], at.Len(0, 4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))

    yield Case(Annotated[str, at.Len(3, 5)], ('123', '12345'), ('', '1', '12', '123456', 'x' * 10))
    yield Case(Annotated[str, at.Len(3, 3)], ('123',), ('12', '1234'))

    yield Case(Annotated[Dict[int, int], at.Len(2, 3)], [{1: 1, 2: 2}], [{}, {1: 1}, {1: 1, 2: 2, 3: 3, 4: 4}])
    yield Case(Annotated[Set[int], at.Len(2, 3)], ({1, 2}, {1, 2, 3}), (set(), {1}, {1, 2, 3, 4}))
    yield Case(Annotated[Tuple[int, ...], at.Len(2, 3)], ((1, 2), (1, 2, 3)), ((), (1,), (1, 2, 3, 4)))

    # Timezone

    yield Case(
        Annotated[datetime, at.Timezone(None)], [datetime(2000, 1, 1)], [datetime(2000, 1, 1, tzinfo=timezone.utc)]
    )
    yield Case(
        Annotated[datetime, at.Timezone(...)], [datetime(2000, 1, 1, tzinfo=timezone.utc)], [datetime(2000, 1, 1)]
    )
    yield Case(
        Annotated[datetime, at.Timezone(timezone.utc)],
        [datetime(2000, 1, 1, tzinfo=timezone.utc)],
        [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
    )
    yield Case(
        Annotated[datetime, at.Timezone('Europe/London')],
        [datetime(2000, 1, 1, tzinfo=timezone(timedelta(0), name='Europe/London'))],
        [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
    )

    # predicate types

    yield Case(at.LowerCase[str], ['abc', 'foobar'], ['', 'A', 'Boom'])
    yield Case(at.UpperCase[str], ['ABC', 'DEFO'], ['', 'a', 'abc', 'AbC'])
    yield Case(at.IsDigits[str], ['123'], ['', 'ab', 'a1b2'])
    yield Case(at.IsAscii[str], ['123', 'foo bar'], ['£100', '😊', 'whatever 👀'])

    yield Case(Annotated[int, at.Predicate(lambda x: x % 2 == 0)], [0, 2, 4], [1, 3, 5])

    yield Case(at.IsFinite[float], [1.23], [math.nan, math.inf, -math.inf])
    yield Case(at.IsNotFinite[float], [math.nan, math.inf], [1.23])
    yield Case(at.IsNan[float], [math.nan], [1.23, math.inf])
    yield Case(at.IsNotNan[float], [1.23, math.inf], [math.nan])
    yield Case(at.IsInfinite[float], [math.inf], [math.nan, 1.23])
    yield Case(at.IsNotInfinite[float], [math.nan, 1.23], [math.inf])

    # check stacked predicates
    yield Case(at.IsInfinite[Annotated[float, at.Predicate(lambda x: x > 0)]], [math.inf], [-math.inf, 1.23, math.nan])

    # doc
    yield Case(Annotated[int, at.doc("A number")], [1, 2], [])

    # custom GroupedMetadata
    class MyCustomGroupedMetadata(at.GroupedMetadata):
        def __iter__(self) -> Iterator[at.Predicate]:
            yield at.Predicate(lambda x: float(x).is_integer())

    yield Case(Annotated[float, MyCustomGroupedMetadata()], [0, 2.0], [0.01, 1.5])
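Editor's aside, not part of the commit: cases() yields driver-agnostic test data; a validation library would assert that each annotation accepts valid_cases and rejects invalid_cases. A minimal consumer might look like this:

    from annotated_types.test_cases import cases

    for case in cases():
        valid = list(case.valid_cases)
        invalid = list(case.invalid_cases)
        # A real checker would evaluate each value against case.annotation here.
        print(case.annotation, f"-> {len(valid)} valid / {len(invalid)} invalid examples")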
@@ -1 +1 @@
-__version__ = "1.2.3"
+__version__ = "1.3.0"
@@ -168,9 +168,9 @@ class Arrow:
            isinstance(tzinfo, dt_tzinfo)
            and hasattr(tzinfo, "localize")
            and hasattr(tzinfo, "zone")
-           and tzinfo.zone  # type: ignore[attr-defined]
+           and tzinfo.zone
        ):
-           tzinfo = parser.TzinfoParser.parse(tzinfo.zone)  # type: ignore[attr-defined]
+           tzinfo = parser.TzinfoParser.parse(tzinfo.zone)
        elif isinstance(tzinfo, str):
            tzinfo = parser.TzinfoParser.parse(tzinfo)
@@ -495,7 +495,7 @@ class Arrow:
            yield current

            values = [getattr(current, f) for f in cls._ATTRS]
-           current = cls(*values, tzinfo=tzinfo).shift(  # type: ignore
+           current = cls(*values, tzinfo=tzinfo).shift(  # type: ignore[misc]
                **{frame_relative: relative_steps}
            )
@@ -578,7 +578,7 @@ class Arrow:
        for _ in range(3 - len(values)):
            values.append(1)

-       floor = self.__class__(*values, tzinfo=self.tzinfo)  # type: ignore
+       floor = self.__class__(*values, tzinfo=self.tzinfo)  # type: ignore[misc]

        if frame_absolute == "week":
            # if week_start is greater than self.isoweekday() go back one week by setting delta = 7
@@ -792,7 +792,6 @@ class Arrow:
        return self._datetime.isoformat()

    def __format__(self, formatstr: str) -> str:
-
        if len(formatstr) > 0:
            return self.format(formatstr)

@@ -804,7 +803,6 @@ class Arrow:
    # attributes and properties

    def __getattr__(self, name: str) -> int:
-
        if name == "week":
            return self.isocalendar()[1]

@@ -965,7 +963,6 @@ class Arrow:
        absolute_kwargs = {}

        for key, value in kwargs.items():
-
            if key in self._ATTRS:
                absolute_kwargs[key] = value
            elif key in ["week", "quarter"]:
@@ -1022,7 +1019,6 @@ class Arrow:
        additional_attrs = ["weeks", "quarters", "weekday"]

        for key, value in kwargs.items():
-
            if key in self._ATTRS_PLURAL or key in additional_attrs:
                relative_kwargs[key] = value
            else:
@@ -1259,11 +1255,10 @@ class Arrow:
            )

            if trunc(abs(delta)) != 1:
-               granularity += "s"  # type: ignore
+               granularity += "s"  # type: ignore[assignment]
            return locale.describe(granularity, delta, only_distance=only_distance)

        else:
-
            if not granularity:
                raise ValueError(
                    "Empty granularity list provided. "
@@ -1314,7 +1309,7 @@ class Arrow:

    def dehumanize(self, input_string: str, locale: str = "en_us") -> "Arrow":
        """Returns a new :class:`Arrow <arrow.arrow.Arrow>` object, that represents
-       the time difference relative to the attrbiutes of the
+       the time difference relative to the attributes of the
        :class:`Arrow <arrow.arrow.Arrow>` object.

        :param timestring: a ``str`` representing a humanized relative time.
@@ -1367,7 +1362,6 @@ class Arrow:

        # Search input string for each time unit within locale
        for unit, unit_object in locale_obj.timeframes.items():
-
            # Need to check the type of unit_object to create the correct dictionary
            if isinstance(unit_object, Mapping):
                strings_to_search = unit_object
@@ -1378,7 +1372,6 @@ class Arrow:
            # Needs to cycle all through strings as some locales have strings that
            # could overlap in a regex match, since input validation isn't being performed.
            for time_delta, time_string in strings_to_search.items():
-
                # Replace {0} with regex \d representing digits
                search_string = str(time_string)
                search_string = search_string.format(r"\d+")
@@ -1419,7 +1412,7 @@ class Arrow:
        # Assert error if string does not modify any units
        if not any([True for k, v in unit_visited.items() if v]):
            raise ValueError(
-               "Input string not valid. Note: Some locales do not support the week granulairty in Arrow. "
+               "Input string not valid. Note: Some locales do not support the week granularity in Arrow. "
                "If you are attempting to use the week granularity on an unsupported locale, this could be the cause of this error."
            )

@@ -1718,7 +1711,6 @@ class Arrow:
    # math

    def __add__(self, other: Any) -> "Arrow":
-
        if isinstance(other, (timedelta, relativedelta)):
            return self.fromdatetime(self._datetime + other, self._datetime.tzinfo)

@@ -1736,7 +1728,6 @@ class Arrow:
        pass  # pragma: no cover

    def __sub__(self, other: Any) -> Union[timedelta, "Arrow"]:
-
        if isinstance(other, (timedelta, relativedelta)):
            return self.fromdatetime(self._datetime - other, self._datetime.tzinfo)

@@ -1749,7 +1740,6 @@ class Arrow:
        return NotImplemented

    def __rsub__(self, other: Any) -> timedelta:
-
        if isinstance(other, dt_datetime):
            return other - self._datetime

@@ -1758,42 +1748,36 @@ class Arrow:
    # comparisons

    def __eq__(self, other: Any) -> bool:
-
        if not isinstance(other, (Arrow, dt_datetime)):
            return False

        return self._datetime == self._get_datetime(other)

    def __ne__(self, other: Any) -> bool:
-
        if not isinstance(other, (Arrow, dt_datetime)):
            return True

        return not self.__eq__(other)

    def __gt__(self, other: Any) -> bool:
-
        if not isinstance(other, (Arrow, dt_datetime)):
            return NotImplemented

        return self._datetime > self._get_datetime(other)

    def __ge__(self, other: Any) -> bool:
-
        if not isinstance(other, (Arrow, dt_datetime)):
            return NotImplemented

        return self._datetime >= self._get_datetime(other)

    def __lt__(self, other: Any) -> bool:
-
        if not isinstance(other, (Arrow, dt_datetime)):
            return NotImplemented

        return self._datetime < self._get_datetime(other)

    def __le__(self, other: Any) -> bool:
-
        if not isinstance(other, (Arrow, dt_datetime)):
            return NotImplemented

@@ -1865,7 +1849,6 @@ class Arrow:
    def _get_iteration_params(cls, end: Any, limit: Optional[int]) -> Tuple[Any, int]:
        """Sets default end and limit values for range method."""
        if end is None:
-
            if limit is None:
                raise ValueError("One of 'end' or 'limit' is required.")

@@ -267,11 +267,9 @@ class ArrowFactory:
            raise TypeError(f"Cannot parse single argument of type {type(arg)!r}.")

        elif arg_count == 2:
-
            arg_1, arg_2 = args[0], args[1]

            if isinstance(arg_1, datetime):
-
                # (datetime, tzinfo/str) -> fromdatetime @ tzinfo
                if isinstance(arg_2, (dt_tzinfo, str)):
                    return self.type.fromdatetime(arg_1, tzinfo=arg_2)
@@ -281,7 +279,6 @@ class ArrowFactory:
                )

            elif isinstance(arg_1, date):
-
                # (date, tzinfo/str) -> fromdate @ tzinfo
                if isinstance(arg_2, (dt_tzinfo, str)):
                    return self.type.fromdate(arg_1, tzinfo=arg_2)
@@ -29,7 +29,6 @@ FORMAT_W3C: Final[str] = "YYYY-MM-DD HH:mm:ssZZ"


class DateTimeFormatter:
-
    # This pattern matches characters enclosed in square brackets are matched as
    # an atomic group. For more info on atomic groups and how to they are
    # emulated in Python's re library, see https://stackoverflow.com/a/13577411/2701578
@@ -41,18 +40,15 @@ class DateTimeFormatter:
    locale: locales.Locale

    def __init__(self, locale: str = DEFAULT_LOCALE) -> None:
-
        self.locale = locales.get_locale(locale)

    def format(cls, dt: datetime, fmt: str) -> str:
-
        # FIXME: _format_token() is nullable
        return cls._FORMAT_RE.sub(
            lambda m: cast(str, cls._format_token(dt, m.group(0))), fmt
        )

    def _format_token(self, dt: datetime, token: Optional[str]) -> Optional[str]:
-
        if token and token.startswith("[") and token.endswith("]"):
            return token[1:-1]

@@ -129,7 +129,6 @@ class Locale:
         _locale_map[locale_name.lower().replace("_", "-")] = cls
 
     def __init__(self) -> None:
         self._month_name_to_ordinal = None
 
     def describe(
@@ -174,7 +173,7 @@ class Locale:
         # Needed to determine the correct relative string to use
         timeframe_value = 0
 
-        for _unit_name, unit_value in timeframes:
+        for _, unit_value in timeframes:
             if trunc(unit_value) != 0:
                 timeframe_value = trunc(unit_value)
                 break
@@ -285,7 +284,6 @@ class Locale:
         timeframe: TimeFrameLiteral,
         delta: Union[float, int],
     ) -> str:
 
         if timeframe == "now":
             return humanized
 
@@ -425,7 +423,7 @@ class ItalianLocale(Locale):
         "hours": "{0} ore",
         "day": "un giorno",
         "days": "{0} giorni",
-        "week": "una settimana,",
+        "week": "una settimana",
         "weeks": "{0} settimane",
         "month": "un mese",
         "months": "{0} mesi",
@@ -867,14 +865,16 @@ class FinnishLocale(Locale):
 
     timeframes: ClassVar[Mapping[TimeFrameLiteral, Union[str, Mapping[str, str]]]] = {
         "now": "juuri nyt",
-        "second": "sekunti",
-        "seconds": {"past": "{0} muutama sekunti", "future": "{0} muutaman sekunnin"},
+        "second": {"past": "sekunti", "future": "sekunnin"},
+        "seconds": {"past": "{0} sekuntia", "future": "{0} sekunnin"},
         "minute": {"past": "minuutti", "future": "minuutin"},
         "minutes": {"past": "{0} minuuttia", "future": "{0} minuutin"},
         "hour": {"past": "tunti", "future": "tunnin"},
         "hours": {"past": "{0} tuntia", "future": "{0} tunnin"},
-        "day": "päivä",
+        "day": {"past": "päivä", "future": "päivän"},
         "days": {"past": "{0} päivää", "future": "{0} päivän"},
+        "week": {"past": "viikko", "future": "viikon"},
+        "weeks": {"past": "{0} viikkoa", "future": "{0} viikon"},
         "month": {"past": "kuukausi", "future": "kuukauden"},
         "months": {"past": "{0} kuukautta", "future": "{0} kuukauden"},
         "year": {"past": "vuosi", "future": "vuoden"},
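
The "week"/"weeks" entries added to FinnishLocale (and to several locales in the hunks below) let week-granularity humanizing resolve without a KeyError. A short sketch, assuming the vendored arrow package; the rendered string follows the table above:

    import arrow

    two_weeks_ago = arrow.utcnow().shift(weeks=-2)
    print(two_weeks_ago.humanize(locale="fi", granularity="week"))
    # -> "2 viikkoa sitten"
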
@@ -1887,7 +1887,7 @@ class GermanBaseLocale(Locale):
     future = "in {0}"
     and_word = "und"
 
-    timeframes = {
+    timeframes: ClassVar[Dict[TimeFrameLiteral, str]] = {
         "now": "gerade eben",
         "second": "einer Sekunde",
         "seconds": "{0} Sekunden",
@@ -1982,7 +1982,9 @@ class GermanBaseLocale(Locale):
             return super().describe(timeframe, delta, only_distance)
 
         # German uses a different case without 'in' or 'ago'
-        humanized = self.timeframes_only_distance[timeframe].format(trunc(abs(delta)))
+        humanized: str = self.timeframes_only_distance[timeframe].format(
+            trunc(abs(delta))
+        )
 
         return humanized
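
GermanBaseLocale.describe special-cases only_distance because German needs a different grammatical case once "vor"/"in" is dropped; the change here only reflows the long line and adds a type annotation. A usage sketch, assuming the vendored arrow package:

    import arrow

    moment = arrow.utcnow().shift(hours=-2)
    print(moment.humanize(locale="de"))                      # -> "vor 2 Stunden"
    print(moment.humanize(locale="de", only_distance=True))  # -> "2 Stunden"
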
@@ -2547,6 +2549,8 @@ class ArabicLocale(Locale):
         "hours": {"2": "ساعتين", "ten": "{0} ساعات", "higher": "{0} ساعة"},
         "day": "يوم",
         "days": {"2": "يومين", "ten": "{0} أيام", "higher": "{0} يوم"},
+        "week": "اسبوع",
+        "weeks": {"2": "اسبوعين", "ten": "{0} أسابيع", "higher": "{0} اسبوع"},
         "month": "شهر",
         "months": {"2": "شهرين", "ten": "{0} أشهر", "higher": "{0} شهر"},
         "year": "سنة",
@@ -3709,6 +3713,8 @@ class HungarianLocale(Locale):
         "hours": {"past": "{0} órával", "future": "{0} óra"},
         "day": {"past": "egy nappal", "future": "egy nap"},
         "days": {"past": "{0} nappal", "future": "{0} nap"},
+        "week": {"past": "egy héttel", "future": "egy hét"},
+        "weeks": {"past": "{0} héttel", "future": "{0} hét"},
         "month": {"past": "egy hónappal", "future": "egy hónap"},
         "months": {"past": "{0} hónappal", "future": "{0} hónap"},
         "year": {"past": "egy évvel", "future": "egy év"},
@@ -3934,7 +3940,6 @@ class ThaiLocale(Locale):
 
 
 class LaotianLocale(Locale):
 
     names = ["lo", "lo-la"]
 
     past = "{0} ກ່ອນຫນ້ານີ້"
@@ -4119,6 +4124,7 @@ class BengaliLocale(Locale):
             return f"{n}র্থ"
         if n == 6:
             return f"{n}ষ্ঠ"
+        return ""
 
 
 class RomanshLocale(Locale):
@@ -4137,6 +4143,8 @@ class RomanshLocale(Locale):
         "hours": "{0} ura",
         "day": "in di",
         "days": "{0} dis",
+        "week": "in'emna",
+        "weeks": "{0} emnas",
         "month": "in mais",
         "months": "{0} mais",
         "year": "in onn",
@@ -5399,7 +5407,7 @@ class LuxembourgishLocale(Locale):
     future = "an {0}"
     and_word = "an"
 
-    timeframes = {
+    timeframes: ClassVar[Dict[TimeFrameLiteral, str]] = {
         "now": "just elo",
         "second": "enger Sekonn",
         "seconds": "{0} Sekonnen",
@@ -5487,7 +5495,9 @@ class LuxembourgishLocale(Locale):
             return super().describe(timeframe, delta, only_distance)
 
         # Luxembourgish uses a different case without 'in' or 'ago'
-        humanized = self.timeframes_only_distance[timeframe].format(trunc(abs(delta)))
+        humanized: str = self.timeframes_only_distance[timeframe].format(
+            trunc(abs(delta))
+        )
 
         return humanized

@@ -159,7 +159,6 @@ class DateTimeParser:
     _input_re_map: Dict[_FORMAT_TYPE, Pattern[str]]
 
     def __init__(self, locale: str = DEFAULT_LOCALE, cache_size: int = 0) -> None:
         self.locale = locales.get_locale(locale)
         self._input_re_map = self._BASE_INPUT_RE_MAP.copy()
         self._input_re_map.update(
@@ -196,7 +195,6 @@ class DateTimeParser:
     def parse_iso(
         self, datetime_string: str, normalize_whitespace: bool = False
    ) -> datetime:
 
         if normalize_whitespace:
             datetime_string = re.sub(r"\s+", " ", datetime_string.strip())
 
@@ -236,13 +234,14 @@ class DateTimeParser:
         ]
 
         if has_time:
 
             if has_space_divider:
                 date_string, time_string = datetime_string.split(" ", 1)
             else:
                 date_string, time_string = datetime_string.split("T", 1)
 
-            time_parts = re.split(r"[\+\-Z]", time_string, 1, re.IGNORECASE)
+            time_parts = re.split(
+                r"[\+\-Z]", time_string, maxsplit=1, flags=re.IGNORECASE
+            )
 
             time_components: Optional[Match[str]] = self._TIME_RE.match(time_parts[0])
 
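
The re.split rewrite is a compatibility fix rather than a behavior change: recent CPython releases deprecate passing maxsplit and flags positionally, so the call is spelled out with keywords. The two forms split identically:

    import re

    # Keyword form used above; splits off the time-zone suffix in one cut.
    parts = re.split(r"[\+\-Z]", "12:30:45+01:00", maxsplit=1, flags=re.IGNORECASE)
    print(parts)  # -> ['12:30:45', '01:00']
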
@@ -303,7 +302,6 @@ class DateTimeParser:
         fmt: Union[List[str], str],
         normalize_whitespace: bool = False,
     ) -> datetime:
 
         if normalize_whitespace:
             datetime_string = re.sub(r"\s+", " ", datetime_string)
 
@@ -341,12 +339,11 @@ class DateTimeParser:
                     f"Unable to find a match group for the specified token {token!r}."
                 )
 
-            self._parse_token(token, value, parts)  # type: ignore
+            self._parse_token(token, value, parts)  # type: ignore[arg-type]
 
         return self._build_datetime(parts)
 
     def _generate_pattern_re(self, fmt: str) -> Tuple[List[_FORMAT_TYPE], Pattern[str]]:
 
         # fmt is a string of tokens like 'YYYY-MM-DD'
         # we construct a new string by replacing each
         # token by its pattern:
@@ -498,7 +495,6 @@ class DateTimeParser:
         value: Any,
         parts: _Parts,
     ) -> None:
 
         if token == "YYYY":
             parts["year"] = int(value)
 
@@ -508,7 +504,7 @@ class DateTimeParser:
 
         elif token in ["MMMM", "MMM"]:
             # FIXME: month_number() is nullable
-            parts["month"] = self.locale.month_number(value.lower())  # type: ignore
+            parts["month"] = self.locale.month_number(value.lower())  # type: ignore[typeddict-item]
 
         elif token in ["MM", "M"]:
             parts["month"] = int(value)
 
@@ -588,7 +584,6 @@ class DateTimeParser:
         weekdate = parts.get("weekdate")
 
         if weekdate is not None:
 
             year, week = int(weekdate[0]), int(weekdate[1])
 
             if weekdate[2] is not None:
 
@@ -712,7 +707,6 @@ class DateTimeParser:
         )
 
     def _parse_multiformat(self, string: str, formats: Iterable[str]) -> datetime:
 
         _datetime: Optional[datetime] = None
 
         for fmt in formats:
 
@@ -740,12 +734,11 @@ class DateTimeParser:
 
 class TzinfoParser:
     _TZINFO_RE: ClassVar[Pattern[str]] = re.compile(
-        r"^([\+\-])?(\d{2})(?:\:?(\d{2}))?$"
+        r"^(?:\(UTC)*([\+\-])?(\d{2})(?:\:?(\d{2}))?"
     )
 
     @classmethod
     def parse(cls, tzinfo_string: str) -> dt_tzinfo:
 
         tzinfo: Optional[dt_tzinfo] = None
 
         if tzinfo_string == "local":
 
@@ -755,7 +748,6 @@ class TzinfoParser:
             tzinfo = tz.tzutc()
 
         else:
 
             iso_match = cls._TZINFO_RE.match(tzinfo_string)
 
             if iso_match:
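
The widened _TZINFO_RE accepts Windows-style "(UTC±HH:MM) ..." prefixes and drops the trailing anchor, both of which the old "^...$" pattern rejected. The regex change can be verified in isolation, without importing arrow:

    import re

    old = re.compile(r"^([\+\-])?(\d{2})(?:\:?(\d{2}))?$")
    new = re.compile(r"^(?:\(UTC)*([\+\-])?(\d{2})(?:\:?(\d{2}))?")

    print(old.match("(UTC+09:00) Osaka"))           # -> None
    print(new.match("(UTC+09:00) Osaka").groups())  # -> ('+', '09', '00')
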
@@ -20,7 +20,7 @@ from functools import wraps
 from inspect import signature
 
 
-def _launch_forever_coro(coro, args, kwargs, loop):
+async def _run_forever_coro(coro, args, kwargs, loop):
     '''
     This helper function launches an async main function that was tagged with
     forever=True. There are two possibilities:
@@ -48,7 +48,7 @@ def _launch_forever_coro(coro, args, kwargs, loop):
     # forever=True feature from autoasync at some point in the future.
     thing = coro(*args, **kwargs)
     if iscoroutine(thing):
-        loop.create_task(thing)
+        await thing
 
 
 def autoasync(coro=None, *, loop=None, forever=False, pass_loop=False):
@@ -127,7 +127,9 @@ def autoasync(coro=None, *, loop=None, forever=False, pass_loop=False):
             args, kwargs = bound_args.args, bound_args.kwargs
 
             if forever:
-                _launch_forever_coro(coro, args, kwargs, local_loop)
+                local_loop.create_task(_run_forever_coro(
+                    coro, args, kwargs, local_loop
+                ))
                 local_loop.run_forever()
             else:
                 return local_loop.run_until_complete(coro(*args, **kwargs))

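
The forever=True path changes from calling a plain helper (which only scheduled a task) to scheduling a coroutine that awaits the wrapped main, so exceptions surface inside the running loop. A minimal usage sketch, assuming the vendored autocommand package and the module path shown in the hunk:

    from autocommand.autoasync import autoasync

    @autoasync(forever=True)
    async def main():
        # Awaited inside the loop; the loop keeps running after this
        # coroutine finishes because forever=True was requested.
        print("service started")

    main()  # blocks in loop.run_forever()
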
@@ -26,6 +26,12 @@ def update_wrapper(
 
 
 class _HashedSeq(list):
+    """This class guarantees that hash() will be called no more than once
+    per element. This is important because the lru_cache() will hash
+    the key multiple times on a cache miss.
+
+    """
+
     __slots__ = 'hashvalue'
 
     def __init__(self, tup, hash=hash):
@@ -41,45 +47,57 @@ def _make_key(
     kwds,
     typed,
     kwd_mark=(object(),),
-    fasttypes=set([int, str, frozenset, type(None)]),
-    sorted=sorted,
+    fasttypes={int, str},
     tuple=tuple,
     type=type,
     len=len,
 ):
-    'Make a cache key from optionally typed positional and keyword arguments'
+    """Make a cache key from optionally typed positional and keyword arguments
+
+    The key is constructed in a way that is flat as possible rather than
+    as a nested structure that would take more memory.
+
+    If there is only a single argument and its data type is known to cache
+    its hash value, then that argument is returned without a wrapper. This
+    saves space and improves lookup speed.
+
+    """
+    # All of code below relies on kwds preserving the order input by the user.
+    # Formerly, we sorted() the kwds before looping. The new way is *much*
+    # faster; however, it means that f(x=1, y=2) will now be treated as a
+    # distinct call from f(y=2, x=1) which will be cached separately.
     key = args
     if kwds:
-        sorted_items = sorted(kwds.items())
         key += kwd_mark
-        for item in sorted_items:
+        for item in kwds.items():
             key += item
     if typed:
         key += tuple(type(v) for v in args)
         if kwds:
-            key += tuple(type(v) for k, v in sorted_items)
+            key += tuple(type(v) for v in kwds.values())
     elif len(key) == 1 and type(key[0]) in fasttypes:
         return key[0]
     return _HashedSeq(key)
 
 
-def lru_cache(maxsize=100, typed=False):  # noqa: C901
+def lru_cache(maxsize=128, typed=False):
     """Least-recently-used cache decorator.
 
     If *maxsize* is set to None, the LRU features are disabled and the cache
     can grow without bound.
 
     If *typed* is True, arguments of different types will be cached separately.
-    For example, f(3.0) and f(3) will be treated as distinct calls with
-    distinct results.
+    For example, f(decimal.Decimal("3.0")) and f(3.0) will be treated as
+    distinct calls with distinct results. Some types such as str and int may
+    be cached separately even when typed is false.
 
     Arguments to the cached function must be hashable.
 
-    View the cache statistics named tuple (hits, misses, maxsize, currsize) with
-    f.cache_info(). Clear the cache and statistics with f.cache_clear().
+    View the cache statistics named tuple (hits, misses, maxsize, currsize)
+    with f.cache_info().  Clear the cache and statistics with f.cache_clear().
     Access the underlying function with f.__wrapped__.
 
-    See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
+    See:  https://en.wikipedia.org/wiki/Cache_replacement_policies#Least_recently_used_(LRU)
 
     """
 
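
The comment above the key-building loop states the behavioral contract worth remembering: keyword order is now part of the cache key. The stdlib functools.lru_cache, which this backport tracks, behaves the same way:

    from functools import lru_cache

    @lru_cache(maxsize=None)
    def f(x, y):
        return x + y

    f(x=1, y=2)
    f(y=2, x=1)  # same values, different keyword order
    print(f.cache_info().misses)  # -> 2: each ordering is cached separately
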
@@ -88,108 +106,138 @@ def lru_cache(maxsize=100, typed=False):  # noqa: C901
     # The internals of the lru_cache are encapsulated for thread safety and
     # to allow the implementation to change (including a possible C version).
 
+    if isinstance(maxsize, int):
+        # Negative maxsize is treated as 0
+        if maxsize < 0:
+            maxsize = 0
+    elif callable(maxsize) and isinstance(typed, bool):
+        # The user_function was passed in directly via the maxsize argument
+        user_function, maxsize = maxsize, 128
+        wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
+        wrapper.cache_parameters = lambda: {'maxsize': maxsize, 'typed': typed}
+        return update_wrapper(wrapper, user_function)
+    elif maxsize is not None:
+        raise TypeError('Expected first argument to be an integer, a callable, or None')
+
     def decorating_function(user_function):
-        cache = dict()
-        stats = [0, 0]  # make statistics updateable non-locally
-        HITS, MISSES = 0, 1  # names for the stats fields
-        make_key = _make_key
-        cache_get = cache.get  # bound method to lookup key or return None
-        _len = len  # localize the global len() function
-        lock = RLock()  # because linkedlist updates aren't threadsafe
-        root = []  # root of the circular doubly linked list
-        root[:] = [root, root, None, None]  # initialize by pointing to self
-        nonlocal_root = [root]  # make updateable non-locally
-        PREV, NEXT, KEY, RESULT = 0, 1, 2, 3  # names for the link fields
-
-        if maxsize == 0:
-
-            def wrapper(*args, **kwds):
-                # no caching, just do a statistics update after a successful call
-                result = user_function(*args, **kwds)
-                stats[MISSES] += 1
-                return result
-
-        elif maxsize is None:
-
-            def wrapper(*args, **kwds):
-                # simple caching without ordering or size limit
-                key = make_key(args, kwds, typed)
-                result = cache_get(
-                    key, root
-                )  # root used here as a unique not-found sentinel
-                if result is not root:
-                    stats[HITS] += 1
-                    return result
-                result = user_function(*args, **kwds)
-                cache[key] = result
-                stats[MISSES] += 1
-                return result
-
-        else:
-
-            def wrapper(*args, **kwds):
-                # size limited caching that tracks accesses by recency
-                key = make_key(args, kwds, typed) if kwds or typed else args
-                with lock:
-                    link = cache_get(key)
-                    if link is not None:
-                        # record recent use of the key by moving it
-                        # to the front of the list
-                        (root,) = nonlocal_root
-                        link_prev, link_next, key, result = link
-                        link_prev[NEXT] = link_next
-                        link_next[PREV] = link_prev
-                        last = root[PREV]
-                        last[NEXT] = root[PREV] = link
-                        link[PREV] = last
-                        link[NEXT] = root
-                        stats[HITS] += 1
-                        return result
-                result = user_function(*args, **kwds)
-                with lock:
-                    (root,) = nonlocal_root
-                    if key in cache:
-                        # getting here means that this same key was added to the
-                        # cache while the lock was released.  since the link
-                        # update is already done, we need only return the
-                        # computed result and update the count of misses.
-                        pass
-                    elif _len(cache) >= maxsize:
-                        # use the old root to store the new key and result
-                        oldroot = root
-                        oldroot[KEY] = key
-                        oldroot[RESULT] = result
-                        # empty the oldest link and make it the new root
-                        root = nonlocal_root[0] = oldroot[NEXT]
-                        oldkey = root[KEY]
-                        root[KEY] = root[RESULT] = None
-                        # now update the cache dictionary for the new links
-                        del cache[oldkey]
-                        cache[key] = oldroot
-                    else:
-                        # put result in a new link at the front of the list
-                        last = root[PREV]
-                        link = [last, root, key, result]
-                        last[NEXT] = root[PREV] = cache[key] = link
-                    stats[MISSES] += 1
-                return result
-
-        def cache_info():
-            """Report cache statistics"""
-            with lock:
-                return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache))
-
-        def cache_clear():
-            """Clear the cache and cache statistics"""
-            with lock:
-                cache.clear()
-                root = nonlocal_root[0]
-                root[:] = [root, root, None, None]
-                stats[:] = [0, 0]
-
-        wrapper.__wrapped__ = user_function
-        wrapper.cache_info = cache_info
-        wrapper.cache_clear = cache_clear
+        wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
+        wrapper.cache_parameters = lambda: {'maxsize': maxsize, 'typed': typed}
         return update_wrapper(wrapper, user_function)
 
     return decorating_function
 
 
+def _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo):
+    # Constants shared by all lru cache instances:
+    sentinel = object()  # unique object used to signal cache misses
+    make_key = _make_key  # build a key from the function arguments
+    PREV, NEXT, KEY, RESULT = 0, 1, 2, 3  # names for the link fields
+
+    cache = {}
+    hits = misses = 0
+    full = False
+    cache_get = cache.get  # bound method to lookup a key or return None
+    cache_len = cache.__len__  # get cache size without calling len()
+    lock = RLock()  # because linkedlist updates aren't threadsafe
+    root = []  # root of the circular doubly linked list
+    root[:] = [root, root, None, None]  # initialize by pointing to self
+
+    if maxsize == 0:
+
+        def wrapper(*args, **kwds):
+            # No caching -- just a statistics update
+            nonlocal misses
+            misses += 1
+            result = user_function(*args, **kwds)
+            return result
+
+    elif maxsize is None:
+
+        def wrapper(*args, **kwds):
+            # Simple caching without ordering or size limit
+            nonlocal hits, misses
+            key = make_key(args, kwds, typed)
+            result = cache_get(key, sentinel)
+            if result is not sentinel:
+                hits += 1
+                return result
+            misses += 1
+            result = user_function(*args, **kwds)
+            cache[key] = result
+            return result
+
+    else:
+
+        def wrapper(*args, **kwds):
+            # Size limited caching that tracks accesses by recency
+            nonlocal root, hits, misses, full
+            key = make_key(args, kwds, typed)
+            with lock:
+                link = cache_get(key)
+                if link is not None:
+                    # Move the link to the front of the circular queue
+                    link_prev, link_next, _key, result = link
+                    link_prev[NEXT] = link_next
+                    link_next[PREV] = link_prev
+                    last = root[PREV]
+                    last[NEXT] = root[PREV] = link
+                    link[PREV] = last
+                    link[NEXT] = root
+                    hits += 1
+                    return result
+                misses += 1
+            result = user_function(*args, **kwds)
+            with lock:
+                if key in cache:
+                    # Getting here means that this same key was added to the
+                    # cache while the lock was released.  Since the link
+                    # update is already done, we need only return the
+                    # computed result and update the count of misses.
+                    pass
+                elif full:
+                    # Use the old root to store the new key and result.
+                    oldroot = root
+                    oldroot[KEY] = key
+                    oldroot[RESULT] = result
+                    # Empty the oldest link and make it the new root.
+                    # Keep a reference to the old key and old result to
+                    # prevent their ref counts from going to zero during the
+                    # update. That will prevent potentially arbitrary object
+                    # clean-up code (i.e. __del__) from running while we're
+                    # still adjusting the links.
+                    root = oldroot[NEXT]
+                    oldkey = root[KEY]
+                    root[KEY] = root[RESULT] = None
+                    # Now update the cache dictionary.
+                    del cache[oldkey]
+                    # Save the potentially reentrant cache[key] assignment
+                    # for last, after the root and links have been put in
+                    # a consistent state.
+                    cache[key] = oldroot
+                else:
+                    # Put result in a new link at the front of the queue.
+                    last = root[PREV]
+                    link = [last, root, key, result]
+                    last[NEXT] = root[PREV] = cache[key] = link
+                    # Use the cache_len bound method instead of the len() function
+                    # which could potentially be wrapped in an lru_cache itself.
+                    full = cache_len() >= maxsize
+            return result
+
+    def cache_info():
+        """Report cache statistics"""
+        with lock:
+            return _CacheInfo(hits, misses, maxsize, cache_len())
+
+    def cache_clear():
+        """Clear the cache and cache statistics"""
+        nonlocal hits, misses, full
+        with lock:
+            cache.clear()
+            root[:] = [root, root, None, None]
+            hits = misses = 0
+            full = False
+
+    wrapper.cache_info = cache_info
+    wrapper.cache_clear = cache_clear
+    return wrapper

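
The dispatch added at the top of lru_cache mirrors modern CPython: a bare @lru_cache (no call) is accepted because the function itself arrives via the maxsize parameter, and cache_parameters() reports the configuration. A sketch against the stdlib equivalent (cache_parameters exists in CPython 3.9+):

    from functools import lru_cache

    @lru_cache  # no parentheses needed: the function is passed as maxsize
    def fib(n):
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    fib(30)
    print(fib.cache_parameters())  # -> {'maxsize': 128, 'typed': False}
    print(fib.cache_info())        # CacheInfo(hits=..., misses=..., maxsize=128, currsize=...)
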
@@ -11,9 +11,9 @@ from bleach.sanitizer import (
 
 
 # yyyymmdd
-__releasedate__ = "20230123"
+__releasedate__ = "20231006"
 # x.y.z or x.y.z.dev0 -- semver
-__version__ = "6.0.0"
+__version__ = "6.1.0"
 
 
 __all__ = ["clean", "linkify"]

@@ -395,10 +395,17 @@ class BleachHTMLTokenizer(HTMLTokenizer):
                 # followed by a series of characters. It's treated as a tag
                 # name that abruptly ends, but we should treat that like
                 # character data
-                yield {
-                    "type": TAG_TOKEN_TYPE_CHARACTERS,
-                    "data": "<" + self.currentToken["name"],
-                }
+                yield {"type": TAG_TOKEN_TYPE_CHARACTERS, "data": self.stream.get_tag()}
+            elif last_error_token["data"] in (
+                "eof-in-attribute-name",
+                "eof-in-attribute-value-no-quotes",
+            ):
+                # Handle the case where the text being parsed ends with <
+                # followed by a series of characters and then space and then
+                # more characters. It's treated as a tag name followed by an
+                # attribute that abruptly ends, but we should treat that like
+                # character data.
+                yield {"type": TAG_TOKEN_TYPE_CHARACTERS, "data": self.stream.get_tag()}
             else:
                 yield last_error_token
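
The new eof-in-attribute branches make markup truncated mid-attribute come through as escaped character data instead of losing the trailing attribute text. A hedged sketch of the intended behavior (the expected output is illustrative, not taken from bleach's test suite):

    import bleach

    # Input ends abruptly inside an attribute.
    print(bleach.clean("hello <a href"))  # expected: 'hello &lt;a href'
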
@@ -45,8 +45,8 @@ def build_url_re(tlds=TLDS, protocols=html5lib_shim.allowed_protocols):
         r"""\(*  # Match any opening parentheses.
         \b(?<![@.])(?:(?:{0}):/{{0,3}}(?:(?:\w+:)?\w+@)?)?  # http://
         ([\w-]+\.)+(?:{1})(?:\:[0-9]+)?(?!\.\w)\b   # xx.yy.tld(:##)?
-        (?:[/?][^\s\{{\}}\|\\\^\[\]`<>"]*)?
-            # /path/zz (excluding "unsafe" chars from RFC 1738,
+        (?:[/?][^\s\{{\}}\|\\\^`<>"]*)?
+            # /path/zz (excluding "unsafe" chars from RFC 3986,
             # except for # and ~, which happen in practice)
         """.format(
             "|".join(sorted(protocols)), "|".join(sorted(tlds))
@@ -591,7 +591,7 @@ class LinkifyFilter(html5lib_shim.Filter):
                     in_a = False
                     token_buffer = []
                 else:
-                    token_buffer.append(token)
+                    token_buffer.extend(list(self.extract_entities(token)))
                 continue
 
             if token["type"] in ["StartTag", "EmptyTag"]:
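
Dropping \[\] from the excluded path characters means URLs containing square brackets now linkify end to end. A hedged usage sketch (attribute order and rel handling can vary with configuration):

    import bleach

    print(bleach.linkify("docs at example.com/page[1]"))
    # expected: '... <a href="http://example.com/page[1]" rel="nofollow">example.com/page[1]</a>'
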
@@ -15,8 +15,8 @@ documentation: http://www.crummy.com/software/BeautifulSoup/bs4/doc/
 """
 
 __author__ = "Leonard Richardson (leonardr@segfault.org)"
-__version__ = "4.12.2"
-__copyright__ = "Copyright (c) 2004-2023 Leonard Richardson"
+__version__ = "4.12.3"
+__copyright__ = "Copyright (c) 2004-2024 Leonard Richardson"
 # Use of this source code is governed by the MIT license.
 __license__ = "MIT"

@@ -514,15 +514,19 @@ class DetectsXMLParsedAsHTML(object):
     XML_PREFIX_B = b'<?xml'
 
     @classmethod
-    def warn_if_markup_looks_like_xml(cls, markup):
+    def warn_if_markup_looks_like_xml(cls, markup, stacklevel=3):
         """Perform a check on some markup to see if it looks like XML
         that's not XHTML. If so, issue a warning.
 
         This is much less reliable than doing the check while parsing,
         but some of the tree builders can't do that.
 
+        :param stacklevel: The stacklevel of the code calling this
+         function.
+
         :return: True if the markup looks like non-XHTML XML, False
          otherwise.
 
         """
         if isinstance(markup, bytes):
             prefix = cls.XML_PREFIX_B
@@ -535,15 +539,16 @@ class DetectsXMLParsedAsHTML(object):
             and markup.startswith(prefix)
             and not looks_like_html.search(markup[:500])
         ):
-            cls._warn()
+            cls._warn(stacklevel=stacklevel+2)
             return True
         return False
 
     @classmethod
-    def _warn(cls):
+    def _warn(cls, stacklevel=5):
         """Issue a warning about XML being parsed as HTML."""
         warnings.warn(
-            XMLParsedAsHTMLWarning.MESSAGE, XMLParsedAsHTMLWarning
+            XMLParsedAsHTMLWarning.MESSAGE, XMLParsedAsHTMLWarning,
+            stacklevel=stacklevel
         )
 
     def _initialize_xml_detector(self):
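
The stacklevel plumbing exists so XMLParsedAsHTMLWarning is attributed to the user's BeautifulSoup(...) call rather than to bs4 internals. What stacklevel does, shown in stdlib terms:

    import warnings

    def inner():
        # stacklevel=2 attributes the warning to inner()'s caller,
        # not to this line.
        warnings.warn("markup looks like XML", stacklevel=2)

    def caller():
        inner()  # the warning is reported against this line

    caller()
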
@@ -77,7 +77,9 @@ class HTML5TreeBuilder(HTMLTreeBuilder):
 
         # html5lib only parses HTML, so if it's given XML that's worth
         # noting.
-        DetectsXMLParsedAsHTML.warn_if_markup_looks_like_xml(markup)
+        DetectsXMLParsedAsHTML.warn_if_markup_looks_like_xml(
+            markup, stacklevel=3
+        )
 
         yield (markup, None, None, False)

@@ -378,10 +378,10 @@ class HTMLParserTreeBuilder(HTMLTreeBuilder):
         parser.soup = self.soup
         try:
             parser.feed(markup)
+            parser.close()
         except AssertionError as e:
             # html.parser raises AssertionError in rare cases to
             # indicate a fatal problem with the markup, especially
             # when there's an error in the doctype declaration.
             raise ParserRejectedMarkup(e)
-        parser.close()
         parser.already_closed_empty_element = []
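
With parser.close() inside the try block, assertion failures raised while html.parser flushes buffered data are converted to ParserRejectedMarkup as well, so callers can handle every rejection path the same way:

    from bs4 import BeautifulSoup, ParserRejectedMarkup

    def parse_or_none(markup):
        # Covers failures raised during feed() and, after this change,
        # during close() too.
        try:
            return BeautifulSoup(markup, "html.parser")
        except ParserRejectedMarkup:
            return None
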
@@ -179,7 +179,9 @@ class LXMLTreeBuilderForXML(TreeBuilder):
             self.processing_instruction_class = ProcessingInstruction
             # We're in HTML mode, so if we're given XML, that's worth
             # noting.
-            DetectsXMLParsedAsHTML.warn_if_markup_looks_like_xml(markup)
+            DetectsXMLParsedAsHTML.warn_if_markup_looks_like_xml(
+                markup, stacklevel=3
+            )
         else:
             self.processing_instruction_class = XMLProcessingInstruction

@@ -1356,7 +1356,7 @@ class Tag(PageElement):
         This is the first step in the deepcopy process.
         """
         clone = type(self)(
-            None, self.builder, self.name, self.namespace,
+            None, None, self.name, self.namespace,
             self.prefix, self.attrs, is_xml=self._is_xml,
             sourceline=self.sourceline, sourcepos=self.sourcepos,
             can_be_empty_element=self.can_be_empty_element,
@@ -1845,6 +1845,11 @@ class Tag(PageElement):
         return space_before + s + space_after
 
     def _format_tag(self, eventual_encoding, formatter, opening):
+        if self.hidden:
+            # A hidden tag is invisible, although its contents
+            # are visible.
+            return ''
+
         # A tag starts with the < character (see below).
 
         # Then the / character, if this is a closing tag.
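
Passing None instead of self.builder keeps a copied tag from sharing the original tree's builder state. The affected operation is the shallow-copy protocol; a sketch:

    import copy
    from bs4 import BeautifulSoup

    soup = BeautifulSoup('<div id="1"><b>text</b></div>', "html.parser")
    clone = copy.copy(soup.div)   # goes through Tag.__copy__
    print(clone)                  # -> <div id="1"><b>text</b></div>
    print(clone is soup.div)      # -> False
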
@@ -51,7 +51,7 @@ class Formatter(EntitySubstitution):
             void_element_close_prefix='/', cdata_containing_tags=None,
             empty_attributes_are_booleans=False, indent=1,
     ):
-        """Constructor.
+        r"""Constructor.
 
         :param language: This should be Formatter.XML if you are formatting
            XML markup and Formatter.HTML if you are formatting HTML markup.
@@ -76,7 +76,7 @@ class Formatter(EntitySubstitution):
            negative, or "" will only insert newlines. Using a
            positive integer indent indents that many spaces per
            level. If indent is a string (such as "\t"), that string
-           is used to indent each level. The default behavior to
+           is used to indent each level. The default behavior is to
            indent one space per level.
        """
        self.language = language
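
The docstring becomes a raw string so the "\t" example renders as typed; the indent parameter itself is unchanged. How it is used, as a sketch:

    from bs4 import BeautifulSoup
    from bs4.formatter import HTMLFormatter

    soup = BeautifulSoup("<div><p>hi</p></div>", "html.parser")
    print(soup.prettify(formatter=HTMLFormatter(indent=4)))
    # Four spaces per level; HTMLFormatter(indent="\t") would indent with tabs.
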
@@ -1105,7 +1105,7 @@ class XMLTreeBuilderSmokeTest(TreeBuilderSmokeTest):
         doc = """<?xml version="1.0" encoding="utf-8"?>
 <Document xmlns="http://example.com/ns0"
     xmlns:ns1="http://example.com/ns1"
-    xmlns:ns2="http://example.com/ns2"
+    xmlns:ns2="http://example.com/ns2">
 <ns1:tag>foo</ns1:tag>
 <ns1:tag>bar</ns1:tag>
 <ns2:tag key="value">baz</ns2:tag>

@@ -0,0 +1 @@
+<20><> <20> <css

Binary file not shown.

Binary file not shown.

@@ -0,0 +1 @@
˙ ><applet></applet><applet></applet><apple|><applet><applet><appl›„><applet><applet></applet></applet></applet></applet><applet></applet><apple>t<applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet>et><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><azplet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><plet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet>
<applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet><applet></applet></applet></applet></applet></appt></applet></applet></applet></applet></applet></applet></applet></applet></applet></applet></applet></applet></applet></applet></applet></applet></applet></applet><<meta charset=utf-8>

Binary file not shown.

@@ -0,0 +1 @@
+- ˙˙ <math><select><mi><select><select>t

Binary file not shown.

@@ -14,30 +14,75 @@ from bs4 import (
     BeautifulSoup,
     ParserRejectedMarkup,
 )
+try:
+    from soupsieve.util import SelectorSyntaxError
+    import lxml
+    import html5lib
+    fully_fuzzable = True
+except ImportError:
+    fully_fuzzable = False
+
+
+@pytest.mark.skipif(not fully_fuzzable, reason="Prerequisites for fuzz tests are not installed.")
 class TestFuzz(object):
 
     # Test case markup files from fuzzers are given this extension so
     # they can be included in builds.
     TESTCASE_SUFFIX = ".testcase"
 
+    # Copied 20230512 from
+    # https://github.com/google/oss-fuzz/blob/4ac6a645a197a695fe76532251feb5067076b3f3/projects/bs4/bs4_fuzzer.py
+    #
+    # Copying the code lets us precisely duplicate the behavior of
+    # oss-fuzz. The downside is that this code changes over time, so
+    # multiple copies of the code must be kept around to run against
+    # older tests. I'm not sure what to do about this, but I may
+    # retire old tests after a time.
+    def fuzz_test_with_css(self, filename):
+        data = self.__markup(filename)
+        parsers = ['lxml-xml', 'html5lib', 'html.parser', 'lxml']
+        try:
+            idx = int(data[0]) % len(parsers)
+        except ValueError:
+            return
+
+        css_selector, data = data[1:10], data[10:]
+
+        try:
+            soup = BeautifulSoup(data[1:], features=parsers[idx])
+        except ParserRejectedMarkup:
+            return
+        except ValueError:
+            return
+
+        list(soup.find_all(True))
+        try:
+            soup.css.select(css_selector.decode('utf-8', 'replace'))
+        except SelectorSyntaxError:
+            return
+        soup.prettify()
+
     # This class of error has been fixed by catching a less helpful
     # exception from html.parser and raising ParserRejectedMarkup
     # instead.
     @pytest.mark.parametrize(
         "filename", [
             "clusterfuzz-testcase-minimized-bs4_fuzzer-5703933063462912",
+            "crash-ffbdfa8a2b26f13537b68d3794b0478a4090ee4a",
         ]
     )
     def test_rejected_markup(self, filename):
         markup = self.__markup(filename)
         with pytest.raises(ParserRejectedMarkup):
             BeautifulSoup(markup, 'html.parser')
 
     # This class of error has to do with very deeply nested documents
     # which overflow the Python call stack when the tree is converted
     # to a string. This is an issue with Beautiful Soup which was fixed
     # as part of [bug=1471755].
+    #
+    # These test cases are in the older format that doesn't specify
+    # which parser to use or give a CSS selector.
     @pytest.mark.parametrize(
         "filename", [
             "clusterfuzz-testcase-minimized-bs4_fuzzer-5984173902397440",
@@ -46,18 +91,44 @@ class TestFuzz(object):
             "clusterfuzz-testcase-minimized-bs4_fuzzer-6450958476902400",
         ]
     )
-    def test_deeply_nested_document(self, filename):
+    def test_deeply_nested_document_without_css(self, filename):
         # Parsing the document and encoding it back to a string is
         # sufficient to demonstrate that the overflow problem has
         # been fixed.
         markup = self.__markup(filename)
         BeautifulSoup(markup, 'html.parser').encode()
 
+    # This class of error has to do with very deeply nested documents
+    # which overflow the Python call stack when the tree is converted
+    # to a string. This is an issue with Beautiful Soup which was fixed
+    # as part of [bug=1471755].
+    @pytest.mark.parametrize(
+        "filename", [
+            "clusterfuzz-testcase-minimized-bs4_fuzzer-5000587759190016",
+            "clusterfuzz-testcase-minimized-bs4_fuzzer-5375146639360000",
+            "clusterfuzz-testcase-minimized-bs4_fuzzer-5492400320282624",
+        ]
+    )
+    def test_deeply_nested_document(self, filename):
+        self.fuzz_test_with_css(filename)
+
+    @pytest.mark.parametrize(
+        "filename", [
+            "clusterfuzz-testcase-minimized-bs4_fuzzer-4670634698080256",
+            "clusterfuzz-testcase-minimized-bs4_fuzzer-5270998950477824",
+        ]
+    )
+    def test_soupsieve_errors(self, filename):
+        self.fuzz_test_with_css(filename)
+
     # This class of error represents problems with html5lib's parser,
     # not Beautiful Soup. I use
     # https://github.com/html5lib/html5lib-python/issues/568 to notify
     # the html5lib developers of these issues.
-    @pytest.mark.skip("html5lib problems")
+    #
+    # These test cases are in the older format that doesn't specify
+    # which parser to use or give a CSS selector.
+    @pytest.mark.skip(reason="html5lib-specific problems")
     @pytest.mark.parametrize(
         "filename", [
             # b"""ÿ<!DOCTyPEV PUBLIC'''Ð'"""
@@ -68,7 +139,7 @@ class TestFuzz(object):
 
             # b'-<math><sElect><mi><sElect><sElect>'
             "clusterfuzz-testcase-minimized-bs4_fuzzer-5843991618256896",
 
             # b'ñ<table><svg><html>'
             "clusterfuzz-testcase-minimized-bs4_fuzzer-6241471367348224",
 
@@ -79,10 +150,24 @@ class TestFuzz(object):
             "crash-0d306a50c8ed8bcd0785b67000fcd5dea1d33f08"
         ]
     )
-    def test_html5lib_parse_errors(self, filename):
+    def test_html5lib_parse_errors_without_css(self, filename):
         markup = self.__markup(filename)
         print(BeautifulSoup(markup, 'html5lib').encode())
 
+    # This class of error represents problems with html5lib's parser,
+    # not Beautiful Soup. I use
+    # https://github.com/html5lib/html5lib-python/issues/568 to notify
+    # the html5lib developers of these issues.
+    @pytest.mark.skip(reason="html5lib-specific problems")
+    @pytest.mark.parametrize(
+        "filename", [
+            # b'- \xff\xff <math>\x10<select><mi><select><select>t'
+            "clusterfuzz-testcase-minimized-bs4_fuzzer-6306874195312640",
+        ]
+    )
+    def test_html5lib_parse_errors(self, filename):
+        self.fuzz_test_with_css(filename)
+
     def __markup(self, filename):
         if not filename.endswith(self.TESTCASE_SUFFIX):
             filename += self.TESTCASE_SUFFIX

@@ -219,3 +219,16 @@ class TestMultiValuedAttributes(SoupTest):
         )
         assert soup.a['class'] == 'foo'
         assert soup.a['id'] == ['bar']
+
+    def test_hidden_tag_is_invisible(self):
+        # Setting .hidden on a tag makes it invisible in output, but
+        # leaves its contents visible.
+        #
+        # This is not a documented or supported feature of Beautiful
+        # Soup (e.g. NavigableString doesn't support .hidden even
+        # though it could), but some people use it and it's not
+        # hurting anything to verify that it keeps working.
+        #
+        soup = self.soup('<div id="1"><span id="2">a string</span></div>')
+        soup.span.hidden = True
+        assert '<div id="1">a string</div>' == str(soup.div)

@@ -1,4 +1,4 @@
 from .core import contents, where
 
 __all__ = ["contents", "where"]
-__version__ = "2023.07.22"
+__version__ = "2024.02.02"
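
The certificate additions and removals below reach applications only through certifi's one-function API; calling code is unchanged when the bundle is refreshed. Typical usage:

    import ssl
    import certifi

    # TLS context backed by the updated CA bundle.
    context = ssl.create_default_context(cafile=certifi.where())
    print(certifi.where())  # filesystem path to the bundled cacert.pem
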
@@ -245,34 +245,6 @@ mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK
 4SVhM7JZG+Ju1zdXtg2pEto=
 -----END CERTIFICATE-----
 
-# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1
-# Subject: O=SECOM Trust.net OU=Security Communication RootCA1
-# Label: "Security Communication Root CA"
-# Serial: 0
-# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a
-# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7
-# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c
------BEGIN CERTIFICATE-----
-MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY
-MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t
-dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5
-WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD
-VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3
-DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8
-9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ
-DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9
-Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N
-QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ
-xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G
-A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T
-AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG
-kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr
-Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5
-Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU
-JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot
-RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw==
------END CERTIFICATE-----
-
 # Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
 # Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
 # Label: "XRamp Global CA Root"

@@ -881,49 +853,6 @@ Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
 WD9f
 -----END CERTIFICATE-----
 
-# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
-# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
-# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
-# Serial: 6047274297262753887
-# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3
-# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa
-# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef
------BEGIN CERTIFICATE-----
-MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE
-BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
-cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy
-MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
-Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
-MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
-thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
-cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
-L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
-NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
-X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
-m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
-Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
-EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
-KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
-6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
-OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD
-VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD
-VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp
-cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv
-ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl
-AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF
-661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9
-am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1
-ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481
-PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS
-3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k
-SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF
-3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM
-ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g
-StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz
-Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB
-jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V
------END CERTIFICATE-----
-
 # Issuer: CN=Izenpe.com O=IZENPE S.A.
 # Subject: CN=Izenpe.com O=IZENPE S.A.
 # Label: "Izenpe.com"

@@ -4633,3 +4562,253 @@ o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5
 dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE
 oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ==
 -----END CERTIFICATE-----
+
+# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc.
+# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc.
+# Label: "TrustAsia Global Root CA G3"
+# Serial: 576386314500428537169965010905813481816650257167
+# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04
+# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7
+# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08
+-----BEGIN CERTIFICATE-----
+MIIFpTCCA42gAwIBAgIUZPYOZXdhaqs7tOqFhLuxibhxkw8wDQYJKoZIhvcNAQEM
+BQAwWjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dp
+ZXMsIEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHMzAe
+Fw0yMTA1MjAwMjEwMTlaFw00NjA1MTkwMjEwMTlaMFoxCzAJBgNVBAYTAkNOMSUw
+IwYDVQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtU
+cnVzdEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4IC
+DwAwggIKAoICAQDAMYJhkuSUGwoqZdC+BqmHO1ES6nBBruL7dOoKjbmzTNyPtxNS
+T1QY4SxzlZHFZjtqz6xjbYdT8PfxObegQ2OwxANdV6nnRM7EoYNl9lA+sX4WuDqK
+AtCWHwDNBSHvBm3dIZwZQ0WhxeiAysKtQGIXBsaqvPPW5vxQfmZCHzyLpnl5hkA1
+nyDvP+uLRx+PjsXUjrYsyUQE49RDdT/VP68czH5GX6zfZBCK70bwkPAPLfSIC7Ep
+qq+FqklYqL9joDiR5rPmd2jE+SoZhLsO4fWvieylL1AgdB4SQXMeJNnKziyhWTXA
+yB1GJ2Faj/lN03J5Zh6fFZAhLf3ti1ZwA0pJPn9pMRJpxx5cynoTi+jm9WAPzJMs
+hH/x/Gr8m0ed262IPfN2dTPXS6TIi/n1Q1hPy8gDVI+lhXgEGvNz8teHHUGf59gX
+zhqcD0r83ERoVGjiQTz+LISGNzzNPy+i2+f3VANfWdP3kXjHi3dqFuVJhZBFcnAv
+kV34PmVACxmZySYgWmjBNb9Pp1Hx2BErW+Canig7CjoKH8GB5S7wprlppYiU5msT
+f9FkPz2ccEblooV7WIQn3MSAPmeamseaMQ4w7OYXQJXZRe0Blqq/DPNL0WP3E1jA
+uPP6Z92bfW1K/zJMtSU7/xxnD4UiWQWRkUF3gdCFTIcQcf+eQxuulXUtgQIDAQAB
+o2MwYTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEDk5PIj7zjKsK5Xf/Ih
+MBY027ySMB0GA1UdDgQWBBRA5OTyI+84yrCuV3/yITAWNNu8kjAOBgNVHQ8BAf8E
+BAMCAQYwDQYJKoZIhvcNAQEMBQADggIBACY7UeFNOPMyGLS0XuFlXsSUT9SnYaP4
+wM8zAQLpw6o1D/GUE3d3NZ4tVlFEbuHGLige/9rsR82XRBf34EzC4Xx8MnpmyFq2
+XFNFV1pF1AWZLy4jVe5jaN/TG3inEpQGAHUNcoTpLrxaatXeL1nHo+zSh2bbt1S1
+JKv0Q3jbSwTEb93mPmY+KfJLaHEih6D4sTNjduMNhXJEIlU/HHzp/LgV6FL6qj6j
+ITk1dImmasI5+njPtqzn59ZW/yOSLlALqbUHM/Q4X6RJpstlcHboCoWASzY9M/eV
+VHUl2qzEc4Jl6VL1XP04lQJqaTDFHApXB64ipCz5xUG3uOyfT0gA+QEEVcys+TIx
+xHWVBqB/0Y0n3bOppHKH/lmLmnp0Ft0WpWIp6zqW3IunaFnT63eROfjXy9mPX1on
+AX1daBli2MjN9LdyR75bl87yraKZk62Uy5P2EgmVtqvXO9A/EcswFi55gORngS1d
+7XB4tmBZrOFdRWOPyN9yaFvqHbgB8X7754qz41SgOAngPN5C8sLtLpvzHzW2Ntjj
+gKGLzZlkD8Kqq7HK9W+eQ42EVJmzbsASZthwEPEGNTNDqJwuuhQxzhB/HIbjj9LV
++Hfsm6vxL2PZQl/gZ4FkkfGXL/xuJvYz+NO1+MRiqzFRJQJ6+N1rZdVtTTDIZbpo
+FGWsJwt0ivKH
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc.
+# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc.
+# Label: "TrustAsia Global Root CA G4"
+# Serial: 451799571007117016466790293371524403291602933463
+# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb
+# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a
+# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c
+-----BEGIN CERTIFICATE-----
+MIICVTCCAdygAwIBAgIUTyNkuI6XY57GU4HBdk7LKnQV1tcwCgYIKoZIzj0EAwMw
+WjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs
+IEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHNDAeFw0y
+MTA1MjAwMjEwMjJaFw00NjA1MTkwMjEwMjJaMFoxCzAJBgNVBAYTAkNOMSUwIwYD
+VQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtUcnVz
+dEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATx
+s8045CVD5d4ZCbuBeaIVXxVjAd7Cq92zphtnS4CDr5nLrBfbK5bKfFJV4hrhPVbw
+LxYI+hW8m7tH5j/uqOFMjPXTNvk4XatwmkcN4oFBButJ+bAp3TPsUKV/eSm4IJij
+YzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUpbtKl86zK3+kMd6Xg1mD
+pm9xy94wHQYDVR0OBBYEFKW7SpfOsyt/pDHel4NZg6ZvccveMA4GA1UdDwEB/wQE
+AwIBBjAKBggqhkjOPQQDAwNnADBkAjBe8usGzEkxn0AAbbd+NvBNEU/zy4k6LHiR
+UKNbwMp1JvK/kF0LgoxgKJ/GcJpo5PECMFxYDlZ2z1jD1xCMuo6u47xkdUfFVZDj
+/bpV6wfEU6s3qe4hsiFbYI89MvHVI5TWWA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=CommScope Public Trust ECC Root-01 O=CommScope
+# Subject: CN=CommScope Public Trust ECC Root-01 O=CommScope
+# Label: "CommScope Public Trust ECC Root-01"
+# Serial: 385011430473757362783587124273108818652468453534
+# MD5 Fingerprint: 3a:40:a7:fc:03:8c:9c:38:79:2f:3a:a2:6c:b6:0a:16
+# SHA1 Fingerprint: 07:86:c0:d8:dd:8e:c0:80:98:06:98:d0:58:7a:ef:de:a6:cc:a2:5d
+# SHA256 Fingerprint: 11:43:7c:da:7b:b4:5e:41:36:5f:45:b3:9a:38:98:6b:0d:e0:0d:ef:34:8e:0c:7b:b0:87:36:33:80:0b:c3:8b
+-----BEGIN CERTIFICATE-----
+MIICHTCCAaOgAwIBAgIUQ3CCd89NXTTxyq4yLzf39H91oJ4wCgYIKoZIzj0EAwMw
+TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t
+bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMTAeFw0yMTA0MjgxNzM1NDNa
+Fw00NjA0MjgxNzM1NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv
+cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDEw
+djAQBgcqhkjOPQIBBgUrgQQAIgNiAARLNumuV16ocNfQj3Rid8NeeqrltqLxeP0C
+flfdkXmcbLlSiFS8LwS+uM32ENEp7LXQoMPwiXAZu1FlxUOcw5tjnSCDPgYLpkJE
+hRGnSjot6dZoL0hOUysHP029uax3OVejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD
+VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSOB2LAUN3GGQYARnQE9/OufXVNMDAKBggq
+hkjOPQQDAwNoADBlAjEAnDPfQeMjqEI2Jpc1XHvr20v4qotzVRVcrHgpD7oh2MSg
+2NED3W3ROT3Ek2DS43KyAjB8xX6I01D1HiXo+k515liWpDVfG2XqYZpwI7UNo5uS
+Um9poIyNStDuiw7LR47QjRE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=CommScope Public Trust ECC Root-02 O=CommScope
+# Subject: CN=CommScope Public Trust ECC Root-02 O=CommScope
+# Label: "CommScope Public Trust ECC Root-02"
+# Serial: 234015080301808452132356021271193974922492992893
+# MD5 Fingerprint: 59:b0:44:d5:65:4d:b8:5c:55:19:92:02:b6:d1:94:b2
+# SHA1 Fingerprint: 3c:3f:ef:57:0f:fe:65:93:86:9e:a0:fe:b0:f6:ed:8e:d1:13:c7:e5
+# SHA256 Fingerprint: 2f:fb:7f:81:3b:bb:b3:c8:9a:b4:e8:16:2d:0f:16:d7:15:09:a8:30:cc:9d:73:c2:62:e5:14:08:75:d1:ad:4a
+-----BEGIN CERTIFICATE-----
+MIICHDCCAaOgAwIBAgIUKP2ZYEFHpgE6yhR7H+/5aAiDXX0wCgYIKoZIzj0EAwMw
+TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t
+bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMjAeFw0yMTA0MjgxNzQ0NTRa
+Fw00NjA0MjgxNzQ0NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv
+cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDIw
+djAQBgcqhkjOPQIBBgUrgQQAIgNiAAR4MIHoYx7l63FRD/cHB8o5mXxO1Q/MMDAL
+j2aTPs+9xYa9+bG3tD60B8jzljHz7aRP+KNOjSkVWLjVb3/ubCK1sK9IRQq9qEmU
+v4RDsNuESgMjGWdqb8FuvAY5N9GIIvejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD
+VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTmGHX/72DehKT1RsfeSlXjMjZ59TAKBggq
+hkjOPQQDAwNnADBkAjAmc0l6tqvmSfR9Uj/UQQSugEODZXW5hYA4O9Zv5JOGq4/n
+ich/m35rChJVYaoR4HkCMHfoMXGsPHED1oQmHhS48zs73u1Z/GtMMH9ZzkXpc2AV
+mkzw5l4lIhVtwodZ0LKOag==
+-----END CERTIFICATE-----
+
+# Issuer: CN=CommScope Public Trust RSA Root-01 O=CommScope
+# Subject: CN=CommScope Public Trust RSA Root-01 O=CommScope
+# Label: "CommScope Public Trust RSA Root-01"
+# Serial: 354030733275608256394402989253558293562031411421
+# MD5 Fingerprint: 0e:b4:15:bc:87:63:5d:5d:02:73:d4:26:38:68:73:d8
+# SHA1 Fingerprint: 6d:0a:5f:f7:b4:23:06:b4:85:b3:b7:97:64:fc:ac:75:f5:33:f2:93
+# SHA256 Fingerprint: 02:bd:f9:6e:2a:45:dd:9b:f1:8f:c7:e1:db:df:21:a0:37:9b:a3:c9:c2:61:03:44:cf:d8:d6:06:fe:c1:ed:81
+-----BEGIN CERTIFICATE-----
+MIIFbDCCA1SgAwIBAgIUPgNJgXUWdDGOTKvVxZAplsU5EN0wDQYJKoZIhvcNAQEL
+BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi
+Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMTAeFw0yMTA0MjgxNjQ1
+NTRaFw00NjA0MjgxNjQ1NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t
+U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt
+MDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCwSGWjDR1C45FtnYSk
+YZYSwu3D2iM0GXb26v1VWvZVAVMP8syMl0+5UMuzAURWlv2bKOx7dAvnQmtVzslh
+suitQDy6uUEKBU8bJoWPQ7VAtYXR1HHcg0Hz9kXHgKKEUJdGzqAMxGBWBB0HW0al
+DrJLpA6lfO741GIDuZNqihS4cPgugkY4Iw50x2tBt9Apo52AsH53k2NC+zSDO3Oj
+WiE260f6GBfZumbCk6SP/F2krfxQapWsvCQz0b2If4b19bJzKo98rwjyGpg/qYFl
+P8GMicWWMJoKz/TUyDTtnS+8jTiGU+6Xn6myY5QXjQ/cZip8UlF1y5mO6D1cv547
+KI2DAg+pn3LiLCuz3GaXAEDQpFSOm117RTYm1nJD68/A6g3czhLmfTifBSeolz7p
+UcZsBSjBAg/pGG3svZwG1KdJ9FQFa2ww8esD1eo9anbCyxooSU1/ZOD6K9pzg4H/
+kQO9lLvkuI6cMmPNn7togbGEW682v3fuHX/3SZtS7NJ3Wn2RnU3COS3kuoL4b/JO
+Hg9O5j9ZpSPcPYeoKFgo0fEbNttPxP/hjFtyjMcmAyejOQoBqsCyMWCDIqFPEgkB
+Ea801M/XrmLTBQe0MXXgDW1XT2mH+VepuhX2yFJtocucH+X8eKg1mp9BFM6ltM6U
+CBwJrVbl2rZJmkrqYxhTnCwuwwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUN12mmnQywsL5x6YVEFm45P3luG0wDQYJ
+KoZIhvcNAQELBQADggIBAK+nz97/4L1CjU3lIpbfaOp9TSp90K09FlxD533Ahuh6
+NWPxzIHIxgvoLlI1pKZJkGNRrDSsBTtXAOnTYtPZKdVUvhwQkZyybf5Z/Xn36lbQ
+nmhUQo8mUuJM3y+Xpi/SB5io82BdS5pYV4jvguX6r2yBS5KPQJqTRlnLX3gWsWc+
+QgvfKNmwrZggvkN80V4aCRckjXtdlemrwWCrWxhkgPut4AZ9HcpZuPN4KWfGVh2v
+trV0KnahP/t1MJ+UXjulYPPLXAziDslg+MkfFoom3ecnf+slpoq9uC02EJqxWE2a
+aE9gVOX2RhOOiKy8IUISrcZKiX2bwdgt6ZYD9KJ0DLwAHb/WNyVntHKLr4W96ioD
+j8z7PEQkguIBpQtZtjSNMgsSDesnwv1B10A8ckYpwIzqug/xBpMu95yo9GA+o/E4
+Xo4TwbM6l4c/ksp4qRyv0LAbJh6+cOx69TOY6lz/KwsETkPdY34Op054A5U+1C0w
+lREQKC6/oAI+/15Z0wUOlV9TRe9rh9VIzRamloPh37MG88EU26fsHItdkJANclHn
+YfkUyq+Dj7+vsQpZXdxc1+SWrVtgHdqul7I52Qb1dgAT+GhMIbA1xNxVssnBQVoc
+icCMb3SgazNNtQEo/a2tiRc7ppqEvOuM6sRxJKi6KfkIsidWNTJf6jn7MZrVGczw
+-----END CERTIFICATE-----
+
+# Issuer: CN=CommScope Public Trust RSA Root-02 O=CommScope
+# Subject: CN=CommScope Public Trust RSA Root-02 O=CommScope
+# Label: "CommScope Public Trust RSA Root-02"
+# Serial: 480062499834624527752716769107743131258796508494
+# MD5 Fingerprint: e1:29:f9:62:7b:76:e2:96:6d:f3:d4:d7:0f:ae:1f:aa
+# SHA1 Fingerprint: ea:b0:e2:52:1b:89:93:4c:11:68:f2:d8:9a:ac:22:4c:a3:8a:57:ae
+# SHA256 Fingerprint: ff:e9:43:d7:93:42:4b:4f:7c:44:0c:1c:3d:64:8d:53:63:f3:4b:82:dc:87:aa:7a:9f:11:8f:c5:de:e1:01:f1
+-----BEGIN CERTIFICATE-----
+MIIFbDCCA1SgAwIBAgIUVBa/O345lXGN0aoApYYNK496BU4wDQYJKoZIhvcNAQEL
+BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi
+Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMjAeFw0yMTA0MjgxNzE2
+NDNaFw00NjA0MjgxNzE2NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t
+U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt
+MDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDh+g77aAASyE3VrCLE
+NQE7xVTlWXZjpX/rwcRqmL0yjReA61260WI9JSMZNRTpf4mnG2I81lDnNJUDMrG0
+kyI9p+Kx7eZ7Ti6Hmw0zdQreqjXnfuU2mKKuJZ6VszKWpCtYHu8//mI0SFHRtI1C
+rWDaSWqVcN3SAOLMV2MCe5bdSZdbkk6V0/nLKR8YSvgBKtJjCW4k6YnS5cciTNxz
+hkcAqg2Ijq6FfUrpuzNPDlJwnZXjfG2WWy09X6GDRl224yW4fKcZgBzqZUPckXk2
+LHR88mcGyYnJ27/aaL8j7dxrrSiDeS/sOKUNNwFnJ5rpM9kzXzehxfCrPfp4sOcs
+n/Y+n2Dg70jpkEUeBVF4GiwSLFworA2iI540jwXmojPOEXcT1A6kHkIfhs1w/tku
+FT0du7jyU1fbzMZ0KZwYszZ1OC4PVKH4kh+Jlk+71O6d6Ts2QrUKOyrUZHk2EOH5
+kQMreyBUzQ0ZGshBMjTRsJnhkB4BQDa1t/qp5Xd1pCKBXbCL5CcSD1SIxtuFdOa3
+wNemKfrb3vOTlycEVS8KbzfFPROvCgCpLIscgSjX74Yxqa7ybrjKaixUR9gqiC6v
+wQcQeKwRoi9C8DfF8rhW3Q5iLc4tVn5V8qdE9isy9COoR+jUKgF4z2rDN6ieZdIs
+5fq6M8EGRPbmz6UNp2YINIos8wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUR9DnsSL/nSz12Vdgs7GxcJXvYXowDQYJ
+KoZIhvcNAQELBQADggIBAIZpsU0v6Z9PIpNojuQhmaPORVMbc0RTAIFhzTHjCLqB
+KCh6krm2qMhDnscTJk3C2OVVnJJdUNjCK9v+5qiXz1I6JMNlZFxHMaNlNRPDk7n3
++VGXu6TwYofF1gbTl4MgqX67tiHCpQ2EAOHyJxCDut0DgdXdaMNmEMjRdrSzbyme
+APnCKfWxkxlSaRosTKCL4BWaMS/TiJVZbuXEs1DIFAhKm4sTg7GkcrI7djNB3Nyq
+pgdvHSQSn8h2vS/ZjvQs7rfSOBAkNlEv41xdgSGn2rtO/+YHqP65DSdsu3BaVXoT
+6fEqSWnHX4dXTEN5bTpl6TBcQe7rd6VzEojov32u5cSoHw2OHG1QAk8mGEPej1WF
+sQs3BWDJVTkSBKEqz3EWnzZRSb9wO55nnPt7eck5HHisd5FUmrh1CoFSl+NmYWvt
+PjgelmFV4ZFUjO2MJB+ByRCac5krFk5yAD9UG/iNuovnFNa2RU9g7Jauwy8CTl2d
+lklyALKrdVwPaFsdZcJfMw8eD/A7hvWwTruc9+olBdytoptLFwG+Qt81IR2tq670
+v64fG9PiO/yzcnMcmyiQiRM9HcEARwmWmjgb3bHPDcK0RPOWlc4yOo80nOAXx17O
+rg3bhzjlP1v9mxnhMUF6cKojawHhRUzNlM47ni3niAIi9G7oyOzWPPO5std3eqx7
+-----END CERTIFICATE-----
+
+# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH
+# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH
+# Label: "Telekom Security TLS ECC Root 2020"
+# Serial: 72082518505882327255703894282316633856
+# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd
+# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec
+# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1
+-----BEGIN CERTIFICATE-----
+MIICQjCCAcmgAwIBAgIQNjqWjMlcsljN0AFdxeVXADAKBggqhkjOPQQDAzBjMQsw
+CQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0eSBH
+bWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBFQ0MgUm9vdCAyMDIw
+MB4XDTIwMDgyNTA3NDgyMFoXDTQ1MDgyNTIzNTk1OVowYzELMAkGA1UEBhMCREUx
+JzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkGA1UE
+AwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgRUNDIFJvb3QgMjAyMDB2MBAGByqGSM49
+AgEGBSuBBAAiA2IABM6//leov9Wq9xCazbzREaK9Z0LMkOsVGJDZos0MKiXrPk/O
+tdKPD/M12kOLAoC+b1EkHQ9rK8qfwm9QMuU3ILYg/4gND21Ju9sGpIeQkpT0CdDP
+f8iAC8GXs7s1J8nCG6NCMEAwHQYDVR0OBBYEFONyzG6VmUex5rNhTNHLq+O6zd6f
+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA
+MGQCMHVSi7ekEE+uShCLsoRbQuHmKjYC2qBuGT8lv9pZMo7k+5Dck2TOrbRBR2Di
+z6fLHgIwN0GMZt9Ba9aDAEH9L1r3ULRn0SyocddDypwnJJGDSA3PzfdUga/sf+Rn
+27iQ7t0l
+-----END CERTIFICATE-----
+
+# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH
+# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH
+# Label: "Telekom Security TLS RSA Root 2023"
+# Serial: 44676229530606711399881795178081572759
+# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2
+# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93
+# SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46
+-----BEGIN CERTIFICATE-----
+MIIFszCCA5ugAwIBAgIQIZxULej27HF3+k7ow3BXlzANBgkqhkiG9w0BAQwFADBj
+MQswCQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0
+eSBHbWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBSU0EgUm9vdCAy
+MDIzMB4XDTIzMDMyODEyMTY0NVoXDTQ4MDMyNzIzNTk1OVowYzELMAkGA1UEBhMC
+REUxJzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkG
+A1UEAwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgUlNBIFJvb3QgMjAyMzCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAO01oYGA88tKaVvC+1GDrib94W7zgRJ9
+cUD/h3VCKSHtgVIs3xLBGYSJwb3FKNXVS2xE1kzbB5ZKVXrKNoIENqil/Cf2SfHV
+cp6R+SPWcHu79ZvB7JPPGeplfohwoHP89v+1VmLhc2o0mD6CuKyVU/QBoCcHcqMA
+U6DksquDOFczJZSfvkgdmOGjup5czQRxUX11eKvzWarE4GC+j4NSuHUaQTXtvPM6
+Y+mpFEXX5lLRbtLevOP1Czvm4MS9Q2QTps70mDdsipWol8hHD/BeEIvnHRz+sTug
+BTNoBUGCwQMrAcjnj02r6LX2zWtEtefdi+zqJbQAIldNsLGyMcEWzv/9FIS3R/qy
+8XDe24tsNlikfLMR0cN3f1+2JeANxdKz+bi4d9s3cXFH42AYTyS2dTd4uaNir73J
+co4vzLuu2+QVUhkHM/tqty1LkCiCc/4YizWN26cEar7qwU02OxY2kTLvtkCJkUPg
+8qKrBC7m8kwOFjQgrIfBLX7JZkcXFBGk8/ehJImr2BrIoVyxo/eMbcgByU/J7MT8
+rFEz0ciD0cmfHdRHNCk+y7AO+oMLKFjlKdw/fKifybYKu6boRhYPluV75Gp6SG12
+mAWl3G0eQh5C2hrgUve1g8Aae3g1LDj1H/1Joy7SWWO/gLCMk3PLNaaZlSJhZQNg
++y+TS/qanIA7AgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtqeX
+gj10hZv3PJ+TmpV5dVKMbUcwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS2
+p5eCPXSFm/c8n5OalXl1UoxtRzANBgkqhkiG9w0BAQwFAAOCAgEAqMxhpr51nhVQ
+pGv7qHBFfLp+sVr8WyP6Cnf4mHGCDG3gXkaqk/QeoMPhk9tLrbKmXauw1GLLXrtm
+9S3ul0A8Yute1hTWjOKWi0FpkzXmuZlrYrShF2Y0pmtjxrlO8iLpWA1WQdH6DErw
+M807u20hOq6OcrXDSvvpfeWxm4bu4uB9tPcy/SKE8YXJN3nptT+/XOR0so8RYgDd
+GGah2XsjX/GO1WfoVNpbOms2b/mBsTNHM3dA+VKq3dSDz4V4mZqTuXNnQkYRIer+
+CqkbGmVps4+uFrb2S1ayLfmlyOw7YqPta9BO1UAJpB+Y1zqlklkg5LB9zVtzaL1t
+xKITDmcZuI1CfmwMmm6gJC3VRRvcxAIU/oVbZZfKTpBQCHpCNfnqwmbU+AGuHrS+
+w6jv/naaoqYfRvaE7fzbzsQCzndILIyy7MMAo+wsVRjBfhnu4S/yrYObnqsZ38aK
+L4x35bcF7DvB7L6Gs4a8wPfc5+pbrrLMtTWGS9DiP7bY+A4A7l3j941Y/8+LN+lj
+X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q
+ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm
+dTdmQRCsu/WU48IxK63nI1bMNSWSs1A=
+-----END CERTIFICATE-----

@@ -5,6 +5,10 @@ certifi.py
 This module returns the installation location of cacert.pem or its contents.
 """
 import sys
+import atexit
+
+def exit_cacert_ctx() -> None:
+    _CACERT_CTX.__exit__(None, None, None)  # type: ignore[union-attr]
 
 
 if sys.version_info >= (3, 11):

@@ -35,6 +39,7 @@ if sys.version_info >= (3, 11):
         # we will also store that at the global level as well.
         _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem"))
         _CACERT_PATH = str(_CACERT_CTX.__enter__())
+        atexit.register(exit_cacert_ctx)
 
     return _CACERT_PATH
 

@@ -70,6 +75,7 @@ elif sys.version_info >= (3, 7):
         # we will also store that at the global level as well.
         _CACERT_CTX = get_path("certifi", "cacert.pem")
         _CACERT_PATH = str(_CACERT_CTX.__enter__())
+        atexit.register(exit_cacert_ctx)
 
     return _CACERT_PATH
 
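The certifi hunks above pair the `__enter__()` call on the `importlib.resources` context manager with an `atexit` hook, so the temporary copy of cacert.pem that `as_file()` may create is cleaned up at interpreter shutdown instead of leaking. A minimal standalone sketch of the same pattern (Python 3.9+; function name is illustrative):

    import atexit
    from importlib.resources import as_file, files

    _CTX = None
    _PATH = None

    def cacert_path() -> str:
        """Materialize certifi's cacert.pem on disk and cache its path."""
        global _CTX, _PATH
        if _PATH is None:
            # as_file() may extract the resource to a temporary file; the
            # context must stay open for as long as the path is used...
            _CTX = as_file(files("certifi").joinpath("cacert.pem"))
            _PATH = str(_CTX.__enter__())
            # ...and is now closed exactly once, at interpreter exit.
            atexit.register(lambda: _CTX.__exit__(None, None, None))
        return _PATH
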
@@ -452,6 +452,6 @@ class WSGIErrorHandler(logging.Handler):
 
 class LazyRfc3339UtcTime(object):
     def __str__(self):
-        """Return now() in RFC3339 UTC Format."""
-        now = datetime.datetime.now()
-        return now.isoformat('T') + 'Z'
+        """Return utcnow() in RFC3339 UTC Format."""
+        iso_formatted_now = datetime.datetime.utcnow().isoformat('T')
+        return f'{iso_formatted_now!s}Z'
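The old implementation stamped access-log lines with the host's local wall-clock time and still appended a literal 'Z', so any machine not running in UTC logged a mislabelled timestamp; the rewrite formats `utcnow()` instead. The difference in isolation:

    import datetime

    # Old behaviour: local time mislabelled as UTC.
    old = datetime.datetime.now().isoformat('T') + 'Z'

    # New behaviour: an actual UTC timestamp.
    new = f"{datetime.datetime.utcnow().isoformat('T')!s}Z"

    print(old, new)  # the two differ by the host's UTC offset
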
@@ -622,13 +622,15 @@ def autovary(ignore=None, debug=False):
 
 
 def convert_params(exception=ValueError, error=400):
-    """Convert request params based on function annotations, with error handling.
+    """Convert request params based on function annotations.
 
-    exception
-        Exception class to catch.
+    This function also processes errors that are subclasses of ``exception``.
 
-    status
-        The HTTP error code to return to the client on failure.
+    :param BaseException exception: Exception class to catch.
+    :type exception: BaseException
+
+    :param error: The HTTP status code to return to the client on failure.
+    :type error: int
     """
     request = cherrypy.serving.request
     types = request.handler.callable.__annotations__
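For reference, `convert_params` reads the handler's annotations and coerces the incoming (string) request parameters through them, turning a failed conversion into the configured HTTP error. A hedged usage sketch — the handler is illustrative, and it assumes the stock `tools.params` registration that CherryPy builds on this function:

    import cherrypy

    class API:
        @cherrypy.expose
        @cherrypy.tools.params()  # assumed to be backed by convert_params()
        def page(self, count: int = 1):
            # "count" arrives as a str from the query string and is converted
            # via the annotation; a ValueError becomes an HTTP 400 response.
            return 'count is %r' % count
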
@@ -47,7 +47,9 @@ try:
     import pstats
 
     def new_func_strip_path(func_name):
-        """Make profiler output more readable by adding `__init__` modules' parents
+        """Add ``__init__`` modules' parents.
+
+        This makes the profiler output more readable.
         """
         filename, line, name = func_name
         if filename.endswith('__init__.py'):
@@ -188,7 +188,7 @@ class Parser(configparser.ConfigParser):
 
     def dict_from_file(self, file):
         if hasattr(file, 'read'):
-            self.readfp(file)
+            self.read_file(file)
         else:
             self.read(file)
         return self.as_dict()
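`ConfigParser.readfp()` had been deprecated since Python 3.2 and was removed outright in Python 3.12, so `read_file()` is the necessary drop-in replacement for file-like objects. In isolation:

    import configparser
    import io

    parser = configparser.ConfigParser()
    # read_file() accepts a file-like object / iterable of lines,
    # exactly as the removed readfp() did.
    parser.read_file(io.StringIO('[global]\nserver.socket_port = 8080\n'))
    print(parser.get('global', 'server.socket_port'))  # -> 8080
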
@@ -1,19 +1,18 @@
 """Module with helpers for serving static files."""
 
+import mimetypes
 import os
 import platform
 import re
 import stat
-import mimetypes
-import urllib.parse
 import unicodedata
-
+import urllib.parse
 from email.generator import _make_boundary as make_boundary
 from io import UnsupportedOperation
 
 import cherrypy
 from cherrypy._cpcompat import ntob
-from cherrypy.lib import cptools, httputil, file_generator_limited
+from cherrypy.lib import cptools, file_generator_limited, httputil
 
 
 def _setup_mimetypes():

@@ -185,7 +184,10 @@ def serve_fileobj(fileobj, content_type=None, disposition=None, name=None,
 
 
 def _serve_fileobj(fileobj, content_type, content_length, debug=False):
-    """Internal. Set response.body to the given file object, perhaps ranged."""
+    """Set ``response.body`` to the given file object, perhaps ranged.
+
+    Internal helper.
+    """
     response = cherrypy.serving.response
 
     # HTTP/1.0 didn't have Range/Accept-Ranges headers, or the 206 code
@@ -494,7 +494,7 @@ class Bus(object):
                 "Cannot reconstruct command from '-c'. "
                 'Ref: https://github.com/cherrypy/cherrypy/issues/1545')
         except AttributeError:
-            """It looks Py_GetArgcArgv is completely absent in some environments
+            """It looks Py_GetArgcArgv's completely absent in some environments
 
             It is known, that there's no Py_GetArgcArgv in MS Windows and
             ``ctypes`` module is completely absent in Google AppEngine
@@ -136,6 +136,9 @@ class HTTPTests(helper.CPWebCase):
         self.assertStatus(200)
         self.assertBody(b'Hello world!')
 
+        response.close()
+        c.close()
+
         # Now send a message that has no Content-Length, but does send a body.
         # Verify that CP times out the socket and responds
         # with 411 Length Required.

@@ -159,6 +162,9 @@ class HTTPTests(helper.CPWebCase):
         self.status = str(response.status)
         self.assertStatus(411)
 
+        response.close()
+        c.close()
+
     def test_post_multipart(self):
         alphabet = 'abcdefghijklmnopqrstuvwxyz'
         # generate file contents for a large post

@@ -184,6 +190,9 @@ class HTTPTests(helper.CPWebCase):
         parts = ['%s * 65536' % ch for ch in alphabet]
         self.assertBody(', '.join(parts))
 
+        response.close()
+        c.close()
+
     def test_post_filename_with_special_characters(self):
         """Testing that we can handle filenames with special characters.
 

@@ -217,6 +226,9 @@ class HTTPTests(helper.CPWebCase):
         self.assertStatus(200)
         self.assertBody(fname)
 
+        response.close()
+        c.close()
+
     def test_malformed_request_line(self):
         if getattr(cherrypy.server, 'using_apache', False):
             return self.skip('skipped due to known Apache differences...')

@@ -264,6 +276,9 @@ class HTTPTests(helper.CPWebCase):
         self.body = response.fp.read(20)
         self.assertBody('Illegal header line.')
 
+        response.close()
+        c.close()
+
     def test_http_over_https(self):
         if self.scheme != 'https':
             return self.skip('skipped (not running HTTPS)... ')
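Each hunk above closes the hand-rolled `HTTPConnection` and its response once the assertions are done, presumably to stop the suite leaking sockets (and emitting `ResourceWarning`s under stricter warning filters). The same cleanup can be expressed with context managers; a standalone stdlib-only sketch:

    import contextlib
    from http.client import HTTPConnection

    with contextlib.closing(HTTPConnection('localhost', 8080)) as c:
        c.request('GET', '/')
        with contextlib.closing(c.getresponse()) as response:
            body = response.read()
    # both the response and the connection are closed on exit
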
@@ -150,6 +150,8 @@ class IteratorTest(helper.CPWebCase):
         self.assertStatus(200)
         self.assertBody('0')
 
+        itr_conn.close()
+
         # Now we do the same check with streaming - some classes will
         # be automatically closed, while others cannot.
         stream_counts = {}
@@ -1,5 +1,6 @@
 """Basic tests for the CherryPy core: request handling."""
 
+import datetime
 import logging
 
 from cheroot.test import webtest

@@ -197,6 +198,33 @@ def test_custom_log_format(log_tracker, monkeypatch, server):
     )
 
 
+def test_utc_in_timez(monkeypatch):
+    """Test that ``LazyRfc3339UtcTime`` is rendered as ``str`` using UTC timestamp."""
+    utcoffset8_local_time_in_naive_utc = (
+        datetime.datetime(
+            year=2020,
+            month=1,
+            day=1,
+            hour=1,
+            minute=23,
+            second=45,
+            tzinfo=datetime.timezone(datetime.timedelta(hours=8)),
+        )
+        .astimezone(datetime.timezone.utc)
+        .replace(tzinfo=None)
+    )
+
+    class mock_datetime:
+        @classmethod
+        def utcnow(cls):
+            return utcoffset8_local_time_in_naive_utc
+
+    monkeypatch.setattr('datetime.datetime', mock_datetime)
+    rfc3339_utc_time = str(cherrypy._cplogging.LazyRfc3339UtcTime())
+    expected_time = '2019-12-31T17:23:45Z'
+    assert rfc3339_utc_time == expected_time
+
+
 def test_timez_log_format(log_tracker, monkeypatch, server):
     """Test a customized access_log_format string, which is a
     feature of _cplogging.LogManager.access()."""
@@ -38,7 +38,7 @@ CL_BLANK = "data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAA
 URI_SCHEME = "cloudinary"
 API_VERSION = "v1_1"
 
-VERSION = "1.34.0"
+VERSION = "1.39.1"
 
 _USER_PLATFORM_DETAILS = "; ".join((platform(), "Python {}".format(python_version())))
 
@@ -741,7 +741,11 @@ class CloudinaryResource(object):
         :return: Video tag
         """
         public_id = options.get('public_id', self.public_id)
-        source = re.sub(r"\.({0})$".format("|".join(self.default_source_types())), '', public_id)
+        use_fetch_format = options.get('use_fetch_format', config().use_fetch_format)
+        if not use_fetch_format:
+            source = re.sub(r"\.({0})$".format("|".join(self.default_source_types())), '', public_id)
+        else:
+            source = public_id
 
         custom_attributes = options.pop("attributes", dict())
 
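When `use_fetch_format` is set, the delivery format comes from the transformation rather than from the URL suffix, so the public ID must keep its extension; stripping it now happens only in the default delivery mode. A hedged usage sketch (cloud name and public ID are illustrative):

    import cloudinary

    cloudinary.config(cloud_name='demo')

    # Default mode: '.mp4' is stripped from the source before the tag is built.
    tag = cloudinary.CloudinaryVideo('dog.mp4').video()

    # Fetch-format mode: the extension is preserved in the source.
    tag = cloudinary.CloudinaryVideo('dog.mp4').video(use_fetch_format=True)
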
@@ -14,7 +14,8 @@ from cloudinary import utils
 from cloudinary.api_client.call_api import (
     call_api,
     call_metadata_api,
-    call_json_api
+    call_json_api,
+    _call_v2_api
 )
 from cloudinary.exceptions import (
     BadRequest,

@@ -54,6 +55,19 @@ def usage(**options):
     return call_api("get", uri, {}, **options)
 
 
+def config(**options):
+    """
+    Get account config details.
+
+    :param options: Additional options.
+    :type options: dict, optional
+    :return: Detailed config information.
+    :rtype: Response
+    """
+    params = only(options, "settings")
+    return call_api("get", ["config"], params, **options)
+
+
 def resource_types(**options):
     return call_api("get", ["resources"], {}, **options)
 

@@ -64,24 +78,22 @@ def resources(**options):
     uri = ["resources", resource_type]
     if upload_type:
         uri.append(upload_type)
-    params = only(options, "next_cursor", "max_results", "prefix", "tags",
-                  "context", "moderations", "direction", "start_at", "metadata")
+    params = __list_resources_params(**options)
+    params.update(only(options, "prefix", "start_at"))
     return call_api("get", uri, params, **options)
 
 
 def resources_by_tag(tag, **options):
     resource_type = options.pop("resource_type", "image")
     uri = ["resources", resource_type, "tags", tag]
-    params = only(options, "next_cursor", "max_results", "tags",
-                  "context", "moderations", "direction", "metadata")
+    params = __list_resources_params(**options)
     return call_api("get", uri, params, **options)
 
 
 def resources_by_moderation(kind, status, **options):
     resource_type = options.pop("resource_type", "image")
     uri = ["resources", resource_type, "moderations", kind, status]
-    params = only(options, "next_cursor", "max_results", "tags",
-                  "context", "moderations", "direction", "metadata")
+    params = __list_resources_params(**options)
     return call_api("get", uri, params, **options)
 
 

@@ -89,7 +101,7 @@ def resources_by_ids(public_ids, **options):
     resource_type = options.pop("resource_type", "image")
     upload_type = options.pop("type", "upload")
     uri = ["resources", resource_type, upload_type]
-    params = dict(only(options, "tags", "moderations", "context"), public_ids=public_ids)
+    params = dict(__resources_params(**options), public_ids=public_ids)
     return call_api("get", uri, params, **options)
 
 

@@ -105,7 +117,7 @@ def resources_by_asset_folder(asset_folder, **options):
     :rtype: Response
     """
     uri = ["resources", "by_asset_folder"]
-    params = only(options, "max_results", "tags", "moderations", "context", "next_cursor")
+    params = __list_resources_params(**options)
     params["asset_folder"] = asset_folder
     return call_api("get", uri, params, **options)
 

@@ -125,7 +137,7 @@ def resources_by_asset_ids(asset_ids, **options):
     :rtype: Response
     """
     uri = ["resources", 'by_asset_ids']
-    params = dict(only(options, "tags", "moderations", "context"), asset_ids=asset_ids)
+    params = dict(__resources_params(**options), asset_ids=asset_ids)
     return call_api("get", uri, params, **options)
 
 

@@ -147,15 +159,43 @@ def resources_by_context(key, value=None, **options):
     """
     resource_type = options.pop("resource_type", "image")
     uri = ["resources", resource_type, "context"]
-    params = only(options, "next_cursor", "max_results", "tags",
-                  "context", "moderations", "direction", "metadata")
+    params = __list_resources_params(**options)
     params["key"] = key
     if value is not None:
         params["value"] = value
     return call_api("get", uri, params, **options)
 
 
-def visual_search(image_url=None, image_asset_id=None, text=None, **options):
+def __resources_params(**options):
+    """
+    Prepares optional parameters for resources_* API calls.
+
+    :param options: Additional options
+    :return: Optional parameters
+
+    :internal
+    """
+    params = only(options, "tags", "context", "metadata", "moderations")
+    params["fields"] = options.get("fields") and utils.encode_list(utils.build_array(options["fields"]))
+    return params
+
+
+def __list_resources_params(**options):
+    """
+    Prepares optional parameters for resources_* API calls.
+
+    :param options: Additional options
+    :return: Optional parameters
+
+    :internal
+    """
+    resources_params = __resources_params(**options)
+    resources_params.update(only(options, "next_cursor", "max_results", "direction"))
+
+    return resources_params
+
+
+def visual_search(image_url=None, image_asset_id=None, text=None, image_file=None, **options):
     """
     Find images based on their visual content.
 

@@ -165,14 +205,17 @@ def visual_search(image_url=None, image_asset_id=None, text=None, **options):
     :type image_asset_id: str
     :param text: A textual description, e.g., "cat"
     :type text: str
+    :param image_file: The image file.
+    :type image_file: str|callable|Path|bytes
     :param options: Additional options
     :type options: dict, optional
     :return: Resources (assets) that were found
     :rtype: Response
     """
     uri = ["resources", "visual_search"]
-    params = {"image_url": image_url, "image_asset_id": image_asset_id, "text": text}
-    return call_api("get", uri, params, **options)
+    params = {"image_url": image_url, "image_asset_id": image_asset_id, "text": text,
+              "image_file": utils.handle_file_parameter(image_file, "file")}
+    return call_api("post", uri, params, **options)
 
 
 def resource(public_id, **options):
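`visual_search` gains an `image_file` criterion, and the call switches from GET to POST since a file payload cannot travel in a query string. Usage based on the signature above (file path illustrative):

    from cloudinary import api

    # Any of the search criteria may be supplied.
    by_text = api.visual_search(text='cat')

    # image_file may be a path, bytes, a Path object, or a callable,
    # per the docstring above; it is wrapped by utils.handle_file_parameter().
    by_file = api.visual_search(image_file='./query.jpg')
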
@@ -224,11 +267,11 @@ def update(public_id, **options):
     if "tags" in options:
         params["tags"] = ",".join(utils.build_array(options["tags"]))
     if "face_coordinates" in options:
-        params["face_coordinates"] = utils.encode_double_array(
-            options.get("face_coordinates"))
+        params["face_coordinates"] = utils.encode_double_array(options.get("face_coordinates"))
     if "custom_coordinates" in options:
-        params["custom_coordinates"] = utils.encode_double_array(
-            options.get("custom_coordinates"))
+        params["custom_coordinates"] = utils.encode_double_array(options.get("custom_coordinates"))
+    if "regions" in options:
+        params["regions"] = utils.json_encode(options.get("regions"))
     if "context" in options:
         params["context"] = utils.encode_context(options.get("context"))
     if "metadata" in options:

@@ -656,9 +699,8 @@ def add_metadata_field(field, **options):
 
     :rtype: Response
     """
-    params = only(field, "type", "external_id", "label", "mandatory",
-                  "default_value", "validation", "datasource")
-    return call_metadata_api("post", [], params, **options)
+    return call_metadata_api("post", [], __metadata_field_params(field), **options)
 
 
 def update_metadata_field(field_external_id, field, **options):

@@ -677,8 +719,13 @@ def update_metadata_field(field_external_id, field, **options):
     :rtype: Response
     """
     uri = [field_external_id]
-    params = only(field, "label", "mandatory", "default_value", "validation")
-    return call_metadata_api("put", uri, params, **options)
+    return call_metadata_api("put", uri, __metadata_field_params(field), **options)
+
+
+def __metadata_field_params(field):
+    return only(field, "type", "external_id", "label", "mandatory", "restrictions",
+                "default_value", "validation", "datasource")
 
 
 def delete_metadata_field(field_external_id, **options):

@@ -798,3 +845,18 @@ def reorder_metadata_fields(order_by, direction=None, **options):
     uri = ['order']
     params = {'order_by': order_by, 'direction': direction}
     return call_metadata_api('put', uri, params, **options)
+
+
+def analyze(input_type, analysis_type, uri=None, **options):
+    """Analyzes an asset with the requested analysis type.
+
+    :param input_type: The type of input for the asset to analyze ('uri').
+    :param analysis_type: The type of analysis to run ('google_tagging', 'captioning', 'fashion').
+    :param uri: The URI of the asset to analyze.
+    :param options: Additional options.
+
+    :rtype: Response
+    """
+    api_uri = ['analysis', 'analyze', input_type]
+    params = {'analysis_type': analysis_type, 'uri': uri, 'parameters': options.get("parameters")}
+    return _call_v2_api('post', api_uri, params, **options)
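`analyze` is the first Admin API helper routed through `_call_v2_api`, i.e. the `v2` URL tree rather than `v1_1`. A call grounded in the signature above (asset URL illustrative):

    from cloudinary import api

    # POSTs to .../v2/<cloud_name>/analysis/analyze/uri
    result = api.analyze(
        input_type='uri',
        analysis_type='captioning',
        uri='https://res.cloudinary.com/demo/image/upload/sample.jpg',
    )
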
@@ -1,8 +1,7 @@
 import cloudinary
 from cloudinary.api_client.execute_request import execute_request
 from cloudinary.provisioning.account_config import account_config
-from cloudinary.utils import get_http_connector
+from cloudinary.utils import get_http_connector, normalize_params
 
-
 PROVISIONING_SUB_PATH = "provisioning"
 ACCOUNT_SUB_PATH = "accounts"

@@ -28,7 +27,7 @@ def _call_account_api(method, uri, params=None, headers=None, **options):
 
     return execute_request(http_connector=_http,
                            method=method,
-                           params=params,
+                           params=normalize_params(params),
                            headers=headers,
                            auth=auth,
                            api_url=provisioning_api_url,
@@ -2,8 +2,7 @@ import json
 
 import cloudinary
 from cloudinary.api_client.execute_request import execute_request
-from cloudinary.utils import get_http_connector
+from cloudinary.utils import get_http_connector, normalize_params
 
-
 logger = cloudinary.logger
 _http = get_http_connector(cloudinary.config(), cloudinary.CERT_KWARGS)

@@ -27,6 +26,10 @@ def call_json_api(method, uri, json_body, **options):
     return _call_api(method, uri, body=data, headers={'Content-Type': 'application/json'}, **options)
 
 
+def _call_v2_api(method, uri, json_body, **options):
+    return call_json_api(method, uri, json_body=json_body, api_version='v2', **options)
+
+
 def call_api(method, uri, params, **options):
     return _call_api(method, uri, params=params, **options)
 

@@ -43,10 +46,11 @@ def _call_api(method, uri, params=None, body=None, headers=None, extra_headers=N
     oauth_token = options.pop("oauth_token", cloudinary.config().oauth_token)
 
     _validate_authorization(api_key, api_secret, oauth_token)
 
-    api_url = "/".join([prefix, cloudinary.API_VERSION, cloud_name] + uri)
     auth = {"key": api_key, "secret": api_secret, "oauth_token": oauth_token}
 
+    api_version = options.pop("api_version", cloudinary.API_VERSION)
+    api_url = "/".join([prefix, api_version, cloud_name] + uri)
+
     if body is not None:
         options["body"] = body
 

@@ -55,7 +59,7 @@ def _call_api(method, uri, params=None, body=None, headers=None, extra_headers=N
 
     return execute_request(http_connector=_http,
                            method=method,
-                           params=params,
+                           params=normalize_params(params),
                            headers=headers,
                            auth=auth,
                            api_url=api_url,
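`_call_api` now resolves the API version per call, defaulting to `cloudinary.API_VERSION` (`v1_1`); `_call_v2_api` simply passes `api_version='v2'` through `call_json_api`. Schematically, the URL join the new code performs:

    prefix = 'https://api.cloudinary.com'
    cloud_name = 'demo'                      # illustrative
    uri = ['analysis', 'analyze', 'uri']

    api_version = 'v2'                       # popped from options; default 'v1_1'
    api_url = '/'.join([prefix, api_version, cloud_name] + uri)
    # -> 'https://api.cloudinary.com/v2/demo/analysis/analyze/uri'
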
@@ -63,9 +63,8 @@ def execute_request(http_connector, method, params, headers, auth, api_url, **op
     processed_params = process_params(params)
 
     api_url = smart_escape(unquote(api_url))
-
     try:
-        response = http_connector.request(method.upper(), api_url, processed_params, req_headers, **kw)
+        response = http_connector.request(method=method.upper(), url=api_url, fields=processed_params, headers=req_headers, **kw)
         body = response.data
     except HTTPError as e:
         raise GeneralError("Unexpected error %s" % str(e))
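Spelling out `method`, `url`, `fields` and `headers` as keywords avoids depending on the positional parameter order of urllib3's `request()`, which was reworked in the 2.x line (a `json` parameter was added, among other changes). The equivalent call in isolation:

    import urllib3

    http = urllib3.PoolManager()
    response = http.request(
        method='GET',
        url='https://example.com/api',
        fields={'q': 'value'},               # encoded as the query string for GET
        headers={'Accept': 'application/json'},
    )
    print(response.status, len(response.data))
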
@@ -24,7 +24,7 @@ class HttpClient:
 
     def get_json(self, url):
         try:
-            response = self._http_client.request("GET", url, timeout=self.timeout)
+            response = self._http_client.request(method="GET", url=url, timeout=self.timeout)
             body = response.data
         except HTTPError as e:
             raise GeneralError("Unexpected error %s" % str(e))
@@ -2,4 +2,5 @@ from .account_config import AccountConfig, account_config, reset_config
 from .account import (sub_accounts, create_sub_account, delete_sub_account, sub_account, update_sub_account,
                       user_groups, create_user_group, update_user_group, delete_user_group, user_group,
                       add_user_to_group, remove_user_from_group, user_group_users, user_in_user_groups,
-                      users, create_user, delete_user, user, update_user, Role)
+                      users, create_user, delete_user, user, update_user, access_keys, generate_access_key,
+                      update_access_key, delete_access_key, Role)
@ -1,10 +1,10 @@
|
||||||
from cloudinary.api_client.call_account_api import _call_account_api
|
from cloudinary.api_client.call_account_api import _call_account_api
|
||||||
from cloudinary.utils import encode_list
|
from cloudinary.utils import encode_list
|
||||||
|
|
||||||
|
|
||||||
SUB_ACCOUNTS_SUB_PATH = "sub_accounts"
|
SUB_ACCOUNTS_SUB_PATH = "sub_accounts"
|
||||||
USERS_SUB_PATH = "users"
|
USERS_SUB_PATH = "users"
|
||||||
USER_GROUPS_SUB_PATH = "user_groups"
|
USER_GROUPS_SUB_PATH = "user_groups"
|
||||||
|
ACCESS_KEYS = "access_keys"
|
||||||
|
|
||||||
|
|
||||||
class Role(object):
|
class Role(object):
|
||||||
|
@ -123,7 +123,8 @@ def update_sub_account(sub_account_id, name=None, cloud_name=None, custom_attrib
|
||||||
return _call_account_api("put", uri, params=params, **options)
|
return _call_account_api("put", uri, params=params, **options)
|
||||||
|
|
||||||
|
|
||||||
def users(user_ids=None, sub_account_id=None, pending=None, prefix=None, **options):
|
def users(user_ids=None, sub_account_id=None, pending=None, prefix=None, last_login=None, from_date=None, to_date=None,
|
||||||
|
**options):
|
||||||
"""
|
"""
|
||||||
List all users
|
List all users
|
||||||
:param user_ids: The ids of the users to fetch
|
:param user_ids: The ids of the users to fetch
|
||||||
|
@ -136,6 +137,13 @@ def users(user_ids=None, sub_account_id=None, pending=None, prefix=None, **optio
|
||||||
:type pending: bool, optional
|
:type pending: bool, optional
|
||||||
:param prefix: User prefix
|
:param prefix: User prefix
|
||||||
:type prefix: str, optional
|
:type prefix: str, optional
|
||||||
|
:param last_login: Return only users that last logged in in the specified range of dates (true),
|
||||||
|
users that didn't last logged in in that range (false), or all users (None).
|
||||||
|
:type last_login: bool, optional
|
||||||
|
:param from_date: Last login start date.
|
||||||
|
:type from_date: datetime, optional
|
||||||
|
:param to_date: Last login end date.
|
||||||
|
:type to_date: datetime, optional
|
||||||
:param options: Generic advanced options dict, see online documentation.
|
:param options: Generic advanced options dict, see online documentation.
|
||||||
:type options: dict, optional
|
:type options: dict, optional
|
||||||
:return: List of users associated with the account
|
:return: List of users associated with the account
|
||||||
|
@ -146,7 +154,10 @@ def users(user_ids=None, sub_account_id=None, pending=None, prefix=None, **optio
|
||||||
params = {"ids": user_ids,
|
params = {"ids": user_ids,
|
||||||
"sub_account_id": sub_account_id,
|
"sub_account_id": sub_account_id,
|
||||||
"pending": pending,
|
"pending": pending,
|
||||||
"prefix": prefix}
|
"prefix": prefix,
|
||||||
|
"last_login": last_login,
|
||||||
|
"from": from_date,
|
||||||
|
"to": to_date}
|
||||||
return _call_account_api("get", uri, params=params, **options)
|
return _call_account_api("get", uri, params=params, **options)

@@ -351,7 +362,7 @@ def user_in_user_groups(user_id, **options):
    """
    Get all user groups a user belongs to
    :param user_id: The id of user
    :param user_id: str
    :type user_id: str
    :param options: Generic advanced options dict, see online documentation
    :type options: dict, optional
    :return: List of groups user is in
@@ -359,3 +370,112 @@ def user_in_user_groups(user_id, **options):
    """
    uri = [USER_GROUPS_SUB_PATH, user_id]
    return _call_account_api("get", uri, {}, **options)


def access_keys(sub_account_id, page_size=None, page=None, sort_by=None, sort_order=None, **options):
    """
    Get sub account access keys.

    :param sub_account_id: The id of the sub account.
    :type sub_account_id: str
    :param page_size: How many entries to display on each page.
    :type page_size: int
    :param page: Which page to return (maximum pages: 100). **Default**: All pages are returned.
    :type page: int
    :param sort_by: Which response parameter to sort by.
                    **Possible values**: `api_key`, `created_at`, `name`, `enabled`.
    :type sort_by: str
    :param sort_order: Control the order of returned keys. **Possible values**: `desc` (default), `asc`.
    :type sort_order: str
    :param options: Generic advanced options dict, see online documentation.
    :type options: dict, optional
    :return: List of access keys
    :rtype: dict
    """
    uri = [SUB_ACCOUNTS_SUB_PATH, sub_account_id, ACCESS_KEYS]
    params = {
        "page_size": page_size,
        "page": page,
        "sort_by": sort_by,
        "sort_order": sort_order,
    }
    return _call_account_api("get", uri, params, **options)

def generate_access_key(sub_account_id, name=None, enabled=None, **options):
    """
    Generate a new access key.

    :param sub_account_id: The id of the sub account.
    :type sub_account_id: str
    :param name: The name of the new access key.
    :type name: str
    :param enabled: Whether the new access key is enabled or disabled.
    :type enabled: bool
    :param options: Generic advanced options dict, see online documentation.
    :type options: dict, optional
    :return: Access key details
    :rtype: dict
    """
    uri = [SUB_ACCOUNTS_SUB_PATH, sub_account_id, ACCESS_KEYS]
    params = {
        "name": name,
        "enabled": enabled,
    }
    return _call_account_api("post", uri, params, **options)


def update_access_key(sub_account_id, api_key, name=None, enabled=None, dedicated_for=None, **options):
    """
    Update the name and/or status of an existing access key.

    :param sub_account_id: The id of the sub account.
    :type sub_account_id: str
    :param api_key: The API key of the access key.
    :type api_key: str|int
    :param name: The updated name of the access key.
    :type name: str
    :param enabled: Enable or disable the access key.
    :type enabled: bool
    :param dedicated_for: Designates the access key for a specific purpose while allowing it to be used for
                          other purposes, as well. This action replaces any previously assigned key.
                          **Possible values**: `webhooks`
    :type dedicated_for: str
    :param options: Generic advanced options dict, see online documentation.
    :type options: dict, optional
    :return: Access key details
    :rtype: dict
    """
    uri = [SUB_ACCOUNTS_SUB_PATH, sub_account_id, ACCESS_KEYS, str(api_key)]
    params = {
        "name": name,
        "enabled": enabled,
        "dedicated_for": dedicated_for,
    }
    return _call_account_api("put", uri, params, **options)


def delete_access_key(sub_account_id, api_key=None, name=None, **options):
    """
    Delete an existing access key by api_key or by name.

    :param sub_account_id: The id of the sub account.
    :type sub_account_id: str
    :param api_key: The API key of the access key.
    :type api_key: str|int
    :param name: The name of the access key.
    :type name: str
    :param options: Generic advanced options dict, see online documentation.
    :type options: dict, optional
    :return: Operation status.
    :rtype: dict
    """
    uri = [SUB_ACCOUNTS_SUB_PATH, sub_account_id, ACCESS_KEYS]

    if api_key is not None:
        uri.append(str(api_key))

    params = {
        "name": name
    }
    return _call_account_api("delete", uri, params, **options)

@@ -3,8 +3,8 @@ import json

import cloudinary
from cloudinary.api_client.call_api import call_json_api
from cloudinary.utils import unique, unsigned_download_url_prefix, build_distribution_domain, base64url_encode, \
    json_encode, compute_hex_hash, SIGNATURE_SHA256
from cloudinary.utils import (unique, build_distribution_domain, base64url_encode, json_encode, compute_hex_hash,
                              SIGNATURE_SHA256, build_array)


class Search(object):
@@ -16,6 +16,7 @@ class Search(object):
        'sort_by': lambda x: next(iter(x)),
        'aggregate': None,
        'with_field': None,
        'fields': None,
    }

    _ttl = 300  # Used for search URLs
@@ -57,6 +58,11 @@ class Search(object):
        self._add("with_field", value)
        return self

    def fields(self, value):
        """Request which fields to return in the result set."""
        self._add("fields", value)
        return self

    def ttl(self, ttl):
        """
        Sets the time to live of the search URL.
@@ -133,5 +139,5 @@ class Search(object):
    def _add(self, name, value):
        if name not in self.query:
            self.query[name] = []
        self.query[name].append(value)
        self.query[name].extend(build_array(value))
        return self
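
A sketch of the new fields() builder together with the list-friendly _add (hypothetical search expression; assumes the usual Search().execute() flow from this class):

    from cloudinary.search import Search

    result = (Search()
              .expression("resource_type:image")
              .fields(["tags", "context"])      # a list is flattened by build_array
              .sort_by("public_id", "desc")
              .execute())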

@@ -23,11 +23,6 @@ try:  # Python 2.7+
except ImportError:
    from urllib3.packages.ordered_dict import OrderedDict

try:  # Python 3.4+
    from pathlib import Path as PathLibPathType
except ImportError:
    PathLibPathType = None

if is_appengine_sandbox():
    # AppEngineManager uses AppEngine's URLFetch API behind the scenes
    _http = AppEngineManager()
@@ -503,32 +498,7 @@ def call_api(action, params, http_headers=None, return_error=False, unsigned=Fal

    if file:
        filename = options.get("filename")  # Custom filename provided by user (relevant only for streams and files)
        if PathLibPathType and isinstance(file, PathLibPathType):
            name = filename or file.name
            data = file.read_bytes()
        elif isinstance(file, string_types):
            if utils.is_remote_url(file):
                # URL
                name = None
                data = file
            else:
                # file path
                name = filename or file
                with open(file, "rb") as opened:
                    data = opened.read()
        elif hasattr(file, 'read') and callable(file.read):
            # stream
            data = file.read()
            name = filename or (file.name if hasattr(file, 'name') and isinstance(file.name, str) else "stream")
        elif isinstance(file, tuple):
            name, data = file
        else:
            # Not a string, not a stream
            name = filename or "file"
            data = file

        param_list.append(("file", (name, data) if name else data))
        param_list.append(("file", utils.handle_file_parameter(file, filename)))

    kw = {}
    if timeout is not None:
@@ -536,7 +506,7 @@ def call_api(action, params, http_headers=None, return_error=False, unsigned=Fal
    code = 200
    try:
        response = _http.request("POST", api_url, param_list, headers, **kw)
        response = _http.request(method="POST", url=api_url, fields=param_list, headers=headers, **kw)
    except HTTPError as e:
        raise Error("Unexpected error - {0!r}".format(e))
    except socket.error as e:
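
With the inline branching replaced by utils.handle_file_parameter, upload still accepts the same input shapes; a sketch with hypothetical file names:

    from pathlib import Path
    import cloudinary.uploader

    cloudinary.uploader.upload(Path("photos/cat.jpg"))          # pathlib.Path
    cloudinary.uploader.upload("https://example.com/dog.png")   # remote URL
    with open("photos/bird.jpg", "rb") as f:
        cloudinary.uploader.upload(f, filename="bird.jpg")      # stream, explicit name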

@@ -25,6 +25,11 @@ from cloudinary import auth_token
from cloudinary.api_client.tcp_keep_alive_manager import TCPKeepAlivePoolManager, TCPKeepAliveProxyManager
from cloudinary.compat import PY3, to_bytes, to_bytearray, to_string, string_types, urlparse

try:  # Python 3.4+
    from pathlib import Path as PathLibPathType
except ImportError:
    PathLibPathType = None

VAR_NAME_RE = r'(\$\([a-zA-Z]\w+\))'

urlencode = six.moves.urllib.parse.urlencode
@@ -127,6 +132,7 @@ __SERIALIZED_UPLOAD_PARAMS = [
    "allowed_formats",
    "face_coordinates",
    "custom_coordinates",
    "regions",
    "context",
    "auto_tagging",
    "responsive_breakpoints",
@@ -181,12 +187,11 @@ def compute_hex_hash(s, algorithm=SIGNATURE_SHA1):


def build_array(arg):
    if isinstance(arg, list):
    if isinstance(arg, (list, tuple)):
        return arg
    elif arg is None:
        return []
    else:
        return [arg]
    return [arg]


def build_list_of_dicts(val):
@@ -235,8 +240,7 @@ def encode_double_array(array):
    array = build_array(array)
    if len(array) > 0 and isinstance(array[0], list):
        return "|".join([",".join([str(i) for i in build_array(inner)]) for inner in array])
    else:
        return encode_list([str(i) for i in array])
    return encode_list([str(i) for i in array])


def encode_dict(arg):
@@ -246,8 +250,7 @@ def encode_dict(arg):
        else:
            items = arg.iteritems()
        return "|".join((k + "=" + v) for k, v in items)
    else:
        return arg
    return arg


def normalize_context_value(value):
@@ -288,9 +291,14 @@ def json_encode(value, sort_keys=False):
    Converts value to a json encoded string

    :param value: value to be encoded
    :param sort_keys: whether to sort keys

    :return: JSON encoded string
    """
    if isinstance(value, str) or value is None:
        return value

    return json.dumps(value, default=__json_serializer, separators=(',', ':'), sort_keys=sort_keys)
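
The early return means strings and None now pass through json_encode untouched; a short illustration (hypothetical values):

    json_encode({"b": 2, "a": 1}, sort_keys=True)  # '{"a":1,"b":2}'
    json_encode("already-encoded")                 # returned unchanged
    json_encode(None)                              # None, not the string "null"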

@@ -309,11 +317,13 @@ def patch_fetch_format(options):
    """
    When upload type is fetch, remove the format options.
    In addition, set the fetch_format options to the format value unless it was already set.
    Mutates the options parameter!
    Mutates the "options" parameter!

    :param options: URL and transformation options
    """
    if options.get("type", "upload") != "fetch":
    use_fetch_format = options.pop("use_fetch_format", cloudinary.config().use_fetch_format)

    if options.get("type", "upload") != "fetch" and not use_fetch_format:
        return

    resource_format = options.pop("format", None)
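
A sketch of the new use_fetch_format switch (hypothetical options dict; assumes the remainder of the function carries the popped format into fetch_format, as the docstring describes):

    opts = {"type": "upload", "format": "jpg", "use_fetch_format": True}
    patch_fetch_format(opts)
    # "format" is popped and, per the docstring, carried over as fetch_format:
    # opts == {"type": "upload", "fetch_format": "jpg"}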

@@ -351,8 +361,7 @@ def generate_transformation_string(**options):
    def recurse(bs):
        if isinstance(bs, dict):
            return generate_transformation_string(**bs)[0]
        else:
            return generate_transformation_string(transformation=bs)[0]
        return generate_transformation_string(transformation=bs)[0]

    base_transformations = list(map(recurse, base_transformations))
    named_transformation = None
@@ -375,7 +384,7 @@ def generate_transformation_string(**options):
    flags = ".".join(build_array(options.pop("flags", None)))
    dpr = options.pop("dpr", cloudinary.config().dpr)
    duration = norm_range_value(options.pop("duration", None))

    so_raw = options.pop("start_offset", None)
    start_offset = norm_auto_range_value(so_raw)
    if start_offset == None:
@@ -513,8 +522,7 @@ def split_range(range):
        return [range[0], range[-1]]
    elif isinstance(range, string_types) and re.match(RANGE_RE, range):
        return range.split("..", 1)
    else:
        return None
    return None


def norm_range_value(value):
@@ -570,6 +578,9 @@ def process_params(params):
    processed_params = {}
    for key, value in params.items():
        if isinstance(value, list) or isinstance(value, tuple):
            if len(value) == 2 and value[0] == "file":  # keep file parameter as is.
                processed_params[key] = value
                continue
            value_list = {"{}[{}]".format(key, i): i_value for i, i_value in enumerate(value)}
            processed_params.update(value_list)
        elif value is not None:
@@ -578,9 +589,28 @@ def process_params(params):


def cleanup_params(params):
    """
    Cleans and normalizes parameters when calculating the signature in the Upload API.

    :param params:
    :return:
    """
    return dict([(k, __safe_value(v)) for (k, v) in params.items() if v is not None and not v == ""])


def normalize_params(params):
    """
    Normalizes Admin API parameters.

    :param params:
    :return:
    """
    if not params or not isinstance(params, dict):
        return params

    return dict([(k, __bool_string(v)) for (k, v) in params.items() if v is not None and not v == ""])


def sign_request(params, options):
    api_key = options.get("api_key", cloudinary.config().api_key)
    if not api_key:
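
The two normalizers above differ only in how booleans are encoded; a sketch (hypothetical parameters):

    cleanup_params({"overwrite": True, "tags": None})  # {"overwrite": "1"}   (Upload API signing)
    normalize_params({"enabled": True, "name": ""})    # {"enabled": "true"}  (Admin API)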

@@ -1086,6 +1116,7 @@ def build_upload_params(**options):
        "allowed_formats": options.get("allowed_formats") and encode_list(build_array(options["allowed_formats"])),
        "face_coordinates": encode_double_array(options.get("face_coordinates")),
        "custom_coordinates": encode_double_array(options.get("custom_coordinates")),
        "regions": json_encode(options.get("regions")),
        "context": encode_context(options.get("context")),
        "auto_tagging": options.get("auto_tagging") and str(options.get("auto_tagging")),
        "responsive_breakpoints": generate_responsive_breakpoints_string(options.get("responsive_breakpoints")),
@@ -1101,6 +1132,37 @@ def build_upload_params(**options):
    return params


def handle_file_parameter(file, filename):
    if not file:
        return None

    if PathLibPathType and isinstance(file, PathLibPathType):
        name = filename or file.name
        data = file.read_bytes()
    elif isinstance(file, string_types):
        if is_remote_url(file):
            # URL
            name = None
            data = file
        else:
            # file path
            name = filename or file
            with open(file, "rb") as opened:
                data = opened.read()
    elif hasattr(file, 'read') and callable(file.read):
        # stream
        data = file.read()
        name = filename or (file.name if hasattr(file, 'name') and isinstance(file.name, str) else "stream")
    elif isinstance(file, tuple):
        name, data = file
    else:
        # Not a string, not a stream
        name = filename or "file"
        data = file

    return (name, data) if name else data
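
What the dispatch above yields for each input shape, in brief (hypothetical values):

    handle_file_parameter(None, None)                         # None
    handle_file_parameter("https://example.com/a.png", None)  # the bare URL string (no name)
    handle_file_parameter(("a.png", b"..."), None)            # ("a.png", b"...") passed through
    # pathlib.Path objects, local file paths, and streams come back as a (name, bytes) pair, read eagerly.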

def build_multi_and_sprite_params(**options):
    """
    Build params for multi, download_multi, generate_sprite, and download_generated_sprite methods
@@ -1166,8 +1228,21 @@ def __process_text_options(layer, layer_parameter):


def process_layer(layer, layer_parameter):
    if isinstance(layer, string_types) and layer.startswith("fetch:"):
        layer = {"url": layer[len('fetch:'):]}
    if isinstance(layer, string_types):
        resource_type = None
        if layer.startswith("fetch:"):
            url = layer[len('fetch:'):]
        elif layer.find(":fetch:", 0, 12) != -1:
            resource_type, _, url = layer.split(":", 2)
        else:
            # nothing to process, a raw string, keep as is.
            return layer

        # handle remote fetch URL
        layer = {"url": url, "type": "fetch"}
        if resource_type:
            layer["resource_type"] = resource_type

    if not isinstance(layer, dict):
        return layer

@@ -1176,19 +1251,19 @@ def process_layer(layer, layer_parameter):
    type = layer.get("type")
    public_id = layer.get("public_id")
    format = layer.get("format")
    fetch = layer.get("url")
    fetch_url = layer.get("url")
    components = list()

    if text is not None and resource_type is None:
        resource_type = "text"

    if fetch and resource_type is None:
    if fetch_url and type is None:
        resource_type = "fetch"
        type = "fetch"

    if public_id is not None and format is not None:
        public_id = public_id + "." + format

    if public_id is None and resource_type != "text" and resource_type != "fetch":
    if public_id is None and resource_type != "text" and type != "fetch":
        raise ValueError("Must supply public_id for for non-text " + layer_parameter)

    if resource_type is not None and resource_type != "image":
@@ -1212,8 +1287,6 @@ def process_layer(layer, layer_parameter):

    if text is not None:
        var_pattern = VAR_NAME_RE
        match = re.findall(var_pattern, text)

        parts = filter(lambda p: p is not None, re.split(var_pattern, text))
        encoded_text = []
        for part in parts:
@@ -1223,11 +1296,9 @@ def process_layer(layer, layer_parameter):
            encoded_text.append(smart_escape(smart_escape(part, r"([,/])")))

        text = ''.join(encoded_text)
        # text = text.replace("%2C", "%252C")
        # text = text.replace("/", "%252F")
        components.append(text)
    elif resource_type == "fetch":
    elif type == "fetch":
        b64 = base64_encode_url(fetch)
        b64 = base64url_encode(fetch_url)
        components.append(b64)
    else:
        public_id = public_id.replace("/", ':')
@@ -1359,8 +1430,7 @@ def normalize_expression(expression):
        result = re.sub(replaceRE, translate_if, result)
        result = re.sub('[ _]+', '_', result)
        return result
    else:
        return expression
    return expression


def __join_pair(key, value):
@@ -1368,8 +1438,7 @@ def __join_pair(key, value):
        return None
    elif value is True:
        return key
    else:
        return u"{0}=\"{1}\"".format(key, value)
    return u"{0}=\"{1}\"".format(key, value)


def html_attrs(attrs, only=None):
@@ -1379,10 +1448,15 @@ def html_attrs(attrs, only=None):
def __safe_value(v):
    if isinstance(v, bool):
        return "1" if v else "0"
    else:
        return v
    return v


def __bool_string(v):
    if isinstance(v, bool):
        return "true" if v else "false"

    return v


def __crc(source):
    return str((zlib.crc32(to_bytearray(source)) & 0xffffffff) % 5 + 1)
6
lib/dateutil-stubs/METADATA.toml
Normal file
@@ -0,0 +1,6 @@
version = "2.9.*"
upstream_repository = "https://github.com/dateutil/dateutil"
partial_stub = true

[tool.stubtest]
ignore_missing_stub = true
9
lib/dateutil-stubs/_common.pyi
Normal file
@@ -0,0 +1,9 @@
from typing_extensions import Self

class weekday:
    def __init__(self, weekday: int, n: int | None = None) -> None: ...
    def __call__(self, n: int) -> Self: ...
    def __eq__(self, other: object) -> bool: ...
    def __hash__(self) -> int: ...
    weekday: int
    n: int
8
lib/dateutil-stubs/easter.pyi
Normal file
@@ -0,0 +1,8 @@
from datetime import date
from typing import Literal

EASTER_JULIAN: Literal[1]
EASTER_ORTHODOX: Literal[2]
EASTER_WESTERN: Literal[3]

def easter(year: int, method: Literal[1, 2, 3] = 3) -> date: ...
67
lib/dateutil-stubs/parser/__init__.pyi
Normal file
@@ -0,0 +1,67 @@
from collections.abc import Callable, Mapping
from datetime import datetime, tzinfo
from typing import IO, Any
from typing_extensions import TypeAlias

from .isoparser import isoparse as isoparse, isoparser as isoparser

_FileOrStr: TypeAlias = bytes | str | IO[str] | IO[Any]
_TzData: TypeAlias = tzinfo | int | str | None
_TzInfo: TypeAlias = Mapping[str, _TzData] | Callable[[str, int], _TzData]

class parserinfo:
    JUMP: list[str]
    WEEKDAYS: list[tuple[str, ...]]
    MONTHS: list[tuple[str, ...]]
    HMS: list[tuple[str, str, str]]
    AMPM: list[tuple[str, str]]
    UTCZONE: list[str]
    PERTAIN: list[str]
    TZOFFSET: dict[str, int]
    def __init__(self, dayfirst: bool = False, yearfirst: bool = False) -> None: ...
    def jump(self, name: str) -> bool: ...
    def weekday(self, name: str) -> int | None: ...
    def month(self, name: str) -> int | None: ...
    def hms(self, name: str) -> int | None: ...
    def ampm(self, name: str) -> int | None: ...
    def pertain(self, name: str) -> bool: ...
    def utczone(self, name: str) -> bool: ...
    def tzoffset(self, name: str) -> int | None: ...
    def convertyear(self, year: int) -> int: ...
    def validate(self, res: datetime) -> bool: ...

class parser:
    def __init__(self, info: parserinfo | None = None) -> None: ...
    def parse(
        self,
        timestr: _FileOrStr,
        default: datetime | None = None,
        ignoretz: bool = False,
        tzinfos: _TzInfo | None = None,
        *,
        dayfirst: bool | None = ...,
        yearfirst: bool | None = ...,
        fuzzy: bool = ...,
        fuzzy_with_tokens: bool = ...,
    ) -> datetime: ...

DEFAULTPARSER: parser

def parse(
    timestr: _FileOrStr,
    parserinfo: parserinfo | None = None,
    *,
    dayfirst: bool | None = ...,
    yearfirst: bool | None = ...,
    ignoretz: bool = ...,
    fuzzy: bool = ...,
    fuzzy_with_tokens: bool = ...,
    default: datetime | None = ...,
    tzinfos: _TzInfo | None = ...,
) -> datetime: ...

class _tzparser: ...

DEFAULTTZPARSER: _tzparser

class ParserError(ValueError): ...
15
lib/dateutil-stubs/parser/isoparser.pyi
Normal file
@@ -0,0 +1,15 @@
from _typeshed import SupportsRead
from datetime import date, datetime, time, tzinfo
from typing_extensions import TypeAlias

_Readable: TypeAlias = SupportsRead[str | bytes]
_TakesAscii: TypeAlias = str | bytes | _Readable

class isoparser:
    def __init__(self, sep: str | bytes | None = None): ...
    def isoparse(self, dt_str: _TakesAscii) -> datetime: ...
    def parse_isodate(self, datestr: _TakesAscii) -> date: ...
    def parse_isotime(self, timestr: _TakesAscii) -> time: ...
    def parse_tzstr(self, tzstr: _TakesAscii, zero_as_utc: bool = True) -> tzinfo: ...

def isoparse(dt_str: _TakesAscii) -> datetime: ...
1
lib/dateutil-stubs/py.typed
Normal file
@@ -0,0 +1 @@
partial
97
lib/dateutil-stubs/relativedelta.pyi
Normal file
@@ -0,0 +1,97 @@
from datetime import date, timedelta
from typing import SupportsFloat, TypeVar, overload
from typing_extensions import Self, TypeAlias

# See #9817 for why we reexport this here
from ._common import weekday as weekday

_DateT = TypeVar("_DateT", bound=date)
# Work around attribute and type having the same name.
_Weekday: TypeAlias = weekday

MO: weekday
TU: weekday
WE: weekday
TH: weekday
FR: weekday
SA: weekday
SU: weekday

class relativedelta:
    years: int
    months: int
    days: int
    leapdays: int
    hours: int
    minutes: int
    seconds: int
    microseconds: int
    year: int | None
    month: int | None
    weekday: _Weekday | None
    day: int | None
    hour: int | None
    minute: int | None
    second: int | None
    microsecond: int | None
    def __init__(
        self,
        dt1: date | None = None,
        dt2: date | None = None,
        years: int | None = 0,
        months: int | None = 0,
        days: int | None = 0,
        leapdays: int | None = 0,
        weeks: int | None = 0,
        hours: int | None = 0,
        minutes: int | None = 0,
        seconds: int | None = 0,
        microseconds: int | None = 0,
        year: int | None = None,
        month: int | None = None,
        day: int | None = None,
        weekday: int | _Weekday | None = None,
        yearday: int | None = None,
        nlyearday: int | None = None,
        hour: int | None = None,
        minute: int | None = None,
        second: int | None = None,
        microsecond: int | None = None,
    ) -> None: ...
    @property
    def weeks(self) -> int: ...
    @weeks.setter
    def weeks(self, value: int) -> None: ...
    def normalized(self) -> Self: ...
    # TODO: use Union when mypy will handle it properly in overloaded operator
    # methods (#2129, #1442, #1264 in mypy)
    @overload
    def __add__(self, other: relativedelta) -> Self: ...
    @overload
    def __add__(self, other: timedelta) -> Self: ...
    @overload
    def __add__(self, other: _DateT) -> _DateT: ...
    @overload
    def __radd__(self, other: relativedelta) -> Self: ...
    @overload
    def __radd__(self, other: timedelta) -> Self: ...
    @overload
    def __radd__(self, other: _DateT) -> _DateT: ...
    @overload
    def __rsub__(self, other: relativedelta) -> Self: ...
    @overload
    def __rsub__(self, other: timedelta) -> Self: ...
    @overload
    def __rsub__(self, other: _DateT) -> _DateT: ...
    def __sub__(self, other: relativedelta) -> Self: ...
    def __neg__(self) -> Self: ...
    def __bool__(self) -> bool: ...
    def __nonzero__(self) -> bool: ...
    def __mul__(self, other: SupportsFloat) -> Self: ...
    def __rmul__(self, other: SupportsFloat) -> Self: ...
    def __eq__(self, other: object) -> bool: ...
    def __ne__(self, other: object) -> bool: ...
    def __div__(self, other: SupportsFloat) -> Self: ...
    def __truediv__(self, other: SupportsFloat) -> Self: ...
    def __abs__(self) -> Self: ...
    def __hash__(self) -> int: ...
111
lib/dateutil-stubs/rrule.pyi
Normal file
@@ -0,0 +1,111 @@
import datetime
from _typeshed import Incomplete
from collections.abc import Iterable, Iterator, Sequence
from typing_extensions import TypeAlias

from ._common import weekday as weekdaybase

YEARLY: int
MONTHLY: int
WEEKLY: int
DAILY: int
HOURLY: int
MINUTELY: int
SECONDLY: int

class weekday(weekdaybase): ...

weekdays: tuple[weekday, weekday, weekday, weekday, weekday, weekday, weekday]
MO: weekday
TU: weekday
WE: weekday
TH: weekday
FR: weekday
SA: weekday
SU: weekday

class rrulebase:
    def __init__(self, cache: bool = False) -> None: ...
    def __iter__(self) -> Iterator[datetime.datetime]: ...
    def __getitem__(self, item): ...
    def __contains__(self, item): ...
    def count(self): ...
    def before(self, dt, inc: bool = False): ...
    def after(self, dt, inc: bool = False): ...
    def xafter(self, dt, count: Incomplete | None = None, inc: bool = False): ...
    def between(self, after, before, inc: bool = False, count: int = 1): ...

class rrule(rrulebase):
    def __init__(
        self,
        freq,
        dtstart: datetime.date | None = None,
        interval: int = 1,
        wkst: weekday | int | None = None,
        count: int | None = None,
        until: datetime.date | int | None = None,
        bysetpos: int | Iterable[int] | None = None,
        bymonth: int | Iterable[int] | None = None,
        bymonthday: int | Iterable[int] | None = None,
        byyearday: int | Iterable[int] | None = None,
        byeaster: int | Iterable[int] | None = None,
        byweekno: int | Iterable[int] | None = None,
        byweekday: int | weekday | Iterable[int] | Iterable[weekday] | None = None,
        byhour: int | Iterable[int] | None = None,
        byminute: int | Iterable[int] | None = None,
        bysecond: int | Iterable[int] | None = None,
        cache: bool = False,
    ) -> None: ...
    def replace(self, **kwargs): ...

_RRule: TypeAlias = rrule

class _iterinfo:
    rrule: _RRule
    def __init__(self, rrule: _RRule) -> None: ...
    yearlen: int | None
    nextyearlen: int | None
    yearordinal: int | None
    yearweekday: int | None
    mmask: Sequence[int] | None
    mdaymask: Sequence[int] | None
    nmdaymask: Sequence[int] | None
    wdaymask: Sequence[int] | None
    mrange: Sequence[int] | None
    wnomask: Sequence[int] | None
    nwdaymask: Sequence[int] | None
    eastermask: Sequence[int] | None
    lastyear: int | None
    lastmonth: int | None
    def rebuild(self, year, month): ...
    def ydayset(self, year, month, day): ...
    def mdayset(self, year, month, day): ...
    def wdayset(self, year, month, day): ...
    def ddayset(self, year, month, day): ...
    def htimeset(self, hour, minute, second): ...
    def mtimeset(self, hour, minute, second): ...
    def stimeset(self, hour, minute, second): ...

class rruleset(rrulebase):
    class _genitem:
        dt: Incomplete
        genlist: list[Incomplete]
        gen: Incomplete
        def __init__(self, genlist, gen) -> None: ...
        def __next__(self) -> None: ...
        next = __next__
        def __lt__(self, other) -> bool: ...
        def __gt__(self, other) -> bool: ...
        def __eq__(self, other) -> bool: ...
        def __ne__(self, other) -> bool: ...

    def __init__(self, cache: bool = False) -> None: ...
    def rrule(self, rrule: _RRule): ...
    def rdate(self, rdate): ...
    def exrule(self, exrule): ...
    def exdate(self, exdate): ...

class _rrulestr:
    def __call__(self, s, **kwargs) -> rrule | rruleset: ...

rrulestr: _rrulestr
15
lib/dateutil-stubs/tz/__init__.pyi
Normal file
@@ -0,0 +1,15 @@
from .tz import (
    datetime_ambiguous as datetime_ambiguous,
    datetime_exists as datetime_exists,
    gettz as gettz,
    resolve_imaginary as resolve_imaginary,
    tzfile as tzfile,
    tzical as tzical,
    tzlocal as tzlocal,
    tzoffset as tzoffset,
    tzrange as tzrange,
    tzstr as tzstr,
    tzutc as tzutc,
)

UTC: tzutc
28
lib/dateutil-stubs/tz/_common.pyi
Normal file
@@ -0,0 +1,28 @@
import abc
from datetime import datetime, timedelta, tzinfo
from typing import ClassVar

def tzname_in_python2(namefunc): ...
def enfold(dt: datetime, fold: int = 1): ...

class _DatetimeWithFold(datetime):
    @property
    def fold(self): ...

# Doesn't actually have ABCMeta as the metaclass at runtime,
# but mypy complains if we don't have it in the stub.
# See discussion in #8908
class _tzinfo(tzinfo, metaclass=abc.ABCMeta):
    def is_ambiguous(self, dt: datetime) -> bool: ...
    def fromutc(self, dt: datetime) -> datetime: ...

class tzrangebase(_tzinfo):
    def __init__(self) -> None: ...
    def utcoffset(self, dt: datetime | None) -> timedelta | None: ...
    def dst(self, dt: datetime | None) -> timedelta | None: ...
    def tzname(self, dt: datetime | None) -> str: ...
    def fromutc(self, dt: datetime) -> datetime: ...
    def is_ambiguous(self, dt: datetime) -> bool: ...
    __hash__: ClassVar[None]  # type: ignore[assignment]
    def __ne__(self, other): ...
    __reduce__ = object.__reduce__
115
lib/dateutil-stubs/tz/tz.pyi
Normal file
@@ -0,0 +1,115 @@
import datetime
from _typeshed import Incomplete
from typing import ClassVar, Literal, Protocol, TypeVar

from ..relativedelta import relativedelta
from ._common import _tzinfo as _tzinfo, enfold as enfold, tzname_in_python2 as tzname_in_python2, tzrangebase as tzrangebase

_DT = TypeVar("_DT", bound=datetime.datetime)

ZERO: datetime.timedelta
EPOCH: datetime.datetime
EPOCHORDINAL: int

class tzutc(datetime.tzinfo):
    def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
    def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
    def tzname(self, dt: datetime.datetime | None) -> str: ...
    def is_ambiguous(self, dt: datetime.datetime | None) -> bool: ...
    def fromutc(self, dt: _DT) -> _DT: ...
    def __eq__(self, other): ...
    __hash__: ClassVar[None]  # type: ignore[assignment]
    def __ne__(self, other): ...
    __reduce__ = object.__reduce__

class tzoffset(datetime.tzinfo):
    def __init__(self, name, offset) -> None: ...
    def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
    def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
    def is_ambiguous(self, dt: datetime.datetime | None) -> bool: ...
    def tzname(self, dt: datetime.datetime | None) -> str: ...
    def fromutc(self, dt: _DT) -> _DT: ...
    def __eq__(self, other): ...
    __hash__: ClassVar[None]  # type: ignore[assignment]
    def __ne__(self, other): ...
    __reduce__ = object.__reduce__
    @classmethod
    def instance(cls, name, offset) -> tzoffset: ...

class tzlocal(_tzinfo):
    def __init__(self) -> None: ...
    def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
    def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
    def tzname(self, dt: datetime.datetime | None) -> str: ...
    def is_ambiguous(self, dt: datetime.datetime | None) -> bool: ...
    def __eq__(self, other): ...
    __hash__: ClassVar[None]  # type: ignore[assignment]
    def __ne__(self, other): ...
    __reduce__ = object.__reduce__

class _ttinfo:
    def __init__(self) -> None: ...
    def __eq__(self, other): ...
    __hash__: ClassVar[None]  # type: ignore[assignment]
    def __ne__(self, other): ...

class _TZFileReader(Protocol):
    # optional attribute:
    # name: str
    def read(self, size: int, /) -> bytes: ...
    def seek(self, target: int, whence: Literal[1], /) -> object: ...

class tzfile(_tzinfo):
    def __init__(self, fileobj: str | _TZFileReader, filename: str | None = None) -> None: ...
    def is_ambiguous(self, dt: datetime.datetime | None, idx: int | None = None) -> bool: ...
    def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
    def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ...
    def tzname(self, dt: datetime.datetime | None) -> str: ...
    def __eq__(self, other): ...
    __hash__: ClassVar[None]  # type: ignore[assignment]
    def __ne__(self, other): ...
    def __reduce__(self): ...
    def __reduce_ex__(self, protocol): ...

class tzrange(tzrangebase):
    hasdst: bool
    def __init__(
        self,
        stdabbr: str,
        stdoffset: int | datetime.timedelta | None = None,
        dstabbr: str | None = None,
        dstoffset: int | datetime.timedelta | None = None,
        start: relativedelta | None = None,
        end: relativedelta | None = None,
    ) -> None: ...
    def transitions(self, year: int) -> tuple[datetime.datetime, datetime.datetime]: ...
    def __eq__(self, other): ...

class tzstr(tzrange):
    hasdst: bool
    def __init__(self, s: str, posix_offset: bool = False) -> None: ...
    @classmethod
    def instance(cls, name, offset) -> tzoffset: ...

class _ICalReader(Protocol):
    # optional attribute:
    # name: str
    def read(self) -> str: ...

class tzical:
    def __init__(self, fileobj: str | _ICalReader) -> None: ...
    def keys(self): ...
    def get(self, tzid: Incomplete | None = None): ...

TZFILES: list[str]
TZPATHS: list[str]

def datetime_exists(dt: datetime.datetime, tz: datetime.tzinfo | None = None) -> bool: ...
def datetime_ambiguous(dt: datetime.datetime, tz: datetime.tzinfo | None = None) -> bool: ...
def resolve_imaginary(dt: datetime.datetime) -> datetime.datetime: ...

class _GetTZ:
    def __call__(self, name: str | None = ...) -> datetime.tzinfo | None: ...
    def nocache(self, name: str | None) -> datetime.tzinfo | None: ...

gettz: _GetTZ
5
lib/dateutil-stubs/utils.pyi
Normal file
@@ -0,0 +1,5 @@
from datetime import datetime, timedelta, tzinfo

def default_tzinfo(dt: datetime, tzinfo: tzinfo) -> datetime: ...
def today(tzinfo: tzinfo | None = None) -> datetime: ...
def within_delta(dt1: datetime, dt2: datetime, delta: timedelta) -> bool: ...
17
lib/dateutil-stubs/zoneinfo/__init__.pyi
Normal file
@@ -0,0 +1,17 @@
from _typeshed import Incomplete
from typing import IO
from typing_extensions import TypeAlias

__all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata"]

_MetadataType: TypeAlias = dict[str, Incomplete]

class ZoneInfoFile:
    zones: dict[Incomplete, Incomplete]
    metadata: _MetadataType | None
    def __init__(self, zonefile_stream: IO[bytes] | None = None) -> None: ...
    def get(self, name, default: Incomplete | None = None): ...

def get_zonefile_instance(new_instance: bool = False) -> ZoneInfoFile: ...
def gettz(name): ...
def gettz_db_metadata() -> _MetadataType: ...
11
lib/dateutil-stubs/zoneinfo/rebuild.pyi
Normal file
@@ -0,0 +1,11 @@
from _typeshed import Incomplete, StrOrBytesPath
from collections.abc import Sequence
from tarfile import TarInfo

def rebuild(
    filename: StrOrBytesPath,
    tag: Incomplete | None = None,
    format: str = "gz",
    zonegroups: Sequence[str | TarInfo] = [],
    metadata: Incomplete | None = None,
) -> None: ...
@@ -1,4 +1,6 @@
# -*- coding: utf-8 -*-
import sys

try:
    from ._version import version as __version__
except ImportError:
@@ -6,3 +8,17 @@ except ImportError:

__all__ = ['easter', 'parser', 'relativedelta', 'rrule', 'tz',
           'utils', 'zoneinfo']


def __getattr__(name):
    import importlib

    if name in __all__:
        return importlib.import_module("." + name, __name__)
    raise AttributeError(
        "module {!r} has not attribute {!r}".format(__name__, name)
    )


def __dir__():
    # __dir__ should include all the lazy-importable modules as well.
    return [x for x in globals() if x not in sys.modules] + __all__
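
The module-level __getattr__ above (PEP 562) makes the submodules lazy: they are imported on first attribute access rather than at package import time. A sketch:

    import dateutil

    tz = dateutil.tz   # first access triggers importlib.import_module(".tz", "dateutil")
    dateutil.easter    # same for every name in __all__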

@@ -1,5 +1,4 @@
# coding: utf-8
# file generated by setuptools_scm
# don't change, don't track in version control
version = '2.8.2'
__version__ = version = '2.9.0.post0'
version_tuple = (2, 8, 2)
__version_tuple__ = version_tuple = (2, 9, 0)
@@ -72,7 +72,7 @@ class isoparser(object):
    Common:

    - ``YYYY``
    - ``YYYY-MM`` or ``YYYYMM``
    - ``YYYY-MM``
    - ``YYYY-MM-DD`` or ``YYYYMMDD``

    Uncommon:
@@ -48,7 +48,7 @@ class relativedelta(object):
    the corresponding arithmetic operation on the original datetime value
    with the information in the relativedelta.

    weekday:
        One of the weekday instances (MO, TU, etc) available in the
        relativedelta module. These instances may receive a parameter N,
        specifying the Nth weekday, which could be positive or negative
@@ -182,7 +182,7 @@ class rrulebase(object):
    # __len__() introduces a large performance penalty.
    def count(self):
        """ Returns the number of recurrences in this set. It will have go
        trough the whole recurrence, if this hasn't been done before. """
        through the whole recurrence, if this hasn't been done before. """
        if self._len is None:
            for x in self:
                pass
@@ -34,7 +34,7 @@ except ImportError:
    from warnings import warn

ZERO = datetime.timedelta(0)
EPOCH = datetime.datetime.utcfromtimestamp(0)
EPOCH = datetime.datetime(1970, 1, 1, 0, 0)
EPOCHORDINAL = EPOCH.toordinal()
Some files were not shown because too many files have changed in this diff.